author      2023-05-16 17:07:40 -0700
committer   2023-05-16 17:07:40 -0700
commit      1ad8c54c90438c156ad068bdee1d70dfb9404db3 (patch)
tree        9977d0613f0bc0842cc7241be1e849840ec49fa9
parent      78229da76048e72aa4d92516e296a57484450a30 (diff)
download    bun-1ad8c54c90438c156ad068bdee1d70dfb9404db3.tar.gz
            bun-1ad8c54c90438c156ad068bdee1d70dfb9404db3.tar.zst
            bun-1ad8c54c90438c156ad068bdee1d70dfb9404db3.zip
fix asset naming output paths (#2904)
* rename to `src_path` and `dest_path`, use `dest_path` for output
* format
* option for compile
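
In practice the rename centers on `options.OutputFile`; a trimmed sketch of the affected fields follows (the real struct in src/options.zig carries many more members, and only the two renamed fields are shown with their old names):

    pub const OutputFile = struct {
        loader: Loader,
        input_loader: Loader = .js,
        src_path: Fs.Path,          // previously `input`: the source file the artifact was built from
        dest_path: []const u8 = "", // previously `path`: the relative path the artifact is written to
        value: Value,
        // ...other fields omitted from this sketch
    };

With the two paths named explicitly, the output-path logic (writing chunks to disk, the build command's file listing, the standalone graph) reads `dest_path`, while `src_path` keeps feeding the dev-server watcher and error messages.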
-rw-r--r--  .prettierignore            |  2
-rw-r--r--  bench/bundle/index.ts      |  2
-rw-r--r--  src/bundler.zig            |  2
-rw-r--r--  src/bundler/bundle_v2.zig  | 42
-rw-r--r--  src/cli/build_command.zig  | 23
-rw-r--r--  src/http.zig               | 12
-rw-r--r--  src/options.zig            | 24
-rw-r--r--  src/standalone_bun.zig     |  4

8 files changed, 68 insertions, 43 deletions
diff --git a/.prettierignore b/.prettierignore
index 006398f8c..e48c19844 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -10,3 +10,5 @@ test/snapshots
 test/snapshots-no-hmr
 test/js/deno/*.test.ts
 test/js/deno/**/*.test.ts
+bench/react-hello-world/react-hello-world.node.js
+
diff --git a/bench/bundle/index.ts b/bench/bundle/index.ts
index f67b2c645..2a5e4b80c 100644
--- a/bench/bundle/index.ts
+++ b/bench/bundle/index.ts
@@ -1 +1 @@
-console.log("Hello via Bun!");
\ No newline at end of file
+console.log("Hello via Bun!");
diff --git a/src/bundler.zig b/src/bundler.zig
index 7bf7d6ba3..fd4e11d9a 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -960,7 +960,7 @@ pub const Bundler = struct {
             file_path.pretty = Linker.relative_paths_list.append(string, bundler.fs.relativeTo(file_path.text)) catch unreachable;

             var output_file = options.OutputFile{
-                .input = file_path,
+                .src_path = file_path,
                 .loader = loader,
                 .value = undefined,
             };
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 82e6bdc6d..7f2437619 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -1250,8 +1250,8 @@ pub const BundleV2 = struct {
             defer build.output_files.deinit();
             var to_assign_on_sourcemap: JSC.JSValue = .zero;
             for (output_files, 0..) |*output_file, i| {
-                defer bun.default_allocator.free(output_file.input.text);
-                defer bun.default_allocator.free(output_file.path);
+                defer bun.default_allocator.free(output_file.src_path.text);
+                defer bun.default_allocator.free(output_file.dest_path);
                 const result = output_file.toJS(
                     if (!this.config.outdir.isEmpty())
                         if (std.fs.path.isAbsolute(this.config.outdir.list.items))
@@ -1259,7 +1259,7 @@
                                 u8,
                                 bun.path.joinAbsString(
                                     this.config.outdir.toOwnedSliceLeaky(),
-                                    &[_]string{output_file.path},
+                                    &[_]string{output_file.dest_path},
                                     .auto,
                                 ),
                             ) catch unreachable
@@ -1268,14 +1268,14 @@
                                 u8,
                                 bun.path.joinAbsString(
                                     Fs.FileSystem.instance.top_level_dir,
-                                    &[_]string{ this.config.dir.toOwnedSliceLeaky(), this.config.outdir.toOwnedSliceLeaky(), output_file.path },
+                                    &[_]string{ this.config.dir.toOwnedSliceLeaky(), this.config.outdir.toOwnedSliceLeaky(), output_file.dest_path },
                                     .auto,
                                 ),
                             ) catch unreachable
                     else
                         bun.default_allocator.dupe(
                             u8,
-                            output_file.path,
+                            output_file.dest_path,
                         ) catch unreachable,
                     globalThis,
                 );
@@ -8901,6 +8901,11 @@

         const root_path = c.resolver.opts.output_dir;

+        if (root_path.len == 0 and c.parse_graph.additional_output_files.items.len > 0 and !c.resolver.opts.compile) {
+            try c.log.addError(null, Logger.Loc.Empty, "cannot write multiple output files without an output directory");
+            return error.MultipleOutputFilesWithoutOutputDir;
+        }
+
         if (root_path.len > 0) {
             try c.writeOutputFilesToDisk(root_path, chunks, react_client_components_manifest, &output_files);
         } else {
@@ -9334,6 +9339,19 @@
                     src.value.buffer.allocator.free(bytes);
                 }

+                if (std.fs.path.dirname(src.dest_path)) |rel_parent| {
+                    if (rel_parent.len > 0) {
+                        root_dir.dir.makePath(rel_parent) catch |err| {
+                            c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "{s} creating outdir {} while saving file {}", .{
+                                @errorName(err),
+                                bun.fmt.quote(rel_parent),
+                                bun.fmt.quote(src.dest_path),
+                            }) catch unreachable;
+                            return err;
+                        };
+                    }
+                }
+
                 switch (JSC.Node.NodeFS.writeFileWithPathBuffer(
                     &pathbuf,
                     JSC.Node.Arguments.WriteFile{
@@ -9351,15 +9369,15 @@
                         .dirfd = @intCast(bun.FileDescriptor, root_dir.dir.fd),
                         .file = .{
                             .path = JSC.Node.PathLike{
-                                .string = JSC.PathString.init(src.input.text),
+                                .string = JSC.PathString.init(src.dest_path),
                             },
                         },
                     },
                 )) {
                     .err => |err| {
-                        c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "{} writing chunk {}", .{
+                        c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "{} writing file {}", .{
                             bun.fmt.quote(err.toSystemError().message.slice()),
-                            bun.fmt.quote(src.input.text),
+                            bun.fmt.quote(src.src_path.text),
                         }) catch unreachable;
                         return error.WriteFailed;
                     },
@@ -10752,7 +10770,7 @@ pub const Chunk = struct {
                     .chunk, .asset => {
                         const index = piece.index.index;
                         const file_path = switch (piece.index.kind) {
-                            .asset => graph.additional_output_files.items[additional_files[index].last().?.output_file].input.text,
+                            .asset => graph.additional_output_files.items[additional_files[index].last().?.output_file].src_path.text,
                            .chunk => chunks[index].final_rel_path,
                             else => unreachable,
                         };
@@ -10803,7 +10821,7 @@
                         .asset => {
                             shift.before.advance(unique_key_for_additional_files[index]);
                             const file = graph.additional_output_files.items[additional_files[index].last().?.output_file];
-                            break :brk file.input.text;
+                            break :brk file.src_path.text;
                         },
                         .chunk => {
                             const piece_chunk = chunks[index];
@@ -10928,7 +10946,7 @@
                             const output_file = files.last().?.output_file;

-                            break :brk graph.additional_output_files.items[output_file].path;
+                            break :brk graph.additional_output_files.items[output_file].dest_path;
                         },
                         .chunk => chunks[index].final_rel_path,
                         else => unreachable,
@@ -10969,7 +10987,7 @@
                             const output_file = files.last().?.output_file;

-                            break :brk graph.additional_output_files.items[output_file].path;
+                            break :brk graph.additional_output_files.items[output_file].dest_path;
                         },
                         .chunk => chunks[index].final_rel_path,
                         else => unreachable,
diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig
index e419b649c..6609facac 100644
--- a/src/cli/build_command.zig
+++ b/src/cli/build_command.zig
@@ -53,6 +53,9 @@ pub const BuildCommand = struct {
         this_bundler.options.source_map = options.SourceMapOption.fromApi(ctx.args.source_map);
         this_bundler.resolver.opts.source_map = options.SourceMapOption.fromApi(ctx.args.source_map);

+        this_bundler.options.compile = ctx.bundler_options.compile;
+        this_bundler.resolver.opts.compile = ctx.bundler_options.compile;
+
         if (this_bundler.options.source_map == .external and ctx.bundler_options.outdir.len == 0 and !ctx.bundler_options.compile) {
             Output.prettyErrorln("<r><red>error<r><d>:<r> cannot use an external source map without --outdir", .{});
             Global.exit(1);
@@ -274,7 +277,7 @@
         var output_dir = this_bundler.options.output_dir;
         if (outfile.len > 0 and output_files.len == 1 and output_files[0].value == .buffer) {
             output_dir = std.fs.path.dirname(outfile) orelse ".";
-            output_files[0].path = std.fs.path.basename(outfile);
+            output_files[0].dest_path = std.fs.path.basename(outfile);
         }

         if (!ctx.bundler_options.compile) {
@@ -302,14 +305,14 @@
             var all_paths = try ctx.allocator.alloc([]const u8, output_files.len);
             var max_path_len: usize = 0;
             for (all_paths, output_files) |*dest, src| {
-                dest.* = src.path;
+                dest.* = src.dest_path;
             }

             var from_path = resolve_path.longestCommonPath(all_paths);

             for (output_files) |f| {
                 max_path_len = std.math.max(
-                    std.math.max(from_path.len, f.path.len) + 2 - from_path.len,
+                    std.math.max(from_path.len, f.dest_path.len) + 2 - from_path.len,
                     max_path_len,
                 );
             }
@@ -367,17 +370,17 @@
                 switch (f.value) {
                     // Nothing to do in this case
                     .saved => {
-                        rel_path = f.path;
-                        if (f.path.len > from_path.len) {
-                            rel_path = resolve_path.relative(from_path, f.path);
+                        rel_path = f.dest_path;
+                        if (f.dest_path.len > from_path.len) {
+                            rel_path = resolve_path.relative(from_path, f.dest_path);
                         }
                     },

                     // easy mode: write the buffer
                     .buffer => |value| {
-                        rel_path = f.path;
-                        if (f.path.len > from_path.len) {
-                            rel_path = resolve_path.relative(from_path, f.path);
+                        rel_path = f.dest_path;
+                        if (f.dest_path.len > from_path.len) {
+                            rel_path = resolve_path.relative(from_path, f.dest_path);
                             if (std.fs.path.dirname(rel_path)) |parent| {
                                 if (parent.len > root_path.len) {
                                     try root_dir.dir.makePath(parent);
@@ -416,7 +419,7 @@
                         }
                     },
                     .move => |value| {
-                        const primary = f.path[from_path.len..];
+                        const primary = f.dest_path[from_path.len..];
                         bun.copy(u8, filepath_buf[2..], primary);
                         rel_path = filepath_buf[0 .. primary.len + 2];
                         rel_path = value.pathname;
diff --git a/src/http.zig b/src/http.zig
index a01fd3e1a..c54f4ea9c 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -2252,7 +2252,7 @@ pub const RequestContext = struct {
                 return;
             };

-            const hash = Watcher.getHash(result.file.input.text);
+            const hash = Watcher.getHash(result.file.src_path.text);
             const input_fd = if (ctx.watcher.indexOf(hash)) |ind|
                 if (ind > 0) ctx.watcher.watchlist.items(.fd)[ind] else null
             else
@@ -2448,7 +2448,7 @@
                 SocketPrinterInternal.reserveNext,
                 SocketPrinterInternal.advanceBy,
             );
-            const loader = ctx.bundler.options.loaders.get(result.file.input.name.ext) orelse .file;
+            const loader = ctx.bundler.options.loaders.get(result.file.src_path.name.ext) orelse .file;

             var socket_printer = SocketPrinter.init(
                 SocketPrinterInternal.init(ctx, loader),
@@ -2512,7 +2512,7 @@
             if (written.input_fd) |written_fd| {
                 try ctx.watcher.addFile(
                     written_fd,
-                    result.file.input.text,
+                    result.file.src_path.text,
                     hash,
                     loader,
                     resolve_result.dirname_fd,
@@ -2574,8 +2574,8 @@

             if (ctx.watcher.addFile(
                 file.fd,
-                result.file.input.text,
-                Watcher.getHash(result.file.input.text),
+                result.file.src_path.text,
+                Watcher.getHash(result.file.src_path.text),
                 result.file.loader,
                 file.dir,
                 null,
@@ -2595,7 +2595,7 @@
             var weak_etag = std.hash.Wyhash.init(0);
             weak_etag_buffer[0] = 'W';
             weak_etag_buffer[1] = '/';
-            weak_etag.update(result.file.input.text);
+            weak_etag.update(result.file.src_path.text);
             std.mem.writeIntNative(u64, weak_etag_tmp_buffer[0..8], result.file.size);
             weak_etag.update(weak_etag_tmp_buffer[0..8]);
diff --git a/src/options.zig b/src/options.zig
index a920c01c8..9f5d3c552 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -1454,6 +1454,8 @@ pub const BundleOptions = struct {
     minify_syntax: bool = false,
     minify_identifiers: bool = false,

+    compile: bool = false,
+
     /// This is a list of packages which even when require() is used, we will
     /// instead convert to ESM import statements.
     ///
@@ -1986,7 +1988,7 @@ pub const TransformOptions = struct {
 pub const OutputFile = struct {
     loader: Loader,
     input_loader: Loader = .js,
-    input: Fs.Path,
+    src_path: Fs.Path,
     value: Value,
     size: usize = 0,
     size_without_sourcemap: usize = 0,
@@ -1995,7 +1997,7 @@
     is_executable: bool = false,
     source_map_index: u32 = std.math.maxInt(u32),
     output_kind: JSC.API.BuildArtifact.OutputKind = .chunk,
-    path: []const u8 = "",
+    dest_path: []const u8 = "",

     // Depending on:
     // - The target
@@ -2072,7 +2074,7 @@ pub const OutputFile = struct {
     pub fn initPending(loader: Loader, pending: resolver.Result) OutputFile {
         return .{
             .loader = loader,
-            .input = pending.pathConst().?.*,
+            .src_path = pending.pathConst().?.*,
             .size = 0,
             .value = .{ .pending = pending },
         };
@@ -2081,7 +2083,7 @@
     pub fn initFile(file: std.fs.File, pathname: string, size: usize) OutputFile {
         return .{
             .loader = .file,
-            .input = Fs.Path.init(pathname),
+            .src_path = Fs.Path.init(pathname),
             .size = size,
             .value = .{ .copy = FileOperation.fromFile(file.handle, pathname) },
         };
@@ -2122,8 +2124,8 @@ pub const OutputFile = struct {
         return OutputFile{
             .loader = options.loader,
             .input_loader = options.input_loader,
-            .input = Fs.Path.init(options.input_path),
-            .path = options.output_path,
+            .src_path = Fs.Path.init(options.input_path),
+            .dest_path = options.output_path,
             .size = options.size orelse switch (options.data) {
                 .buffer => |buf| buf.data.len,
                 .file => |file| file.size,
@@ -2151,7 +2153,7 @@
     pub fn initBuf(buf: []const u8, allocator: std.mem.Allocator, pathname: string, loader: Loader, hash: ?u64, source_map_index: ?u32) OutputFile {
         return .{
             .loader = loader,
-            .input = Fs.Path.init(pathname),
+            .src_path = Fs.Path.init(pathname),
             .size = buf.len,
             .hash = hash orelse 0,
             .source_map_index = source_map_index orelse std.math.maxInt(u32),
@@ -2175,7 +2177,7 @@
         const fd_out = file_out.handle;
         var do_close = false;
         // TODO: close file_out on error
-        const fd_in = (try std.fs.openFileAbsolute(file.input.text, .{ .mode = .read_only })).handle;
+        const fd_in = (try std.fs.openFileAbsolute(file.src_path.text, .{ .mode = .read_only })).handle;

         if (Environment.isWindows) {
             Fs.FileSystem.setMaxFd(fd_out);
@@ -2230,11 +2232,11 @@
             },
             .saved => brk: {
                 var build_output = bun.default_allocator.create(JSC.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
-                const path_to_use = owned_pathname orelse this.input.text;
+                const path_to_use = owned_pathname orelse this.src_path.text;

                 var file_blob = JSC.WebCore.Blob.Store.initFile(
                     JSC.Node.PathOrFileDescriptor{
-                        .path = JSC.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.input.text) catch unreachable)) },
+                        .path = JSC.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable)) },
                     },
                     this.loader.toMimeType(),
                     globalObject.allocator(),
@@ -2269,7 +2271,7 @@
                     .hash = this.hash,
                     .loader = this.input_loader,
                     .output_kind = this.output_kind,
-                    .path = owned_pathname orelse bun.default_allocator.dupe(u8, this.input.text) catch unreachable,
+                    .path = owned_pathname orelse bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable,
                 };
                 break :brk build_output.toJS(globalObject);
             },
diff --git a/src/standalone_bun.zig b/src/standalone_bun.zig
index c44002505..30a12f0dd 100644
--- a/src/standalone_bun.zig
+++ b/src/standalone_bun.zig
@@ -110,7 +110,7 @@ pub const StandaloneModuleGraph = struct {
         var string_builder = bun.StringBuilder{};
         var module_count: usize = 0;
         for (output_files, 0..) |output_file, i| {
-            string_builder.count(output_file.path);
+            string_builder.count(output_file.dest_path);
             string_builder.count(prefix);
             if (output_file.value == .buffer) {
                 if (output_file.output_kind == .sourcemap) {
@@ -153,7 +153,7 @@
             }

             var module = CompiledModuleGraphFile{
-                .name = string_builder.fmtAppendCount("{s}{s}", .{ prefix, output_file.path }),
+                .name = string_builder.fmtAppendCount("{s}{s}", .{ prefix, output_file.dest_path }),
                 .loader = output_file.loader,
                 .contents = string_builder.appendCount(output_file.value.buffer.bytes),
             };