-rw-r--r--  src/bun.js/api/JSBundler.zig  |  18
-rw-r--r--  src/bundler/bundle_v2.zig     | 304
-rw-r--r--  src/js_ast.zig                |   2
-rw-r--r--  src/js_parser.zig             |  14
-rw-r--r--  src/options.zig               |  22
5 files changed, 319 insertions(+), 41 deletions(-)
diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig
index 60933f3dc..8a4d62b13 100644
--- a/src/bun.js/api/JSBundler.zig
+++ b/src/bun.js/api/JSBundler.zig
@@ -63,6 +63,7 @@ pub const JSBundler = struct {
         label: OwnedString = OwnedString.initEmpty(bun.default_allocator),
         external: bun.StringSet = bun.StringSet.init(bun.default_allocator),
         sourcemap: options.SourceMapOption = .none,
+        public_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),

         pub const List = bun.StringArrayHashMapUnmanaged(Config);

@@ -77,6 +78,7 @@ pub const JSBundler = struct {
                 .names = .{
                     .owned_entry_point = OwnedString.initEmpty(allocator),
                     .owned_chunk = OwnedString.initEmpty(allocator),
+                    .owned_asset = OwnedString.initEmpty(allocator),
                 },
             };
             errdefer this.deinit(allocator);
@@ -159,6 +161,11 @@ pub const JSBundler = struct {
                 this.dir.appendSliceExact(globalThis.bunVM().bundler.fs.top_level_dir) catch unreachable;
             }

+            if (try config.getOptional(globalThis, "publicPath", ZigString.Slice)) |slice| {
+                defer slice.deinit();
+                this.public_path.appendSliceExact(slice.slice()) catch unreachable;
+            }
+
             if (try config.getObject(globalThis, "naming")) |naming| {
                 if (try naming.getOptional(globalThis, "entrypoint", ZigString.Slice)) |slice| {
                     defer slice.deinit();
@@ -171,6 +178,12 @@ pub const JSBundler = struct {
                     this.names.owned_chunk.appendSliceExact(slice.slice()) catch unreachable;
                     this.names.chunk.data = this.names.owned_chunk.list.items;
                 }
+
+                if (try naming.getOptional(globalThis, "asset", ZigString.Slice)) |slice| {
+                    defer slice.deinit();
+                    this.names.owned_asset.appendSliceExact(slice.slice()) catch unreachable;
+                    this.names.asset.data = this.names.owned_asset.list.items;
+                }
             }

             if (try config.getArray(globalThis, "plugins")) |array| {
@@ -283,9 +296,13 @@ pub const JSBundler = struct {
             owned_chunk: OwnedString = OwnedString.initEmpty(bun.default_allocator),
             chunk: options.PathTemplate = options.PathTemplate.chunk,

+            owned_asset: OwnedString = OwnedString.initEmpty(bun.default_allocator),
+            asset: options.PathTemplate = options.PathTemplate.asset,
+
             pub fn deinit(self: *Names) void {
                 self.owned_entry_point.deinit();
                 self.owned_chunk.deinit();
+                self.owned_asset.deinit();
             }
         };

@@ -328,6 +345,7 @@ pub const JSBundler = struct {
             self.names.deinit();
             self.label.deinit();
             self.outdir.deinit();
+            self.public_path.deinit();
         }
     };
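Taken together, these bindings expose two new user-facing options: a top-level `publicPath` string and an `asset` entry under `naming`, beside the existing entry-point and chunk templates. A minimal TypeScript sketch of the config surface, assuming the option names exactly as parsed above (`publicPath`, `naming.entrypoint`, `naming.chunk`, `naming.asset`); the CDN URL and file names are illustrative:

```ts
// Sketch: exercising the new config surface. `publicPath` is prepended to
// generated chunk/asset references; `naming.asset` names the copied "file"
// loader outputs. Placeholder syntax mirrors PathTemplate: [dir], [name],
// [hash], [ext].
await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  publicPath: "https://cdn.example.com/",
  naming: {
    entrypoint: "[dir]/[name].[ext]",
    chunk: "[name]-[hash].[ext]",
    asset: "[name]-[hash].[ext]",
  },
});
```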
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 8277007d0..6bb19587a 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -273,6 +273,8 @@ pub const BundleV2 = struct {
     /// Allocations not tracked by a threadlocal heap
     free_list: std.ArrayList(string) = std.ArrayList(string).init(bun.default_allocator),

+    unique_key: u64 = 0,
+
     const debug = Output.scoped(.Bundle, false);

     pub inline fn loop(this: *BundleV2) *EventLoop {
@@ -716,6 +718,7 @@ pub const BundleV2 = struct {
         enable_reloading: bool,
     ) !std.ArrayList(options.OutputFile) {
         var this = try BundleV2.init(bundler, allocator, event_loop, enable_reloading, null, null);
+        this.unique_key = unique_key;

         if (this.bundler.log.msgs.items.len > 0) {
             return error.BuildFailed;
@@ -733,19 +736,80 @@ pub const BundleV2 = struct {
             return error.BuildFailed;
         }

+        const reachable_files = try this.findReachableFiles();
+
+        try this.processFilesToCopy(reachable_files);
+
         try this.cloneAST();

         var chunks = try this.linker.link(
             this,
             this.graph.entry_points.items,
             this.graph.use_directive_entry_points,
-            try this.findReachableFiles(),
+            reachable_files,
             unique_key,
         );

         return try this.linker.generateChunksInParallel(chunks);
     }

+    pub fn processFilesToCopy(
+        this: *BundleV2,
+        reachable_files: []const Index,
+    ) !void {
+        if (this.graph.estimated_file_loader_count > 0) {
+            const unique_key_for_additional_files = this.graph.input_files.items(.unique_key_for_additional_file);
+            const content_hashes_for_additional_files = this.graph.input_files.items(.content_hash_for_additional_file);
+            const sources = this.graph.input_files.items(.source);
+            var additional_output_files = std.ArrayList(options.OutputFile).init(this.bundler.allocator);
+
+            var additional_files: []BabyList(AdditionalFile) = this.graph.input_files.items(.additional_files);
+            for (reachable_files) |reachable_source| {
+                const index = reachable_source.get();
+                const key = unique_key_for_additional_files[index];
+                if (key.len > 0) {
+                    var template = PathTemplate.asset;
+                    if (this.bundler.options.asset_names.len > 0)
+                        template.data = this.bundler.options.asset_names;
+                    const source = &sources[index];
+                    var pathname = source.path.name;
+                    // TODO: outbase
+                    const rel = bun.path.relative(this.bundler.fs.top_level_dir, source.path.text);
+                    if (rel.len > 0 and rel[0] != '.')
+                        pathname = Fs.PathName.init(rel);
+
+                    template.placeholder.name = pathname.base;
+                    template.placeholder.dir = pathname.dir;
+                    template.placeholder.ext = pathname.ext;
+                    if (template.placeholder.ext.len > 0 and template.placeholder.ext[0] == '.')
+                        template.placeholder.ext = template.placeholder.ext[1..];
+
+                    if (template.needs(.hash)) {
+                        template.placeholder.hash = content_hashes_for_additional_files[index];
+                    }
+
+                    const loader = source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file;
+
+                    additional_output_files.append(
+                        options.OutputFile.initBuf(
+                            source.contents,
+                            bun.default_allocator,
+                            std.fmt.allocPrint(bun.default_allocator, "{}", .{
+                                template,
+                            }) catch unreachable,
+                            loader,
+                        ),
+                    ) catch unreachable;
+                    additional_files[index].push(this.graph.allocator, AdditionalFile{
+                        .output_file = @truncate(u32, additional_output_files.items.len - 1),
+                    }) catch unreachable;
+                }
+            }
+
+            this.graph.additional_output_files = additional_output_files.moveToUnmanaged();
+        }
+    }
+
     pub fn generateFromJavaScript(
         config: bun.JSC.API.JSBundler.Config,
         plugins: ?*bun.JSC.API.JSBundler.Plugin,
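`processFilesToCopy` turns every reachable file-loader source into a standalone output file: it instantiates the asset path template from the source's project-relative path, fills the `[hash]` placeholder only when the template references it, and appends the raw contents to `additional_output_files`. A TypeScript sketch of the placeholder substitution, using a hypothetical `renderTemplate` helper (the real `PathTemplate.format` walks the string once instead):

```ts
// Hypothetical mini PathTemplate: fills [dir], [name], [ext], [hash].
interface Placeholder {
  dir: string;
  name: string;
  ext: string; // stored without the leading dot, as in the Zig code above
  hash?: string;
}

function renderTemplate(template: string, p: Placeholder): string {
  return template
    .replaceAll("[dir]", p.dir)
    .replaceAll("[name]", p.name)
    .replaceAll("[ext]", p.ext)
    .replaceAll("[hash]", p.hash ?? "");
}

// An asset images/logo.png with content hash "abc123" and the default
// asset template "./[name]-[hash].[ext]" becomes "./logo-abc123.png".
console.log(renderTemplate("./[name]-[hash].[ext]", {
  dir: "images",
  name: "logo",
  ext: "png",
  hash: "abc123",
}));
```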
@@ -1169,7 +1233,13 @@ pub const BundleV2 = struct {
             completion.env,
         );
         bundler.options.jsx = config.jsx;
+
         bundler.options.entry_names = config.names.entry_point.data;
+        bundler.options.chunk_names = config.names.chunk.data;
+        bundler.options.asset_names = config.names.asset.data;
+
+        bundler.options.public_path = config.public_path.list.items;
+
         bundler.options.output_dir = config.outdir.toOwnedSliceLeaky();
         bundler.options.minify_syntax = config.minify.syntax;
         bundler.options.minify_whitespace = config.minify.whitespace;
@@ -1233,6 +1303,8 @@ pub const BundleV2 = struct {
     }

     pub fn runFromJSInNewThread(this: *BundleV2, config: *const bun.JSC.API.JSBundler.Config) !std.ArrayList(options.OutputFile) {
+        this.unique_key = std.crypto.random.int(u64);
+
         if (this.bundler.log.errors > 0) {
             return error.BuildFailed;
         }
@@ -1263,12 +1335,16 @@ pub const BundleV2 = struct {
             bun.Mimalloc.mi_collect(true);
         }

+        const reachable_files = try this.findReachableFiles();
+
+        try this.processFilesToCopy(reachable_files);
+
         var chunks = try this.linker.link(
             this,
             this.graph.entry_points.items,
             this.graph.use_directive_entry_points,
-            try this.findReachableFiles(),
-            std.crypto.random.int(u64),
+            reachable_files,
+            this.unique_key,
         );

         if (this.bundler.log.errors > 0) {
@@ -1351,6 +1427,7 @@ pub const BundleV2 = struct {
             graph.parse_pending + @intCast(usize, diff)
         else
             graph.parse_pending - @intCast(usize, -diff);
+
         switch (parse_result.value) {
             .empty => |empty_result| {
                 var input_files = graph.input_files.slice();
@@ -1395,17 +1472,21 @@ pub const BundleV2 = struct {
                             ) catch {};
                         }
                     }
-
-                    var input_files = graph.input_files.slice();
-                    input_files.items(.source)[result.source.index.get()] = result.source;
-                    debug("onParse({d}, {s}) = {d} imports, {d} exports", .{
-                        result.source.index.get(),
-                        result.source.path.text,
-                        result.ast.import_records.len,
-                        result.ast.named_exports.count(),
-                    });
                 }

+                // Warning: this array may resize in this function call
+                // do not reuse it.
+                graph.input_files.items(.source)[result.source.index.get()] = result.source;
+                graph.input_files.items(.unique_key_for_additional_file)[result.source.index.get()] = result.unique_key_for_additional_file;
+                graph.input_files.items(.content_hash_for_additional_file)[result.source.index.get()] = result.content_hash_for_additional_file;
+
+                debug("onParse({d}, {s}) = {d} imports, {d} exports", .{
+                    result.source.index.get(),
+                    result.source.path.text,
+                    result.ast.import_records.len,
+                    result.ast.named_exports.count(),
+                });
+
                 var iter = result.resolve_queue.iterator();
                 defer result.resolve_queue.deinit();
@@ -1426,28 +1507,45 @@ pub const BundleV2 = struct {
                     }

                     if (!existing.found_existing) {
-                        var new_task = value;
+                        var new_task: *ParseTask = value;
                         var new_input_file = Graph.InputFile{
                             .source = Logger.Source.initEmptyFile(new_task.path.text),
                             .side_effects = value.side_effects,
                         };
+
+                        const loader = new_task.loader orelse new_input_file.source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file;
+
                         new_input_file.source.index = Index.source(graph.input_files.len);
                         new_input_file.source.path = new_task.path;
                         new_input_file.source.key_path = new_input_file.source.path;
-                        // graph.source_index_map.put(graph.allocator, new_input_file.source.index.get, new_input_file.source) catch unreachable;
                         existing.value_ptr.* = new_input_file.source.index.get();
                         new_task.source_index = new_input_file.source.index;
+
                         new_task.ctx = this;
                         graph.input_files.append(graph.allocator, new_input_file) catch unreachable;
                         graph.ast.append(graph.allocator, js_ast.Ast.empty) catch unreachable;
                         diff += 1;

+                        if (loader.shouldCopyForBundling()) {
+                            var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()];
+                            additional_files.push(this.graph.allocator, .{ .source_index = new_task.source_index.get() }) catch unreachable;
+                            new_input_file.side_effects = _resolver.SideEffects.no_side_effects__pure_data;
+                            graph.estimated_file_loader_count += 1;
+                        }
+
                         if (this.enqueueOnLoadPluginIfNeeded(new_task)) {
                             continue;
                         }

                         batch.push(ThreadPoolLib.Batch.from(&new_task.task));
                     } else {
+                        const loader = value.loader orelse graph.input_files.items(.source)[existing.value_ptr.*].path.loader(&this.bundler.options.loaders) orelse options.Loader.file;
+                        if (loader.shouldCopyForBundling()) {
+                            var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()];
+                            additional_files.push(this.graph.allocator, .{ .source_index = existing.value_ptr.* }) catch unreachable;
+                            graph.estimated_file_loader_count += 1;
+                        }
+
                         bun.default_allocator.destroy(value);
                     }
                 }
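The `additional_files` bookkeeping is two-phase: while imports are being resolved, the importing file's list records the *source index* of each file-loader dependency, and later `processFilesToCopy` pushes an *output file* index onto the copied file's own list, which is what the linker reads back. A TypeScript sketch of that discriminated union (names and indices are illustrative):

```ts
// Sketch of the AdditionalFile union and its two-phase use.
type AdditionalFile =
  | { kind: "source_index"; index: number } // recorded while imports resolve
  | { kind: "output_file"; index: number }; // recorded once the copy is scheduled

// One list per input file, as in graph.input_files.items(.additional_files).
const additionalFiles: AdditionalFile[][] = [[], [], [], [], [], []];

// Parse phase: importer (source 0) discovers a file-loader import (source 5).
additionalFiles[0].push({ kind: "source_index", index: 5 });

// processFilesToCopy: source 5's contents became additional output file 2.
additionalFiles[5].push({ kind: "output_file", index: 2 });

// Link phase reads the last entry on the asset's own list, expecting output_file.
const last = additionalFiles[5].at(-1);
if (last?.kind === "output_file") {
  console.log(`asset #5 resolves to output file #${last.index}`);
}
```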
@@ -1596,6 +1694,12 @@ pub const ParseTask = struct {
             use_directive: UseDirective = .none,
             watcher_data: WatcherData = .{},
             side_effects: ?_resolver.SideEffects = null,
+
+            /// Used by "file" loader files.
+            unique_key_for_additional_file: []const u8 = "",
+
+            /// Used by "file" loader files.
+            content_hash_for_additional_file: u64 = 0,
         };

         pub const Error = struct {
@@ -1633,6 +1737,8 @@ pub const ParseTask = struct {
         resolver: *Resolver,
         source: Logger.Source,
         loader: Loader,
+        unique_key_prefix: u64,
+        unique_key_for_additional_file: *[]const u8,
     ) !js_ast.Ast {
         switch (loader) {
             .jsx, .tsx, .js, .ts => {
@@ -1662,6 +1768,15 @@ pub const ParseTask = struct {
                 }, Logger.Loc{ .start = 0 });
                 return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?;
             },
+            // TODO: css
+            .css, .file => {
+                const unique_key = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable;
+                const root = Expr.init(E.String, E.String{
+                    .data = unique_key,
+                }, Logger.Loc{ .start = 0 });
+                unique_key_for_additional_file.* = unique_key;
+                return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?;
+            },
             else => {
                 const root = Expr.init(E.String, E.String{
                     .data = source.path.text,
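Instead of parsing a binary file, the `.file` (and, eventually, `.css`) branch synthesizes a module whose default export is a magic placeholder string in the `{hex(prefix)}A{index:0>8}` format, where the prefix is the per-build random `unique_key`. Because the prefix is random, the placeholder is vanishingly unlikely to collide with user code, so the linker can later find the literal string in generated output and splice in the real asset path. A TypeScript sketch of the scheme (helper names are hypothetical):

```ts
// Sketch: generate a per-build random 64-bit prefix without assuming
// BigUint64Array support in crypto.getRandomValues.
const [hi, lo] = crypto.getRandomValues(new Uint32Array(2));
const uniqueKeyPrefix = (BigInt(hi) << 32n) | BigInt(lo);

// Mirrors the "{hex}A{d:0>8}" format string used in getAST above.
function uniqueKeyFor(sourceIndex: number): string {
  return `${uniqueKeyPrefix.toString(16)}A${String(sourceIndex).padStart(8, "0")}`;
}

// The lazy-export AST for a file-loader module is effectively this one line:
const moduleBody = `export default ${JSON.stringify(uniqueKeyFor(7))};`;
console.log(moduleBody); // e.g. export default "9f86d081884c7d65A00000007";
```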
@@ -1685,6 +1800,7 @@ pub const ParseTask = struct {
         var resolver: *Resolver = &bundler.resolver;
         var file_path = task.path;
         step.* = .read_file;
+        const loader = task.loader orelse file_path.loader(&bundler.options.loaders) orelse options.Loader.file;

         var entry: CacheEntry = switch (task.contents_or_fd) {
             .fd => brk: {
@@ -1725,7 +1841,11 @@ pub const ParseTask = struct {
                     };

                 break :brk resolver.caches.fs.readFileWithAllocator(
-                    allocator,
+                    if (loader.shouldCopyForBundling())
+                        // The OutputFile will own the memory for the contents
+                        bun.default_allocator
+                    else
+                        allocator,
                     bundler.fs,
                     file_path.text,
                     task.contents_or_fd.fd.dir,
@@ -1790,7 +1910,6 @@ pub const ParseTask = struct {
         };

         const source_dir = file_path.sourceDir();
-        const loader = task.loader orelse file_path.loader(&bundler.options.loaders) orelse options.Loader.file;
         const platform = use_directive.platform(task.known_platform orelse bundler.options.platform);

         var resolve_queue = ResolveQueue.init(bun.default_allocator);
@@ -1821,8 +1940,10 @@ pub const ParseTask = struct {

         task.jsx.parse = loader.isJSX();

+        var unique_key_for_additional_file: []const u8 = "";
+
         var ast: js_ast.Ast = if (!is_empty)
-            try getAST(log, bundler, opts, allocator, resolver, source, loader)
+            try getAST(log, bundler, opts, allocator, resolver, source, loader, task.ctx.unique_key, &unique_key_for_additional_file)
         else
             try getEmptyAST(log, bundler, opts, allocator, source);

@@ -2044,6 +2165,14 @@ pub const ParseTask = struct {
             .resolve_queue = resolve_queue,
             .log = log.*,
             .use_directive = use_directive,
+            .unique_key_for_additional_file = unique_key_for_additional_file,
+
+            // Hash the files in here so that we do it in parallel.
+            .content_hash_for_additional_file = if (loader.shouldCopyForBundling())
+                ContentHasher.run(source.contents)
+            else
+                0,
+
             .watcher_data = .{
                 .fd = if (task.contents_or_fd == .fd) task.contents_or_fd.fd.file else 0,
                 .dir_fd = if (task.contents_or_fd == .fd) task.contents_or_fd.fd.dir else 0,
@@ -2313,16 +2442,27 @@ pub const Graph = struct {

     const_values: std.HashMapUnmanaged(Ref, Expr, Ref.HashCtx, 80) = .{},

+    estimated_file_loader_count: usize = 0,
+
+    additional_output_files: std.ArrayListUnmanaged(options.OutputFile) = .{},
+
     pub const InputFile = struct {
         source: Logger.Source,
         loader: options.Loader = options.Loader.file,
         side_effects: _resolver.SideEffects = _resolver.SideEffects.has_side_effects,
-        additional_files: BabyList(Index.Int) = .{},
+        additional_files: BabyList(AdditionalFile) = .{},
+        unique_key_for_additional_file: string = "",
+        content_hash_for_additional_file: u64 = 0,

         pub const List = MultiArrayList(InputFile);
     };
 };

+pub const AdditionalFile = union(enum) {
+    source_index: Index.Int,
+    output_file: Index.Int,
+};
+
 const PathToSourceIndexMap = std.HashMapUnmanaged(u64, Index.Int, IdentityContext(u64), 80);

 const EntryPoint = struct {
@@ -3170,6 +3310,8 @@ const LinkerContext = struct {
                 chunk.template.placeholder.dir = pathname.dir;
             } else {
                 chunk.template = PathTemplate.chunk;
+                if (this.resolver.opts.chunk_names.len > 0)
+                    chunk.template.data = this.resolver.opts.chunk_names;
             }
         }

@@ -5696,8 +5838,8 @@ const LinkerContext = struct {
         // Start with the hashbang if there is one. This must be done before the
         // banner because it only works if it's literally the first character.
         if (chunk.isEntryPoint()) {
-            if (c.graph.ast.items(.hashbang)[chunk.entry_point.source_index]) |hashbang| {
-                std.debug.assert(hashbang.len > 0);
+            const hashbang = c.graph.ast.items(.hashbang)[chunk.entry_point.source_index];
+            if (hashbang.len > 0) {
                 j.push(hashbang);
                 j.push("\n");
                 line_offset.advance(hashbang);
@@ -5837,7 +5979,8 @@ const LinkerContext = struct {
             &j,
             cross_chunk_prefix.len > 0 or
-                cross_chunk_suffix.len > 0,
+                cross_chunk_suffix.len > 0 or
+                c.parse_graph.estimated_file_loader_count > 0,
             @truncate(u32, ctx.chunks.len),
         ) catch @panic("Unhandled out of memory error in breakOutputIntoPieces()");

@@ -7714,11 +7857,12 @@ const LinkerContext = struct {
         // Generate the final output files by joining file pieces together
         var output_files = std.ArrayList(options.OutputFile).initCapacity(bun.default_allocator, chunks.len + @as(
             usize,
-            @boolToInt(react_client_components_manifest.len > 0),
+            @boolToInt(react_client_components_manifest.len > 0) + c.parse_graph.additional_output_files.items.len,
         )) catch unreachable;
         output_files.items.len = chunks.len;
+
         for (chunks, output_files.items) |*chunk, *output_file| {
-            const buffer = chunk.intermediate_output.code(chunk, chunks) catch @panic("Failed to allocate memory for output file");
+            const buffer = chunk.intermediate_output.code(c.parse_graph, c.resolver.opts.public_path, chunk, chunks) catch @panic("Failed to allocate memory for output file");
             output_file.* = options.OutputFile.initBuf(
                 buffer,
                 Chunk.IntermediateOutput.allocatorForSize(buffer.len),
@@ -7738,6 +7882,8 @@ const LinkerContext = struct {
             ));
         }

+        output_files.appendSliceAssumeCapacity(c.parse_graph.additional_output_files.items);
+
         return output_files;
     }
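With file-loader assets present, every chunk is now broken into pieces even when there are no cross-chunk imports: the generated text is split on the embedded unique keys, and each piece carries a reference that `code()` resolves to either a chunk's final path or an additional output file, glued onto the configured `publicPath`. A simplified TypeScript sketch of the joining step (it omits the relative-path computation and prefix normalization the real code performs):

```ts
// Sketch: join output pieces, resolving each piece's reference to a path.
type PieceRef =
  | { kind: "none" }
  | { kind: "chunk" | "asset"; index: number };

interface Piece {
  data: string; // literal output text preceding the reference
  ref: PieceRef;
}

function joinPieces(
  pieces: Piece[],
  importPrefix: string, // c.resolver.opts.public_path in the Zig code
  chunkPaths: string[],
  assetPaths: string[],
): string {
  let out = "";
  for (const piece of pieces) {
    out += piece.data;
    if (piece.ref.kind === "chunk") out += importPrefix + chunkPaths[piece.ref.index];
    else if (piece.ref.kind === "asset") out += importPrefix + assetPaths[piece.ref.index];
  }
  return out;
}

console.log(joinPieces(
  [
    { data: 'import logo from "', ref: { kind: "asset", index: 0 } },
    { data: '";', ref: { kind: "none" } },
  ],
  "https://cdn.example.com/",
  [],
  ["logo-abc123.png"],
)); // import logo from "https://cdn.example.com/logo-abc123.png";
```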
@@ -9097,7 +9243,14 @@ pub const Chunk = struct {
                 return bun.default_allocator;
         }

-        pub fn code(this: IntermediateOutput, chunk: *Chunk, chunks: []Chunk) ![]const u8 {
+        pub fn code(
+            this: IntermediateOutput,
+            graph: *const Graph,
+            import_prefix: []const u8,
+            chunk: *Chunk,
+            chunks: []Chunk,
+        ) ![]const u8 {
+            const additional_files = graph.input_files.items(.additional_files);
             switch (this) {
                 .pieces => |*pieces| {
                     var count: usize = 0;
@@ -9109,9 +9262,26 @@ pub const Chunk = struct {
                     for (pieces.slice()) |piece| {
                         count += piece.data_len;
-                        if (piece.index.kind != .none) {
-                            const file_path = chunks[piece.index.index].final_rel_path;
-                            count += if (from_chunk_dir.len == 0) file_path.len else bun.path.relative(from_chunk_dir, file_path).len;
+
+                        switch (piece.index.kind) {
+                            .chunk, .asset => {
+                                const index = piece.index.index;
+                                const file_path = switch (piece.index.kind) {
+                                    .asset => graph.additional_output_files.items[additional_files[index].last().?.output_file].input.text,
+                                    .chunk => chunks[index].final_rel_path,
+                                    else => unreachable,
+                                };
+
+                                const cheap_normalizer = cheapPrefixNormalizer(
+                                    import_prefix,
+                                    if (from_chunk_dir.len == 0)
+                                        file_path
+                                    else
+                                        bun.path.relative(from_chunk_dir, file_path),
+                                );
+                                count += cheap_normalizer[0].len + cheap_normalizer[1].len;
+                            },
+                            .none => {},
                         }
                     }

@@ -9125,19 +9295,35 @@ pub const Chunk = struct {
                             @memcpy(remain.ptr, data.ptr, data.len);

                         remain = remain[data.len..];

-                        const index = piece.index.index;
-                        if (piece.index.kind != .none) {
-                            const file_path = chunks[index].final_rel_path;
-                            const relative_path = if (from_chunk_dir.len > 0)
-                                bun.path.relative(from_chunk_dir, file_path)
-                            else
-                                file_path;
+                        switch (piece.index.kind) {
+                            .asset, .chunk => {
+                                const index = piece.index.index;
+                                const file_path = switch (piece.index.kind) {
+                                    .asset => graph.additional_output_files.items[additional_files[index].last().?.output_file].input.text,
+                                    .chunk => chunks[index].final_rel_path,
+                                    else => unreachable,
+                                };
+
+                                const cheap_normalizer = cheapPrefixNormalizer(
+                                    import_prefix,
+                                    if (from_chunk_dir.len == 0)
+                                        file_path
+                                    else
+                                        bun.path.relative(from_chunk_dir, file_path),
+                                );

-                            if (relative_path.len > 0)
-                                @memcpy(remain.ptr, relative_path.ptr, relative_path.len);
+                                if (cheap_normalizer[0].len > 0) {
+                                    @memcpy(remain.ptr, cheap_normalizer[0].ptr, cheap_normalizer[0].len);
+                                    remain = remain[cheap_normalizer[0].len..];
+                                }

-                            remain = remain[relative_path.len..];
+                                if (cheap_normalizer[1].len > 0) {
+                                    @memcpy(remain.ptr, cheap_normalizer[1].ptr, cheap_normalizer[1].len);
+                                    remain = remain[cheap_normalizer[1].len..];
+                                }
+                            },
+                            .none => {},
                         }
                     }

@@ -9309,6 +9495,7 @@ const CompileResult = union(enum) {
 };

 const ContentHasher = struct {
+    // xxhash64 outperforms Wyhash if the file is > 1KB or so
     hasher: std.hash.XxHash64 = std.hash.XxHash64.init(0),

     pub fn write(self: *ContentHasher, bytes: []const u8) void {
@@ -9316,6 +9503,12 @@ const ContentHasher = struct {
         self.hasher.update(bytes);
     }

+    pub fn run(bytes: []const u8) u64 {
+        var hasher = ContentHasher{};
+        hasher.write(bytes);
+        return hasher.digest();
+    }
+
     pub fn writeInts(self: *ContentHasher, i: []const u32) void {
         // TODO: BigEndian
         self.hasher.update(std.mem.sliceAsBytes(i));
@@ -9325,3 +9518,40 @@ const ContentHasher = struct {
         return self.hasher.final();
     }
 };
+
+// non-allocating
+// meant to be fast but not 100% thorough
+// users can correctly put in a trailing slash if they want
+// this is just being nice
+fn cheapPrefixNormalizer(prefix: []const u8, suffix: []const u8) [2]string {
+    if (prefix.len == 0)
+        return .{ prefix, suffix };
+
+    // There are a few cases here we want to handle:
+    // ["https://example.com/", "/out.js"]  => "https://example.com/out.js"
+    // ["/foo/", "/bar.js"] => "/foo/bar.js"
+    if (strings.endsWithChar(prefix, '/')) {
+        if (strings.startsWithChar(suffix, '/')) {
+            return .{
+                prefix[0 .. prefix.len - 1],
+                suffix[1..suffix.len],
+            };
+        }
+
+        // It gets really complicated if we try to deal with URLs more than this
+        // These would be ideal:
+        // - example.com + ./out.js => example.com/out.js
+        // - example.com/foo + ./out.js => example.com/fooout.js
+        // - example.com/bar/ + ./out.js => example.com/bar/out.js
+        // But it's not worth the complexity to handle these cases right now.
+    }
+
+    if (suffix.len > "./".len and strings.hasPrefixComptime(suffix, "./")) {
+        return .{
+            prefix,
+            suffix[2..],
+        };
+    }
+
+    return .{ prefix, suffix };
+}
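`cheapPrefixNormalizer` deliberately handles only the two cheap cases: collapsing a doubled slash at the prefix/suffix seam and dropping a leading `./` from the suffix. A direct TypeScript port, useful for checking the cases documented in its comments:

```ts
// Direct port of cheapPrefixNormalizer: returns the two halves the caller
// concatenates, without allocating a joined string.
function cheapPrefixNormalizer(prefix: string, suffix: string): [string, string] {
  if (prefix.length === 0) return [prefix, suffix];

  // ["https://example.com/", "/out.js"] => "https://example.com" + "/out.js"
  if (prefix.endsWith("/") && suffix.startsWith("/")) {
    return [prefix.slice(0, -1), suffix.slice(1)];
  }

  // Drop a leading "./" so the suffix glues onto a trailing-slash prefix.
  if (suffix.length > "./".length && suffix.startsWith("./")) {
    return [prefix, suffix.slice(2)];
  }

  return [prefix, suffix];
}

console.log(cheapPrefixNormalizer("https://example.com/", "/out.js").join(""));  // https://example.com/out.js
console.log(cheapPrefixNormalizer("/foo/", "/bar.js").join(""));                 // /foo/bar.js
console.log(cheapPrefixNormalizer("https://example.com/", "./out.js").join("")); // https://example.com/out.js
```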
diff --git a/src/js_ast.zig b/src/js_ast.zig
index bccf4e0d8..cd9f876bd 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -5651,7 +5651,7 @@ pub const Ast = struct {
     /// they can be manipulated efficiently without a full AST traversal
     import_records: ImportRecord.List = .{},

-    hashbang: ?string = null,
+    hashbang: string = "",
     directive: ?string = null,
     url_for_css: ?string = null,
     parts: Part.List = Part.List{},
diff --git a/src/js_parser.zig b/src/js_parser.zig
index df99e3738..72d75d160 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -2803,7 +2803,7 @@ pub const Parser = struct {
             }
             break :brk .none;
         };
-        return .{ .ast = try p.toAST(parts, exports_kind, null) };
+        return .{ .ast = try p.toAST(parts, exports_kind, null, "") };
     }

     pub fn parse(self: *Parser) !js_ast.Result {
@@ -4166,7 +4166,7 @@ pub const Parser = struct {
         // Pop the module scope to apply the "ContainsDirectEval" rules
         // p.popScope();

-        return js_ast.Result{ .ast = try p.toAST(parts_slice, exports_kind, wrapper_expr) };
+        return js_ast.Result{ .ast = try p.toAST(parts_slice, exports_kind, wrapper_expr, hashbang) };
     }

     pub fn init(_options: Options, log: *logger.Log, source: *const logger.Source, define: *Define, allocator: Allocator) !Parser {
@@ -20481,7 +20481,13 @@ fn NewParser_(
             p.log.addRangeError(p.source, logger.Range{ .loc = comma_after_spread, .len = 1 }, "Unexpected \",\" after rest pattern") catch unreachable;
         }

-        pub fn toAST(p: *P, _parts: []js_ast.Part, exports_kind: js_ast.ExportsKind, commonjs_wrapper_expr: ?Expr) !js_ast.Ast {
+        pub fn toAST(
+            p: *P,
+            _parts: []js_ast.Part,
+            exports_kind: js_ast.ExportsKind,
+            commonjs_wrapper_expr: ?Expr,
+            hashbang: []const u8,
+        ) !js_ast.Ast {
             const allocator = p.allocator;
             var parts = _parts;
@@ -21181,6 +21187,8 @@ fn NewParser_(
                 .bun_plugin = p.bun_plugin,
                 .commonjs_named_exports = p.commonjs_named_exports,

+                .hashbang = hashbang,
+
                 // TODO:
                 // .const_values = p.const_values,
             };
diff --git a/src/options.zig b/src/options.zig
index 48ef405a0..38cf09819 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -661,6 +661,16 @@ pub const Loader = enum(u8) {
     dataurl,
     text,

+    pub fn shouldCopyForBundling(this: Loader) bool {
+        return switch (this) {
+            .file,
+            // TODO: CSS
+            .css,
+            => true,
+            else => false,
+        };
+    }
+
     pub fn toMimeType(this: Loader) bun.HTTP.MimeType {
         return switch (this) {
             .jsx, .js, .ts, .tsx => bun.HTTP.MimeType.javascript,
@@ -1375,6 +1385,9 @@ pub const BundleOptions = struct {
     external: ExternalModules = ExternalModules{},
     entry_points: []const string,
     entry_names: []const u8 = "",
+    asset_names: []const u8 = "",
+    chunk_names: []const u8 = "",
+    public_path: []const u8 = "",
     extension_order: []const string = &Defaults.ExtensionOrder,
     esm_extension_order: []const string = &Defaults.ModuleExtensionOrder,
     out_extensions: bun.StringHashMap(string),
@@ -2571,6 +2584,10 @@ pub const PathTemplate = struct {
     data: string = "",
     placeholder: Placeholder = .{},

+    pub fn needs(this: *const PathTemplate, comptime field: std.meta.FieldEnum(Placeholder)) bool {
+        return strings.contains(this.data, comptime "[" ++ @tagName(field) ++ "]");
+    }
+
     pub fn format(self: PathTemplate, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
         var remain = self.data;
         while (strings.indexOfChar(remain, '[')) |j| {
@@ -2649,6 +2666,11 @@ pub const PathTemplate = struct {
     };

     pub const file = PathTemplate{
+        .data = "./[name].[ext]",
+        .placeholder = .{},
+    };
+
+    pub const asset = PathTemplate{
         .data = "./[name]-[hash].[ext]",
         .placeholder = .{},
     };
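In options.zig, the `file` template loses its `[hash]` segment while the new `asset` template keeps it, and `PathTemplate.needs()` lets callers check for a placeholder before filling it, as `processFilesToCopy` does for `[hash]`. A TypeScript sketch of that check, with the two default templates from above:

```ts
// Sketch of PathTemplate.needs(): a placeholder is only filled in when the
// template actually references it.
function needs(template: string, field: "dir" | "name" | "ext" | "hash"): boolean {
  return template.includes(`[${field}]`);
}

const assetTemplate = "./[name]-[hash].[ext]"; // default asset template above
const fileTemplate = "./[name].[ext]";         // new default file template

console.log(needs(assetTemplate, "hash")); // true  -> fill in the content hash
console.log(needs(fileTemplate, "hash"));  // false -> leave the hash out
```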
