aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGravatar Dylan Conway <35280289+dylan-conway@users.noreply.github.com> 2023-04-20 05:23:12 -0700
committerGravatar GitHub <noreply@github.com> 2023-04-20 05:23:12 -0700
commitd78ecc76c854310fd47da32071d22301b0782ec3 (patch)
treedcee5bde1b3598203de25f07a632cfb55aaa01e5
parent9e7bfdec8cd4fc1827a5d793844afe36638ded37 (diff)
downloadbun-d78ecc76c854310fd47da32071d22301b0782ec3.tar.gz
bun-d78ecc76c854310fd47da32071d22301b0782ec3.tar.zst
bun-d78ecc76c854310fd47da32071d22301b0782ec3.zip
Symbol minification (#2695)
* minify * Update renamer.zig * --minify-whitespace * Speed up minification a little * handle private names * 5% faster minification * use helper function * fix nested scope slots * `bun build --minify` gets another +8% faster * print semicolons afterwards * print semicolon after checking error * after all error checking * Delete code for generating legacy bundes * remove extra whitespace around if statements * print space before import identifier * Use `@constCast` * Make `S.Local#decls` use `BabyList(Decl)` * Add `fromSlice` helper to `BabyList` * Remove unnecessary optional chains * minify `undefined, true, false` * Another @constCast * Implement merge adjacent local var * Support --minify in `bun build --transform` * skip comments when counting character frequencies * Don't wrap commonjs with --transform on (unless targeting bun) * Support --minify in the runtime * Fix edgecase with import * as * don't infinite loop * --trnasform shouldn't mess with require * Only track comments when minifying --------- Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
-rw-r--r--src/baby_list.zig26
-rw-r--r--src/bun.js/module_loader.zig2
-rw-r--r--src/bun.zig2
-rw-r--r--src/bun_js.zig7
-rw-r--r--src/bundler.zig22
-rw-r--r--src/bundler/bundle_v2.zig247
-rw-r--r--src/bundler/generate_node_modules_bundle.zig1940
-rw-r--r--src/cli.zig15
-rw-r--r--src/cli/build_command.zig9
-rw-r--r--src/env_loader.zig8
-rw-r--r--src/http.zig4
-rw-r--r--src/js_ast.zig196
-rw-r--r--src/js_lexer.zig18
-rw-r--r--src/js_parser.zig337
-rw-r--r--src/js_printer.zig348
-rw-r--r--src/options.zig2
-rw-r--r--src/renamer.zig392
-rw-r--r--src/runtime.zig1
-rw-r--r--test/bundler/expectBundled.ts3
19 files changed, 1250 insertions, 2329 deletions
diff --git a/src/baby_list.zig b/src/baby_list.zig
index 7d3ca44ed..b2c6584c9 100644
--- a/src/baby_list.zig
+++ b/src/baby_list.zig
@@ -37,8 +37,7 @@ pub fn BabyList(comptime Type: type) type {
@setRuntimeSafety(false);
return ListType{
// Remove the const qualifier from the items
- .ptr = @intToPtr([*]Type, @ptrToInt(items.ptr)),
-
+ .ptr = @constCast(items.ptr),
.len = @truncate(u32, items.len),
.cap = @truncate(u32, items.len),
};
@@ -92,15 +91,21 @@ pub fn BabyList(comptime Type: type) type {
pub inline fn init(items: []const Type) ListType {
@setRuntimeSafety(false);
return ListType{
- // Remove the const qualifier from the items
- .ptr = @intToPtr([*]Type, @ptrToInt(items.ptr)),
-
+ .ptr = @constCast(items.ptr),
.len = @truncate(u32, items.len),
.cap = @truncate(u32, items.len),
};
}
pub inline fn fromList(list_: anytype) ListType {
+ if (comptime @TypeOf(list_) == ListType) {
+ return list_;
+ }
+
+ if (comptime @TypeOf(list_) == []const Elem) {
+ return init(list_);
+ }
+
if (comptime Environment.allow_assert) {
std.debug.assert(list_.items.len <= list_.capacity);
}
@@ -112,6 +117,17 @@ pub fn BabyList(comptime Type: type) type {
};
}
+ pub inline fn fromSlice(allocator: std.mem.Allocator, items: []const Elem) !ListType {
+ var allocated = try allocator.alloc(Elem, items.len);
+ bun.copy(Elem, allocated, items);
+
+ return ListType{
+ .ptr = allocated.ptr,
+ .len = @truncate(u32, allocated.len),
+ .cap = @truncate(u32, allocated.len),
+ };
+ }
+
pub fn update(this: *ListType, list_: anytype) void {
this.* = .{
.ptr = list_.items.ptr,
diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig
index ee1f392c5..127bc85e3 100644
--- a/src/bun.js/module_loader.zig
+++ b/src/bun.js/module_loader.zig
@@ -1606,6 +1606,8 @@ pub const ModuleLoader = struct {
opts.features.hot_module_reloading = false;
opts.features.top_level_await = true;
opts.features.react_fast_refresh = false;
+ opts.features.minify_identifiers = bundler.options.minify_identifiers;
+ opts.features.minify_syntax = bundler.options.minify_syntax;
opts.filepath_hash_for_hmr = 0;
opts.warn_about_unbundled_modules = false;
opts.macro_context = &jsc_vm.bundler.macro_context.?;
diff --git a/src/bun.zig b/src/bun.zig
index 438129754..9fe21a05a 100644
--- a/src/bun.zig
+++ b/src/bun.zig
@@ -236,7 +236,7 @@ pub const MutableString = @import("string_mutable.zig").MutableString;
pub const RefCount = @import("./ref_count.zig").RefCount;
pub inline fn constStrToU8(s: []const u8) []u8 {
- return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
+ return @constCast(s);
}
pub const MAX_PATH_BYTES: usize = if (Environment.isWasm) 1024 else std.fs.MAX_PATH_BYTES;
diff --git a/src/bun_js.zig b/src/bun_js.zig
index 8d725cbae..b2d8ab77c 100644
--- a/src/bun_js.zig
+++ b/src/bun_js.zig
@@ -82,6 +82,13 @@ pub const Run = struct {
b.options.prefer_latest_install = b.resolver.opts.prefer_latest_install;
b.resolver.env_loader = b.env;
+ b.options.minify_identifiers = ctx.bundler_options.minify_identifiers;
+ b.options.minify_whitespace = ctx.bundler_options.minify_whitespace;
+ b.resolver.opts.minify_identifiers = ctx.bundler_options.minify_identifiers;
+ b.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace;
+
+ // b.options.minify_syntax = ctx.bundler_options.minify_syntax;
+
if (ctx.debug.macros) |macros| {
b.options.macro_remap = macros;
}
diff --git a/src/bundler.zig b/src/bundler.zig
index 34544afb0..f2da1a960 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -738,8 +738,6 @@ pub const Bundler = struct {
Output.flush();
}
- pub const GenerateNodeModulesBundle = @import("./bundler/generate_node_modules_bundle.zig");
-
pub const BuildResolveResultPair = struct {
written: usize,
input_fd: ?StoredFileDescriptorType,
@@ -1155,6 +1153,9 @@ pub const Bundler = struct {
.source_map_handler = source_map_context,
.rewrite_require_resolve = bundler.options.platform != .node,
.minify_whitespace = bundler.options.minify_whitespace,
+ .minify_syntax = bundler.options.minify_syntax,
+ .minify_identifiers = bundler.options.minify_identifiers,
+ .transform_only = bundler.options.transform_only,
},
enable_source_map,
),
@@ -1174,6 +1175,9 @@ pub const Bundler = struct {
.css_import_behavior = bundler.options.cssImportBehavior(),
.rewrite_require_resolve = bundler.options.platform != .node,
.minify_whitespace = bundler.options.minify_whitespace,
+ .minify_syntax = bundler.options.minify_syntax,
+ .minify_identifiers = bundler.options.minify_identifiers,
+ .transform_only = bundler.options.transform_only,
},
enable_source_map,
),
@@ -1191,6 +1195,10 @@ pub const Bundler = struct {
.require_ref = ast.require_ref,
.css_import_behavior = bundler.options.cssImportBehavior(),
.source_map_handler = source_map_context,
+ .minify_whitespace = bundler.options.minify_whitespace,
+ .minify_syntax = bundler.options.minify_syntax,
+ .minify_identifiers = bundler.options.minify_identifiers,
+ .transform_only = bundler.options.transform_only,
},
enable_source_map,
),
@@ -1208,6 +1216,10 @@ pub const Bundler = struct {
.require_ref = ast.require_ref,
.css_import_behavior = bundler.options.cssImportBehavior(),
.source_map_handler = source_map_context,
+ .minify_whitespace = bundler.options.minify_whitespace,
+ .minify_syntax = bundler.options.minify_syntax,
+ .minify_identifiers = bundler.options.minify_identifiers,
+ .transform_only = bundler.options.transform_only,
},
enable_source_map,
),
@@ -1377,6 +1389,7 @@ pub const Bundler = struct {
opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript();
opts.features.should_fold_typescript_constant_expressions = loader.isTypeScript() or platform.isBun() or bundler.options.inlining;
opts.features.dynamic_require = platform.isBun();
+ opts.transform_only = bundler.options.transform_only;
// @bun annotation
opts.features.dont_bundle_twice = this_parse.dont_bundle_twice;
@@ -1400,7 +1413,7 @@ pub const Bundler = struct {
opts.filepath_hash_for_hmr = file_hash orelse 0;
opts.features.auto_import_jsx = bundler.options.auto_import_jsx;
opts.warn_about_unbundled_modules = platform.isNotBun();
- opts.features.jsx_optimization_inline = (bundler.options.jsx_optimization_inline orelse (platform.isBun() and jsx.parse and
+ opts.features.jsx_optimization_inline = opts.features.allow_runtime and (bundler.options.jsx_optimization_inline orelse (platform.isBun() and jsx.parse and
!jsx.development)) and
(jsx.runtime == .automatic or jsx.runtime == .classic);
@@ -1408,6 +1421,7 @@ pub const Bundler = struct {
opts.features.hoist_bun_plugin = this_parse.hoist_bun_plugin;
opts.features.inject_jest_globals = this_parse.inject_jest_globals;
opts.features.minify_syntax = bundler.options.minify_syntax;
+ opts.features.minify_identifiers = bundler.options.minify_identifiers;
if (bundler.macro_context == null) {
bundler.macro_context = js_ast.Macro.MacroContext.init(bundler);
@@ -1727,7 +1741,7 @@ pub const Bundler = struct {
if (log.level == .verbose) {
bundler.resolver.debug_logs = try DebugLogs.init(allocator);
}
-
+ bundler.options.transform_only = true;
var did_start = false;
if (bundler.options.output_dir_handle == null) {
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 2bdac2817..b0ce41fb8 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -1,5 +1,4 @@
const Bundler = bun.Bundler;
-const GenerateNodeModulesBundle = @This();
const bun = @import("root").bun;
const from = bun.from;
const string = bun.string;
@@ -76,6 +75,8 @@ const B = js_ast.B;
const Binding = js_ast.Binding;
const AutoBitSet = bun.bit_set.AutoBitSet;
const renamer = bun.renamer;
+const StableSymbolCount = renamer.StableSymbolCount;
+const MinifyRenamer = renamer.MinifyRenamer;
const Scope = js_ast.Scope;
const JSC = bun.JSC;
const debugTreeShake = Output.scoped(.TreeShake, true);
@@ -414,6 +415,11 @@ pub const BundleV2 = struct {
generator.linker.resolver = &generator.bundler.resolver;
generator.linker.graph.code_splitting = bundler.options.code_splitting;
generator.graph.code_splitting = bundler.options.code_splitting;
+
+ generator.linker.options.minify_syntax = bundler.options.minify_syntax;
+ generator.linker.options.minify_identifiers = bundler.options.minify_identifiers;
+ generator.linker.options.minify_whitespace = bundler.options.minify_whitespace;
+
var pool = try generator.graph.allocator.create(ThreadPool);
if (enable_reloading) {
Watcher.enableHotModuleReloading(generator);
@@ -973,6 +979,7 @@ const ParseTask = struct {
opts.features.trim_unused_imports = loader.isTypeScript() or (bundler.options.trim_unused_imports orelse false);
opts.features.inlining = bundler.options.minify_syntax;
opts.features.minify_syntax = bundler.options.minify_syntax;
+ opts.features.minify_identifiers = bundler.options.minify_identifiers;
opts.features.should_fold_typescript_constant_expressions = opts.features.inlining or loader.isTypeScript();
opts.tree_shaking = task.tree_shaking;
@@ -2064,6 +2071,7 @@ const LinkerContext = struct {
tree_shaking: bool = true,
minify_whitespace: bool = false,
minify_syntax: bool = false,
+ minify_identifiers: bool = false,
mode: Mode = Mode.bundle,
@@ -2639,10 +2647,8 @@ const LinkerContext = struct {
S.Local,
S.Local{
.is_export = true,
- .decls = bun.fromSlice(
- []js_ast.G.Decl,
+ .decls = js_ast.G.Decl.List.fromSlice(
this.allocator,
- []const js_ast.G.Decl,
&.{
.{
.binding = Binding.alloc(
@@ -3624,7 +3630,7 @@ const LinkerContext = struct {
allocator_,
js_ast.S.Local,
.{
- .decls = decls,
+ .decls = G.Decl.List.init(decls),
},
loc,
);
@@ -4290,7 +4296,7 @@ const LinkerContext = struct {
for (stable_ref_list.items, clause_items.slice()) |stable_ref, *clause_item| {
const ref = stable_ref.ref;
- const alias = r.nextRenamedName(c.graph.symbols.get(ref).?.original_name);
+ const alias = if (c.options.minify_identifiers) try r.nextMinifiedName(c.allocator) else r.nextRenamedName(c.graph.symbols.get(ref).?.original_name);
clause_item.* = .{
.name = .{
@@ -4411,59 +4417,135 @@ const LinkerContext = struct {
chunk: *Chunk,
files_in_order: []const u32,
) !renamer.Renamer {
-
- // TODO: minify identifiers
const all_module_scopes = c.graph.ast.items(.module_scope);
const all_flags: []const JSMeta.Flags = c.graph.meta.items(.flags);
const all_parts: []const js_ast.Part.List = c.graph.ast.items(.parts);
const all_wrapper_refs: []const Ref = c.graph.ast.items(.wrapper_ref);
const all_import_records: []const ImportRecord.List = c.graph.ast.items(.import_records);
- var r = try renamer.NumberRenamer.init(
- allocator,
- allocator,
- c.graph.symbols,
- brk: {
- var reserved_names = try renamer.computeInitialReservedNames(allocator);
+ var reserved_names = try renamer.computeInitialReservedNames(allocator);
+ for (files_in_order) |source_index| {
+ renamer.computeReservedNamesForScope(&all_module_scopes[source_index], &c.graph.symbols, &reserved_names, allocator);
+ }
+
+ var sorted_imports_from_other_chunks: std.ArrayList(StableRef) = brk: {
+ var list = std.ArrayList(StableRef).init(allocator);
+ var count: u32 = 0;
+ var imports_from_other_chunks = chunk.content.javascript.imports_from_other_chunks.values();
+ for (imports_from_other_chunks) |item| {
+ count += item.len;
+ }
+
+ list.ensureTotalCapacityPrecise(count) catch unreachable;
+ list.items.len = count;
+ var remain = list.items;
+ const stable_source_indices = c.graph.stable_source_indices;
+ for (imports_from_other_chunks) |item| {
+ for (item.slice()) |ref| {
+ remain[0] = StableRef{
+ .stable_source_index = stable_source_indices[ref.ref.sourceIndex()],
+ .ref = ref.ref,
+ };
+ remain = remain[1..];
+ }
+ }
+
+ std.sort.sort(StableRef, list.items, {}, StableRef.isLessThan);
+ break :brk list;
+ };
+ defer sorted_imports_from_other_chunks.deinit();
+
+ if (c.options.minify_identifiers) {
+ const first_top_level_slots: js_ast.SlotCounts = brk: {
+ var slots = js_ast.SlotCounts{};
+ const nested_scope_slot_counts = c.graph.ast.items(.nested_scope_slot_counts);
+ for (files_in_order) |i| {
+ slots.unionMax(nested_scope_slot_counts[i]);
+ }
+ break :brk slots;
+ };
+
+ var minify_renamer = try MinifyRenamer.init(allocator, c.graph.symbols, first_top_level_slots, reserved_names);
+
+ var top_level_symbols = renamer.StableSymbolCount.Array.init(allocator);
+ defer top_level_symbols.deinit();
+
+ var top_level_symbols_all = renamer.StableSymbolCount.Array.init(allocator);
+ var stable_source_indices = c.graph.stable_source_indices;
+ var freq = js_ast.CharFreq{
+ .freqs = [_]i32{0} ** 64,
+ };
+ var capacity = sorted_imports_from_other_chunks.items.len;
+ {
+ const char_freqs = c.graph.ast.items(.char_freq);
for (files_in_order) |source_index| {
- renamer.computeReservedNamesForScope(&all_module_scopes[source_index], &c.graph.symbols, &reserved_names, allocator);
+ if (char_freqs[source_index]) |char_freq| {
+ freq.include(char_freq);
+ }
}
+ }
- break :brk reserved_names;
- },
- );
- {
- var sorted_imports_from_other_chunks: std.ArrayList(StableRef) = brk: {
- var list = std.ArrayList(StableRef).init(allocator);
- var count: u32 = 0;
- var imports_from_other_chunks = chunk.content.javascript.imports_from_other_chunks.values();
- for (imports_from_other_chunks) |item| {
- count += item.len;
+ const uses_exports_ref_list = c.graph.ast.items(.uses_exports_ref);
+ const uses_module_ref_list = c.graph.ast.items(.uses_module_ref);
+ const exports_ref_list = c.graph.ast.items(.exports_ref);
+ const module_ref_list = c.graph.ast.items(.module_ref);
+ const parts_list = c.graph.ast.items(.parts);
+
+ for (files_in_order) |source_index| {
+ const uses_exports_ref = uses_exports_ref_list[source_index];
+ const uses_module_ref = uses_module_ref_list[source_index];
+ const exports_ref = exports_ref_list[source_index];
+ const module_ref = module_ref_list[source_index];
+ const parts = parts_list[source_index];
+
+ top_level_symbols.clearRetainingCapacity();
+
+ if (uses_exports_ref) {
+ try minify_renamer.accumulateSymbolUseCount(&top_level_symbols, exports_ref, 1, stable_source_indices);
+ }
+ if (uses_module_ref) {
+ try minify_renamer.accumulateSymbolUseCount(&top_level_symbols, module_ref, 1, stable_source_indices);
}
- list.ensureTotalCapacityPrecise(count) catch unreachable;
- list.items.len = count;
- var remain = list.items;
- const stable_source_indices = c.graph.stable_source_indices;
- for (imports_from_other_chunks) |item| {
- for (item.slice()) |ref| {
- remain[0] = StableRef{
- .stable_source_index = stable_source_indices[ref.ref.sourceIndex()],
- .ref = ref.ref,
- };
- remain = remain[1..];
+ for (parts.slice()) |part| {
+ if (!part.is_live) {
+ continue;
+ }
+
+ try minify_renamer.accumulateSymbolUseCounts(&top_level_symbols, part.symbol_uses, stable_source_indices);
+
+ for (part.declared_symbols.refs()) |declared_ref| {
+ try minify_renamer.accumulateSymbolUseCount(&top_level_symbols, declared_ref, 1, stable_source_indices);
}
}
- std.sort.sort(StableRef, list.items, {}, StableRef.isLessThan);
- break :brk list;
- };
- defer sorted_imports_from_other_chunks.deinit();
+ std.sort.sort(renamer.StableSymbolCount, top_level_symbols.items, {}, StableSymbolCount.lessThan);
+ capacity += top_level_symbols.items.len;
+ top_level_symbols_all.appendSlice(top_level_symbols.items) catch unreachable;
+ }
- for (sorted_imports_from_other_chunks.items) |stable_ref| {
- r.addTopLevelSymbol(stable_ref.ref);
+ top_level_symbols.clearRetainingCapacity();
+ for (sorted_imports_from_other_chunks.items) |stable| {
+ try minify_renamer.accumulateSymbolUseCount(&top_level_symbols, stable.ref, 1, stable_source_indices);
}
+ top_level_symbols_all.appendSlice(top_level_symbols.items) catch unreachable;
+ try minify_renamer.allocateTopLevelSymbolSlots(top_level_symbols_all);
+
+ var minifier = freq.compile(allocator);
+ try minify_renamer.assignNamesByFrequency(&minifier);
+
+ return minify_renamer.toRenamer();
+ }
+
+ var r = try renamer.NumberRenamer.init(
+ allocator,
+ allocator,
+ c.graph.symbols,
+ reserved_names,
+ );
+ for (sorted_imports_from_other_chunks.items) |stable_ref| {
+ r.addTopLevelSymbol(stable_ref.ref);
}
var sorted_ = std.ArrayList(u32).init(r.temp_allocator);
@@ -4677,6 +4759,8 @@ const LinkerContext = struct {
.allocator = allocator,
.require_ref = runtimeRequireRef,
.minify_whitespace = c.options.minify_whitespace,
+ .minify_identifiers = c.options.minify_identifiers,
+ .minify_syntax = c.options.minify_syntax,
.const_values = c.graph.const_values,
};
@@ -5135,10 +5219,8 @@ const LinkerContext = struct {
Stmt.alloc(
S.Local,
.{
- .decls = bun.fromSlice(
- []js_ast.G.Decl,
+ .decls = js_ast.G.Decl.List.fromSlice(
temp_allocator,
- []const js_ast.G.Decl,
&.{
.{
.binding = Binding.alloc(
@@ -5302,6 +5384,7 @@ const LinkerContext = struct {
.require_or_import_meta_for_source_callback = js_printer.RequireOrImportMeta.Callback.init(LinkerContext, requireOrImportMetaForSource, c),
.minify_whitespace = c.options.minify_whitespace,
+ .minify_syntax = c.options.minify_syntax,
.const_values = c.graph.const_values,
};
@@ -5355,6 +5438,46 @@ const LinkerContext = struct {
}
};
+ fn mergeAdjacentLocalStmts(stmts: *std.ArrayList(Stmt), allocator: std.mem.Allocator) void {
+ if (stmts.items.len == 0)
+ return;
+
+ var did_merge_with_previous_local = false;
+ var end: usize = 1;
+
+ for (stmts.items[1..]) |stmt| {
+ // Try to merge with the previous variable statement
+ if (stmt.data == .s_local) {
+ var after = stmt.data.s_local;
+ if (stmts.items[end - 1].data == .s_local) {
+ var before = stmts.items[end - 1].data.s_local;
+ // It must be the same kind of variable statement (i.e. let/var/const)
+ if (before.kind == after.kind and before.is_export == after.is_export) {
+ if (did_merge_with_previous_local) {
+ // Avoid O(n^2) behavior for repeated variable declarations
+ // Appending to this decls list is safe because did_merge_with_previous_local is true
+ before.decls.append(allocator, after.decls.slice()) catch unreachable;
+ } else {
+ // Append the declarations to the previous variable statement
+ did_merge_with_previous_local = true;
+
+ var clone = std.ArrayList(G.Decl).initCapacity(allocator, before.decls.len + after.decls.len) catch unreachable;
+ clone.appendSliceAssumeCapacity(before.decls.slice());
+ clone.appendSliceAssumeCapacity(after.decls.slice());
+ before.decls.update(clone);
+ }
+ continue;
+ }
+ }
+ }
+
+ did_merge_with_previous_local = false;
+ stmts.items[end] = stmt;
+ end += 1;
+ }
+ stmts.items.len = end;
+ }
+
fn shouldRemoveImportExportStmt(
c: *LinkerContext,
stmts: *StmtList,
@@ -5380,10 +5503,8 @@ const LinkerContext = struct {
Stmt.alloc(
S.Local,
S.Local{
- .decls = try bun.fromSlice(
- []G.Decl,
+ .decls = G.Decl.List.fromSlice(
allocator,
- []const G.Decl,
&.{
.{
.binding = Binding.alloc(
@@ -5402,7 +5523,7 @@ const LinkerContext = struct {
),
},
},
- ),
+ ) catch unreachable,
},
record.range.loc,
),
@@ -5425,10 +5546,8 @@ const LinkerContext = struct {
Stmt.alloc(
S.Local,
S.Local{
- .decls = try bun.fromSlice(
- []G.Decl,
+ .decls = try G.Decl.List.fromSlice(
allocator,
- []const G.Decl,
&.{
.{
.binding = Binding.alloc(
@@ -5902,7 +6021,7 @@ const LinkerContext = struct {
stmt.data.s_local.is_export = false;
} else if (FeatureFlags.unwrap_commonjs_to_esm and s.was_commonjs_export and wrap == .cjs) {
std.debug.assert(stmt.data.s_local.decls.len == 1);
- const decl = stmt.data.s_local.decls[0];
+ const decl = stmt.data.s_local.decls.ptr[0];
stmt = Stmt.alloc(
S.SExpr,
S.SExpr{
@@ -5939,10 +6058,8 @@ const LinkerContext = struct {
stmt = Stmt.alloc(
S.Local,
S.Local{
- .decls = try bun.fromSlice(
- []js_ast.G.Decl,
+ .decls = try G.Decl.List.fromSlice(
allocator,
- []const js_ast.G.Decl,
&.{
.{
.binding = Binding.alloc(
@@ -6004,10 +6121,8 @@ const LinkerContext = struct {
stmt = Stmt.alloc(
S.Local,
S.Local{
- .decls = try bun.fromSlice(
- []js_ast.G.Decl,
+ .decls = try G.Decl.List.fromSlice(
allocator,
- []const js_ast.G.Decl,
&.{
.{
.binding = Binding.alloc(
@@ -6247,6 +6362,10 @@ const LinkerContext = struct {
stmts.inside_wrapper_prefix.items.len = 0;
stmts.inside_wrapper_suffix.items.len = 0;
+ if (c.options.minify_syntax) {
+ mergeAdjacentLocalStmts(&stmts.all_stmts, temp_allocator);
+ }
+
// TODO: mergeAdjacentLocalStmts
var out_stmts: []js_ast.Stmt = stmts.all_stmts.items;
@@ -6338,7 +6457,7 @@ const LinkerContext = struct {
Stmt.alloc(
S.Local,
S.Local{
- .decls = decls,
+ .decls = G.Decl.List.init(decls),
},
Logger.Loc.Empty,
),
@@ -6390,7 +6509,7 @@ const LinkerContext = struct {
.s_local => |local| {
if (local.was_commonjs_export or ast.commonjs_named_exports.count() == 0) {
var value: Expr = Expr.init(E.Missing, E.Missing{}, Logger.Loc.Empty);
- for (local.decls) |*decl| {
+ for (local.decls.slice()) |*decl| {
const binding = decl.binding.toExpr(&hoisty);
if (decl.value) |other| {
value = value.joinWithComma(
@@ -6432,7 +6551,7 @@ const LinkerContext = struct {
Stmt.alloc(
S.Local,
S.Local{
- .decls = hoisty.decls.items,
+ .decls = G.Decl.List.fromList(hoisty.decls),
},
Logger.Loc.Empty,
),
@@ -6488,7 +6607,7 @@ const LinkerContext = struct {
Stmt.alloc(
S.Local,
S.Local{
- .decls = decls,
+ .decls = G.Decl.List.init(decls),
},
Logger.Loc.Empty,
),
@@ -6525,6 +6644,8 @@ const LinkerContext = struct {
.commonjs_named_exports_ref = ast.exports_ref,
.commonjs_named_exports_deoptimized = flags.wrap == .cjs,
.const_values = c.graph.const_values,
+ .minify_whitespace = c.options.minify_whitespace,
+ .minify_syntax = c.options.minify_syntax,
.allocator = allocator,
.to_esm_ref = toESMRef,
diff --git a/src/bundler/generate_node_modules_bundle.zig b/src/bundler/generate_node_modules_bundle.zig
deleted file mode 100644
index 2bb0e72a6..000000000
--- a/src/bundler/generate_node_modules_bundle.zig
+++ /dev/null
@@ -1,1940 +0,0 @@
-const Bundler = bun.Bundler;
-const GenerateNodeModulesBundle = @This();
-const bun = @import("root").bun;
-const string = bun.string;
-const Output = bun.Output;
-const Global = bun.Global;
-const Environment = bun.Environment;
-const strings = bun.strings;
-const MutableString = bun.MutableString;
-const stringZ = bun.stringZ;
-const default_allocator = bun.default_allocator;
-const StoredFileDescriptorType = bun.StoredFileDescriptorType;
-const FeatureFlags = bun.FeatureFlags;
-const C = bun.C;
-const std = @import("std");
-const lex = bun.js_lexer;
-const logger = @import("root").bun.logger;
-const options = @import("../options.zig");
-const js_parser = bun.js_parser;
-const json_parser = bun.JSON;
-const js_printer = bun.js_printer;
-const js_ast = bun.JSAst;
-const linker = @import("../linker.zig");
-const Ref = @import("../ast/base.zig").Ref;
-const Define = @import("../defines.zig").Define;
-const DebugOptions = @import("../cli.zig").Command.DebugOptions;
-const ThreadPoolLib = @import("../thread_pool.zig");
-
-const Fs = @import("../fs.zig");
-const schema = @import("../api/schema.zig");
-const Api = schema.Api;
-const _resolver = @import("../resolver/resolver.zig");
-const sync = @import("../sync.zig");
-const ImportRecord = @import("../import_record.zig").ImportRecord;
-const allocators = @import("../allocators.zig");
-const MimeType = @import("../http/mime_type.zig");
-const resolve_path = @import("../resolver/resolve_path.zig");
-const runtime = @import("../runtime.zig");
-const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
-const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
-const DebugLogs = _resolver.DebugLogs;
-const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
-const Router = @import("../router.zig");
-const isPackagePath = _resolver.isPackagePath;
-const Lock = @import("../lock.zig").Lock;
-const NodeFallbackModules = @import("../node_fallbacks.zig");
-const CacheEntry = @import("../cache.zig").FsCacheEntry;
-const Analytics = @import("../analytics/analytics_thread.zig");
-const URL = @import("../url.zig").URL;
-const Report = @import("../report.zig");
-const Linker = linker.Linker;
-const Resolver = _resolver.Resolver;
-const TOML = @import("../toml/toml_parser.zig").TOML;
-
-const EntryPoints = @import("./entry_points.zig");
-const BunQueue = sync.Channel(PendingImports, .Dynamic);
-const GenerateNodeModuleBundle = @This();
-const ThisBundler = bun.Bundler;
-const JSC = @import("root").bun.JSC;
-pub const ThreadPool = struct {
- pool: ThreadPoolLib = undefined,
- // Hardcode 512 as max number of threads for now.
- workers: [512]Worker = undefined,
- workers_used: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0),
- cpu_count: u32 = 0,
- started_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0),
- stopped_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0),
- completed_count: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0),
- pending_count: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0),
-
- generator: *GenerateNodeModuleBundle = undefined,
-
- pub fn start(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void {
- generator.bundler.env.loadProcess();
- this.generator = generator;
-
- this.cpu_count = @truncate(u32, @divFloor((try std.Thread.getCpuCount()) + 1, 2));
-
- if (generator.bundler.env.map.get("GOMAXPROCS")) |max_procs| {
- if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count| {
- this.cpu_count = std.math.min(this.cpu_count, cpu_count);
- } else |_| {}
- }
-
- this.pool = ThreadPoolLib.init(.{
- .max_threads = this.cpu_count,
- });
- this.pool.on_thread_spawn = Worker.onSpawn;
- this.pool.threadpool_context = this;
- var workers_used: u32 = 0;
- while (workers_used < this.cpu_count) : (workers_used += 1) {
- try this.workers[workers_used].init(generator);
- }
- if (workers_used > 0)
- this.pool.forceSpawn();
- }
-
- pub fn wait(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void {
- while (true) {
- while (this.generator.queue.tryReadItem() catch null) |queue| {
- var iter = queue.iterator();
- var batch = ThreadPoolLib.Batch{};
- var count: u32 = 0;
- while (iter.next()) |entry| {
- const module_id: u32 = entry.key_ptr.*;
- const exists = generator.enqueued_map.getOrPut(module_id) catch unreachable;
- if (exists.found_existing) {
- continue;
- }
- batch.push(ThreadPoolLib.Batch.from(&entry.value_ptr.*.task));
- count += 1;
- }
- _ = this.pending_count.fetchAdd(count, .Monotonic);
- this.pool.schedule(batch);
- }
-
- if (this.completed_count.load(.Monotonic) > 0 and this.completed_count.load(.Monotonic) == this.pending_count.load(.Monotonic)) {
- break;
- }
-
- std.atomic.spinLoopHint();
- }
-
- const workers: []const Worker = this.workers[0..this.workers_used.loadUnchecked()];
- for (workers) |worker| {
- this.generator.estimated_input_lines_of_code += worker.data.estimated_input_lines_of_code;
- try worker.data.log.appendTo(this.generator.log);
- }
- }
-
- pub const Task = struct {
- result: _resolver.Result,
- generator: *GenerateNodeModuleBundle,
- };
-
- pub const Worker = struct {
- thread_id: std.Thread.Id,
- thread: std.Thread,
-
- allocator: std.mem.Allocator,
- generator: *GenerateNodeModuleBundle,
- data: *WorkerData = undefined,
- quit: bool = false,
-
- has_notify_started: bool = false,
-
- pub const WorkerData = struct {
- shared_buffer: MutableString = undefined,
- scan_pass_result: js_parser.ScanPassResult = undefined,
- log: *logger.Log,
- estimated_input_lines_of_code: usize = 0,
- macro_context: js_ast.Macro.MacroContext,
- bundler: Bundler = undefined,
-
- pub fn deinit(this: *WorkerData, allocator: std.mem.Allocator) void {
- this.shared_buffer.deinit();
- this.scan_pass_result.named_imports.deinit();
- this.scan_pass_result.import_records.deinit();
- allocator.destroy(this);
- }
- };
-
- pub fn init(worker: *Worker, generator: *GenerateNodeModuleBundle) !void {
- worker.generator = generator;
- worker.allocator = generator.allocator;
- }
-
- pub fn onSpawn(ctx: ?*anyopaque) ?*anyopaque {
- var pool = @ptrCast(*ThreadPool, @alignCast(@alignOf(*ThreadPool), ctx.?));
-
- const id = pool.workers_used.fetchAdd(1, .Monotonic);
- pool.workers[id].run();
- return &pool.workers[id];
- }
-
- pub fn notifyStarted(this: *Worker) void {
- if (!this.has_notify_started) {
- this.has_notify_started = true;
- _ = this.generator.pool.started_workers.fetchAdd(1, .Release);
- std.Thread.Futex.wake(&this.generator.pool.started_workers, std.math.maxInt(u32));
- }
- }
-
- pub fn run(this: *Worker) void {
- Output.Source.configureThread();
- this.thread_id = std.Thread.getCurrentId();
-
- js_ast.Expr.Data.Store.create(this.generator.allocator);
- js_ast.Stmt.Data.Store.create(this.generator.allocator);
- this.data = this.generator.allocator.create(WorkerData) catch unreachable;
- this.data.* = WorkerData{
- .log = this.generator.allocator.create(logger.Log) catch unreachable,
- .estimated_input_lines_of_code = 0,
- .macro_context = js_ast.Macro.MacroContext.init(this.generator.bundler),
- };
- this.data.log.* = logger.Log.init(this.generator.allocator);
- this.data.shared_buffer = MutableString.init(this.generator.allocator, 0) catch unreachable;
- this.data.scan_pass_result = js_parser.ScanPassResult.init(this.generator.allocator);
- this.data.bundler = this.generator.bundler.*;
- var bundler_ptr = &this.data.bundler;
- const CacheSet = @import("../cache.zig");
- // no funny business mr. cache
- bundler_ptr.resolver.caches = CacheSet.Set.init(this.allocator);
- bundler_ptr.linker.resolver = &bundler_ptr.resolver;
- bundler_ptr.log = this.data.log;
- bundler_ptr.linker.log = this.data.log;
- bundler_ptr.linker.resolver.log = this.data.log;
- }
-
- pub const ProcessFileTask = struct {
- resolution: _resolver.Result,
- task: ThreadPoolLib.Task = .{ .callback = &callback },
-
- pub fn callback(task: *ThreadPoolLib.Task) void {
- var worker = @ptrCast(
- *ThreadPool.Worker,
- @alignCast(
- @alignOf(*ThreadPool.Worker),
- ThreadPoolLib.Thread.current.?.ctx.?,
- ),
- );
- var process: *ProcessFileTask = @fieldParentPtr(ProcessFileTask, "task", task);
-
- worker.generator.processFile(
- worker,
- &worker.data.bundler,
- process.resolution,
- ) catch {};
- _ = worker.generator.pool.completed_count.fetchAdd(1, .Monotonic);
- }
- };
- };
-};
-write_lock: Lock,
-log_lock: Lock = Lock.init(),
-module_list: std.ArrayList(Api.JavascriptBundledModule),
-package_list: std.ArrayList(Api.JavascriptBundledPackage),
-header_string_buffer: MutableString,
-
-// Just need to know if we've already enqueued this one
-package_list_map: std.AutoHashMap(u64, u32),
-enqueued_map: std.AutoHashMap(u32, void),
-queue: BunQueue,
-bundler: *ThisBundler,
-
-allocator: std.mem.Allocator,
-tmpfile: std.fs.File,
-log: *logger.Log,
-pool: *ThreadPool,
-tmpfile_byte_offset: u32 = 0,
-code_end_byte_offset: u32 = 0,
-has_jsx: bool = false,
-estimated_input_lines_of_code: usize = 0,
-
-work_waiter: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0),
-list_lock: Lock = Lock.init(),
-
-dynamic_import_file_size_store: U32Map,
-dynamic_import_file_size_store_lock: Lock,
-
-always_bundled_package_hashes: []u32 = &[_]u32{},
-always_bundled_package_jsons: []*const PackageJSON = &.{},
-always_bundled_booleans: []bool = &.{},
-package_bundle_map: options.BundlePackage.Map = options.BundlePackage.Map{},
-
-const U32Map = std.AutoHashMap(u32, u32);
-pub const current_version: u32 = 1;
-const dist_index_js_string_pointer = Api.StringPointer{ .length = "dist/index.js".len };
-const index_js_string_pointer = Api.StringPointer{ .length = "index.js".len, .offset = "dist/".len };
-
-fn upsert(this: *GenerateNodeModuleBundle, module_id: u32, resolve: _resolver.Result) !void {
- var dedupe = try this.enqueued_map.getOrPut(module_id);
- if (dedupe.found_existing) return;
- var task = try this.allocator.create(ThreadPool.Worker.ProcessFileTask);
- task.* = ThreadPool.Worker.ProcessFileTask{
- .resolution = resolve,
- };
- _ = this.pool.pending_count.fetchAdd(1, .Monotonic);
- this.pool.pool.schedule(ThreadPoolLib.Batch.from(&task.task));
-}
-
-pub fn ensurePathIsAllocated(this: *GenerateNodeModuleBundle, path_: ?*Fs.Path) !void {
- var path = path_ orelse return;
-
- path.* = try path.dupeAlloc(this.allocator);
-}
-
-pub fn enqueueItem(this: *GenerateNodeModuleBundle, resolve: _resolver.Result) !void {
- var result = resolve;
- var path = result.path() orelse return;
-
- const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file;
- path.* = try path.dupeAlloc(this.allocator);
-
- if (BundledModuleData.get(this, &result)) |mod| {
- try this.upsert(mod.module_id, result);
- } else {
- try this.upsert(result.hash(this.bundler.fs.top_level_dir, loader), result);
- }
-}
-
-// The bun Bundle Format
-// All the node_modules your app uses in a single compact file with metadata
-// A binary JavaScript bundle format prioritizing generation time and deserialization time
-pub const magic_bytes = "#!/usr/bin/env bun\n\n";
-// This makes it possible to do ./path-to-bundle on posix systems so you can see the raw JS contents
-// https://en.wikipedia.org/wiki/Magic_number_(programming)#In_files
-// Immediately after the magic bytes, the next character is a uint32 followed by a newline
-// 0x00000000\n
-// That uint32 denotes the byte offset in the file where the code for the bundle ends
-// - If the value is 0, that means the file did not finish writing or there are no modules
-// - This imposes a maximum bundle size of around 4,294,967,295 bytes. If your JS is more than 4 GB, it won't work.
-// The raw JavaScript is encoded as a UTF-8 string starting from the current position + 1 until the above byte offset.
-// This uint32 is useful for HTTP servers to separate:
-// - Which part of the bundle is the JS code?
-// - Which part is the metadata?
-// Without needing to do a full pass through the file, or necessarily care about the metadata.
-// The metadata is at the bottom of the file instead of the top because the metadata is written after all JS code in the bundle is written.
-// The rationale there is:
-// 1. We cannot prepend to a file without rewriting the entire file
-// 2. The metadata is variable-length and that format will change often.
-// 3. We won't have all the metadata until after all JS is finished writing
-// If you have 32 MB of JavaScript dependencies, you really want to avoid reading the code in memory.
-// - This lets you seek to the specific position in the file.
-// - HTTP servers should use sendfile() instead of copying the file to userspace memory.
-// So instead, we append metadata to the file after printing each node_module
-// When there are no more modules to process, we generate the metadata
-// To find the metadata, you look at the byte offset: initial_header[magic_bytes.len..initial_header.len - 1]
-// Then, you add that number to initial_header.len
-const initial_header = brk: {
- var buf = std.mem.zeroes([magic_bytes.len + 5]u8);
- // bun.copy(u8, &buf, magic_bytes);
- // var remainder = buf[magic_bytes.len..];
- // // Write an invalid byte offset to be updated after we finish generating the code
- // std.mem.writeIntNative(u32, remainder[0 .. remainder.len - 1], 0);
- // buf[buf.len - 1] = '\n';
- break :brk buf;
-};
-const code_start_byte_offset: u32 = initial_header.len;
-// The specifics of the metadata is not documented here. You can find it in src/api/schema.peechy.
-
-pub fn appendHeaderString(generator: *GenerateNodeModuleBundle, str: string) !Api.StringPointer {
- // This is so common we might as well just reuse it
- // Plus this is one machine word so it's a quick comparison
- if (strings.eqlComptime(str, "index.js")) {
- return index_js_string_pointer;
- } else if (strings.eqlComptime(str, "dist/index.js")) {
- return dist_index_js_string_pointer;
- }
-
- var offset = generator.header_string_buffer.list.items.len;
- try generator.header_string_buffer.append(str);
- return Api.StringPointer{
- .offset = @truncate(u32, offset),
- .length = @truncate(u32, str.len),
- };
-}
-
-pub fn generate(
- bundler: *ThisBundler,
- allocator: std.mem.Allocator,
- framework_config: ?Api.LoadedFramework,
- route_config: ?Api.LoadedRouteConfig,
- destination: [*:0]const u8,
- estimated_input_lines_of_code: *usize,
- package_bundle_map: options.BundlePackage.Map,
-) !?Api.JavascriptBundleContainer {
- _ = try bundler.fs.fs.openTmpDir();
- var tmpname_buf: [64]u8 = undefined;
- bundler.resetStore();
- try bundler.configureDefines();
-
- const tmpname = try bundler.fs.tmpname(
- ".bun",
- tmpname_buf[0..64],
- std.hash.Wyhash.hash(@intCast(usize, std.time.milliTimestamp()) % std.math.maxInt(u32), std.mem.span(destination)),
- );
-
- var tmpfile = Fs.FileSystem.RealFS.Tmpfile{};
- try tmpfile.create(&bundler.fs.fs, tmpname);
-
- errdefer tmpfile.closeAndDelete(tmpname);
-
- var generator = try allocator.create(GenerateNodeModuleBundle);
- var queue = BunQueue.init(allocator);
- defer allocator.destroy(generator);
- generator.* = GenerateNodeModuleBundle{
- .module_list = std.ArrayList(Api.JavascriptBundledModule).init(allocator),
- .package_list = std.ArrayList(Api.JavascriptBundledPackage).init(allocator),
- .header_string_buffer = try MutableString.init(allocator, "dist/index.js".len),
- .allocator = allocator,
- .enqueued_map = std.AutoHashMap(u32, void).init(allocator),
- .queue = queue,
- .estimated_input_lines_of_code = 0,
- // .resolve_queue = queue,
- .bundler = bundler,
- .tmpfile = tmpfile.file(),
-
- .dynamic_import_file_size_store = U32Map.init(allocator),
- .dynamic_import_file_size_store_lock = Lock.init(),
- .log = bundler.log,
- .package_list_map = std.AutoHashMap(u64, u32).init(allocator),
- .pool = undefined,
- .write_lock = Lock.init(),
- .package_bundle_map = package_bundle_map,
- };
- // dist/index.js appears more common than /index.js
- // but this means we can store both "dist/index.js" and "index.js" in one.
- try generator.header_string_buffer.append("dist/index.js");
- try generator.package_list_map.ensureTotalCapacity(128);
- var pool = try allocator.create(ThreadPool);
- pool.* = ThreadPool{};
- generator.pool = pool;
-
- var this = generator;
- // Always inline the runtime into the bundle
- try generator.appendBytes(&initial_header);
-
- if (bundler.log.level == .verbose) {
- bundler.resolver.debug_logs = try DebugLogs.init(allocator);
- }
-
- Analytics.Features.bun_bun = true;
-
- always_bundled: {
- const root_package_json_resolved: _resolver.Result = bundler.resolver.resolve(bundler.fs.top_level_dir, "./package.json", .stmt) catch {
- generator.log.addWarning(null, logger.Loc.Empty, "Please run `bun bun` from a directory containing a package.json.") catch unreachable;
- break :always_bundled;
- };
- const root_package_json = root_package_json_resolved.package_json orelse brk: {
- const read_dir = (bundler.resolver.readDirInfo(bundler.fs.top_level_dir) catch unreachable).?;
- Analytics.Features.tsconfig = Analytics.Features.tsconfig or read_dir.tsconfig_json != null;
- break :brk read_dir.package_json.?;
- };
- Analytics.setProjectID(std.fs.path.dirname(root_package_json.source.path.text) orelse "/", root_package_json.name);
- if (bundler.macro_context) |macro_ctx| {
- Analytics.Features.macros = macro_ctx.remap.count() > 0;
- }
-
- const bundle_keys = package_bundle_map.keys();
- const do_always_bundle = package_bundle_map.values();
- var always_bundle_count: u32 = @truncate(u32, bundle_keys.len);
-
- if (always_bundle_count != 0) {
- Analytics.Features.always_bundle = true;
- var always_bundled_package_jsons = bundler.allocator.alloc(*PackageJSON, always_bundle_count) catch unreachable;
- var always_bundled_package_hashes = bundler.allocator.alloc(u32, always_bundle_count) catch unreachable;
- var always_bundled_booleans = bundler.allocator.alloc(bool, always_bundle_count) catch unreachable;
- var i: u16 = 0;
-
- inner: for (bundle_keys, 0..) |name, k| {
- bun.copy(u8, &Bundler.tmp_buildfile_buf, name);
- bun.copy(u8, Bundler.tmp_buildfile_buf[name.len..], "/package.json");
- const package_json_import = Bundler.tmp_buildfile_buf[0 .. name.len + "/package.json".len];
- const result = bundler.resolver.resolve(bundler.fs.top_level_dir, package_json_import, .stmt) catch |err| {
- generator.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving always bundled module \"{s}\"", .{ @errorName(err), name }) catch unreachable;
- continue :inner;
- };
-
- var package_json: *PackageJSON = result.package_json orelse brk: {
- const read_dir = (bundler.resolver.readDirInfo(package_json_import) catch unreachable).?;
- if (read_dir.package_json == null) {
- generator.log.addWarningFmt(null, logger.Loc.Empty, bundler.allocator, "{s} missing package.json. It will not be bundled", .{name}) catch unreachable;
- continue :inner;
- }
- break :brk read_dir.package_json.?;
- };
-
- package_json.source.key_path = result.path_pair.primary;
-
- // if (!strings.contains(result.path_pair.primary.text, package_json.name)) {
- // generator.log.addErrorFmt(
- // null,
- // logger.Loc.Empty,
- // bundler.allocator,
- // "Bundling \"{s}\" is not supported because the package isn.\n To fix this, move the package's code to a directory containing the name.\n Location: \"{s}\"",
- // .{
- // name,
- // name,
- // result.path_pair.primary.text,
- // },
- // ) catch unreachable;
- // continue :inner;
- // }
-
- always_bundled_package_jsons[i] = package_json;
- always_bundled_package_hashes[i] = package_json.hash;
- always_bundled_booleans[i] = do_always_bundle[k] == .always;
- i += 1;
- }
- generator.always_bundled_package_hashes = always_bundled_package_hashes[0..i];
- generator.always_bundled_package_jsons = always_bundled_package_jsons[0..i];
- generator.always_bundled_booleans = always_bundled_booleans[0..i];
- }
- }
- if (generator.log.errors > 0) return error.BundleFailed;
-
- this.bundler.macro_context = js_ast.Macro.MacroContext.init(bundler);
-
- const include_refresh_runtime =
- !this.bundler.options.production and
- this.bundler.options.jsx.supports_fast_refresh and
- bundler.options.platform.isWebLike();
-
- if (framework_config != null) {
- defer this.bundler.resetStore();
-
- try this.bundler.configureFramework(true);
- if (bundler.options.framework) |framework| {
- Analytics.Features.framework = true;
-
- if (framework.override_modules.keys.len > 0) {
- bundler.options.framework.?.override_modules_hashes = allocator.alloc(u64, framework.override_modules.keys.len) catch unreachable;
- for (framework.override_modules.keys, 0..) |key, i| {
- bundler.options.framework.?.override_modules_hashes[i] = std.hash.Wyhash.hash(0, key);
- }
- }
- }
- } else {}
-
- this.pool.start(this) catch |err| {
- Analytics.enqueue(Analytics.EventName.bundle_fail);
- return err;
- };
-
- // The ordering matters here The runtime must appear at the top of
- // the file But, we don't know which version of the runtime is
- // necessary until we know if they'll use JSX and if they'll use
- // react refresh
- var _new_jsx_runtime_resolve_result: ?_resolver.Result = null;
- var fast_refresh_resolve_result: ?_resolver.Result = null;
- // Normally, this is automatic
- // However, since we only do the parsing pass, it may not get imported automatically.
- if (bundler.options.jsx.parse) {
- defer this.bundler.resetStore();
- if (this.bundler.resolver.resolve(
- this.bundler.fs.top_level_dir,
- this.bundler.options.jsx.importSource(),
- .require,
- )) |new_jsx_runtime| {
- _new_jsx_runtime_resolve_result = new_jsx_runtime;
- this.ensurePathIsAllocated(_new_jsx_runtime_resolve_result.?.path()) catch unreachable;
- Analytics.Features.jsx = true;
- } else |_| {}
- }
-
- const include_fast_refresh_in_bundle = Analytics.Features.jsx and
- include_refresh_runtime and
- !Analytics.Features.fast_refresh and !bundler.options.platform.isServerSide();
-
- var refresh_runtime_module_id: u32 = 0;
- if (include_refresh_runtime) {
- defer this.bundler.resetStore();
-
- if (this.bundler.resolver.resolve(
- this.bundler.fs.top_level_dir,
- this.bundler.options.jsx.refresh_runtime,
- .require,
- )) |refresh_runtime| {
- fast_refresh_resolve_result = refresh_runtime;
- this.ensurePathIsAllocated(fast_refresh_resolve_result.?.path()) catch unreachable;
- Analytics.Features.fast_refresh = true;
-
- if (BundledModuleData.get(this, &refresh_runtime)) |mod| {
- refresh_runtime_module_id = mod.module_id;
- }
- } else |_| {}
- }
-
- if (Environment.isDebug) {
- switch (bundler.options.platform) {
- .node => try generator.appendBytes(runtime.Runtime.sourceContentNode()),
- .bun_macro, .bun => try generator.appendBytes(runtime.Runtime.sourceContentBun()),
- else => try generator.appendBytes(runtime.Runtime.sourceContent(include_fast_refresh_in_bundle)),
- }
-
- try generator.appendBytes("\n\n");
- } else if (include_fast_refresh_in_bundle) {
- try generator.appendBytes(comptime runtime.Runtime.sourceContent(true) ++ "\n\n");
- } else {
- try generator.appendBytes(
- switch (bundler.options.platform) {
- .bun_macro, .bun => comptime @as(string, runtime.Runtime.sourceContentBun() ++ "\n\n"),
- .node => comptime @as(string, runtime.Runtime.sourceContentNode() ++ "\n\n"),
- else => comptime @as(string, runtime.Runtime.sourceContentWithoutRefresh() ++ "\n\n"),
- },
- );
- }
-
- if (_new_jsx_runtime_resolve_result) |new_jsx_runtime| {
- try this.enqueueItem(new_jsx_runtime);
- _new_jsx_runtime_resolve_result = null;
- }
-
- if (fast_refresh_resolve_result) |fast_refresh| {
- try this.enqueueItem(fast_refresh);
- fast_refresh_resolve_result = null;
- }
-
- if (bundler.router) |router| {
- defer this.bundler.resetStore();
- Analytics.Features.filesystem_router = true;
-
- const entry_points = try router.getEntryPoints();
- for (entry_points) |entry_point| {
- const resolved = bundler.resolveEntryPoint(entry_point) catch continue;
- try this.enqueueItem(resolved);
- }
- this.bundler.resetStore();
- } else {}
-
- if (bundler.options.framework) |framework| {
- if (bundler.options.platform.isBun()) {
- if (framework.server.isEnabled()) {
- Analytics.Features.bunjs = true;
- const resolved = try bundler.resolver.resolve(
- bundler.fs.top_level_dir,
- framework.server.path,
- .entry_point,
- );
- try this.enqueueItem(resolved);
- }
- } else {
- if (framework.client.isEnabled()) {
- const resolved = try bundler.resolver.resolve(
- bundler.fs.top_level_dir,
- framework.client.path,
- .entry_point,
- );
- try this.enqueueItem(resolved);
- }
-
- if (framework.fallback.isEnabled()) {
- const resolved = try bundler.resolver.resolve(
- bundler.fs.top_level_dir,
- framework.fallback.path,
- .entry_point,
- );
- try this.enqueueItem(resolved);
- }
- }
- }
-
- for (bundler.options.entry_points) |entry_point| {
- defer this.bundler.resetStore();
-
- const entry_point_path = bundler.normalizeEntryPointPath(entry_point);
- const resolved = bundler.resolveEntryPoint(entry_point_path) catch continue;
- try this.enqueueItem(resolved);
- }
-
- Analytics.enqueue(Analytics.EventName.bundle_start);
- this.bundler.resetStore();
- this.pool.wait(this) catch |err| {
- Analytics.enqueue(Analytics.EventName.bundle_fail);
- return err;
- };
- Analytics.enqueue(Analytics.EventName.bundle_success);
-
- estimated_input_lines_of_code.* = generator.estimated_input_lines_of_code;
-
- // if (comptime !isRelease) {
- // this.queue.checkDuplicatesSlow();
- // }
-
- if (this.log.errors > 0) {
- tmpfile.closeAndDelete(tmpname);
- // We stop here because if there are errors we don't know if the bundle is valid
- // This manifests as a crash when sorting through the module list because we may have added files to the bundle which were never actually finished being added.
- return null;
- }
-
- if (this.module_list.items.len == 0) {
- tmpfile.closeAndDelete(tmpname);
- Output.prettyErrorln(
- \\<r><red>error<r>: no dependencies to bundle!
- \\
- \\"bun bun" currently only bundles dependencies in node_modules.
- \\
- ,
- .{},
- );
- Global.crash();
- }
-
- if (include_refresh_runtime and refresh_runtime_module_id > 0) {
- var refresh_runtime_injector_buf: [1024]u8 = undefined;
- var fixed_buffer = std.io.fixedBufferStream(&refresh_runtime_injector_buf);
- var fixed_buffer_writer = fixed_buffer.writer();
- const hex_bytes = bun.fmt.hexIntLower(refresh_runtime_module_id);
-
- fixed_buffer_writer.print(
- \\if ('window' in globalThis) {{
- \\ (function() {{
- \\ BUN_RUNTIME.__injectFastRefresh(${any}());
- \\ }})();
- \\}}
- ,
- .{hex_bytes},
- ) catch unreachable;
- try this.tmpfile.writeAll(fixed_buffer.buffer[0..fixed_buffer.pos]);
- }
-
- // Ensure we never overflow
- this.code_end_byte_offset = @truncate(
- u32,
- // Doing this math ourself seems to not necessarily produce correct results
- (try this.tmpfile.getPos()),
- );
-
- var javascript_bundle_container = std.mem.zeroes(Api.JavascriptBundleContainer);
-
- std.sort.sort(
- Api.JavascriptBundledModule,
- this.module_list.items,
- this,
- GenerateNodeModuleBundle.sortJavascriptModuleByPath,
- );
-
- if (comptime Environment.isDebug) {
- const SeenHash = std.AutoHashMap(u64, void);
- var map = SeenHash.init(this.allocator);
- var ids = SeenHash.init(this.allocator);
- try map.ensureTotalCapacity(@truncate(u32, this.module_list.items.len));
- try ids.ensureTotalCapacity(@truncate(u32, this.module_list.items.len));
-
- for (this.module_list.items) |a| {
- const a_pkg: Api.JavascriptBundledPackage = this.package_list.items[a.package_id];
- const a_name = this.metadataStringPointer(a_pkg.name);
- const a_version = this.metadataStringPointer(a_pkg.version);
- const a_path = this.metadataStringPointer(a.path);
-
- std.debug.assert(a_name.len > 0);
- std.debug.assert(a_version.len > 0);
- std.debug.assert(a_path.len > 0);
- var hash_print = std.mem.zeroes([4096]u8);
- const hash = std.hash.Wyhash.hash(0, std.fmt.bufPrint(&hash_print, "{s}@{s}/{s}", .{ a_name, a_version, a_path }) catch unreachable);
- var result1 = map.getOrPutAssumeCapacity(hash);
- std.debug.assert(!result1.found_existing);
-
- var result2 = ids.getOrPutAssumeCapacity(a.id);
- std.debug.assert(!result2.found_existing);
- }
- }
-
- var hasher = std.hash.Wyhash.init(0);
-
- // We want to sort the packages as well as the files
- // The modules sort the packages already
- // So can just copy it in the below loop.
- var sorted_package_list = try allocator.alloc(Api.JavascriptBundledPackage, this.package_list.items.len);
-
- // At this point, the module_list is sorted.
- if (this.module_list.items.len > 0) {
- var package_id_i: u32 = 0;
- var i: usize = 0;
- // Assumption: node_modules are immutable
- // Assumption: module files are immutable
- // (They're not. But, for our purposes that's okay)
- // The etag is:
- // - The hash of each module's path in sorted order
- // - The hash of each module's code size in sorted order
- // - hash(hash(package_name, package_version))
- // If this doesn't prove strong enough, we will do a proper content hash
- // But I want to avoid that overhead unless proven necessary.
- // There's a good chance we don't even strictly need an etag here.
- var bytes: [4]u8 = undefined;
- while (i < this.module_list.items.len) {
- var current_package_id = this.module_list.items[i].package_id;
- this.module_list.items[i].package_id = package_id_i;
- var offset = @truncate(u32, i);
-
- i += 1;
-
- while (i < this.module_list.items.len and this.module_list.items[i].package_id == current_package_id) : (i += 1) {
- this.module_list.items[i].package_id = package_id_i;
- // Hash the file path
- hasher.update(this.metadataStringPointer(this.module_list.items[i].path));
- // Then the length of the code
- std.mem.writeIntNative(u32, &bytes, this.module_list.items[i].code.length);
- hasher.update(&bytes);
- }
-
- this.package_list.items[current_package_id].modules_offset = offset;
- this.package_list.items[current_package_id].modules_length = @truncate(u32, i) - offset;
-
- // Hash the hash of the package name
- // it's hash(hash(package_name, package_version))
- std.mem.writeIntNative(u32, &bytes, this.package_list.items[current_package_id].hash);
- hasher.update(&bytes);
-
- sorted_package_list[package_id_i] = this.package_list.items[current_package_id];
- package_id_i += 1;
- }
- }
-
- var javascript_bundle = std.mem.zeroes(Api.JavascriptBundle);
- javascript_bundle.modules = this.module_list.items;
- javascript_bundle.packages = sorted_package_list;
- javascript_bundle.manifest_string = this.header_string_buffer.list.items;
- const etag_u64 = hasher.final();
- // We store the etag as a ascii hex encoded u64
- // This is so we can send the bytes directly in the HTTP server instead of formatting it as hex each time.
- javascript_bundle.etag = try std.fmt.allocPrint(allocator, "{any}", .{bun.fmt.hexIntLower(etag_u64)});
- javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp()));
-
- const basename = std.fs.path.basename(std.mem.span(destination));
- const extname = std.fs.path.extension(basename);
- javascript_bundle.import_from_name = if (bundler.options.platform.isBun())
- "/node_modules.server.bun"
- else
- try std.fmt.allocPrint(
- this.allocator,
- "/{s}.{s}.bun",
- .{
- basename[0 .. basename.len - extname.len],
- javascript_bundle.etag,
- },
- );
-
- javascript_bundle_container.bundle_format_version = current_version;
- javascript_bundle_container.bundle = javascript_bundle;
- javascript_bundle_container.code_length = this.code_end_byte_offset;
- javascript_bundle_container.framework = framework_config;
- javascript_bundle_container.routes = route_config;
-
- var start_pos = try this.tmpfile.getPos();
- var tmpwriter = std.io.bufferedWriter(this.tmpfile.writer());
- const SchemaWriter = schema.Writer(@TypeOf(tmpwriter.writer()));
- var schema_file_writer = SchemaWriter.init(tmpwriter.writer());
- try javascript_bundle_container.encode(&schema_file_writer);
- try tmpwriter.flush();
-
- // sanity check
- if (Environment.isDebug) {
- try this.tmpfile.seekTo(start_pos);
- var contents = try allocator.alloc(u8, (try this.tmpfile.getEndPos()) - start_pos);
- var read_bytes = try this.tmpfile.read(contents);
- var buf = contents[0..read_bytes];
- var reader = schema.Reader.init(buf, allocator);
-
- var decoder = try Api.JavascriptBundleContainer.decode(
- &reader,
- );
- std.debug.assert(decoder.code_length.? == javascript_bundle_container.code_length.?);
- }
-
- var code_length_bytes: [4]u8 = undefined;
- std.mem.writeIntNative(u32, &code_length_bytes, this.code_end_byte_offset);
- _ = try std.os.pwrite(this.tmpfile.handle, &code_length_bytes, magic_bytes.len);
-
- // Without his mutex, we get a crash at this location:
- // try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, destination);
- // ^
- const top_dir = try std.fs.openDirAbsolute(Fs.FileSystem.instance.top_level_dir, .{});
- _ = C.fchmod(
- this.tmpfile.handle,
- // chmod 777
- 0o0000010 | 0o0000100 | 0o0000001 | 0o0001000 | 0o0000040 | 0o0000004 | 0o0000002 | 0o0000400 | 0o0000200 | 0o0000020,
- );
- try tmpfile.promote(tmpname, top_dir.fd, destination);
- // Print any errors at the end
- // try this.log.print(Output.errorWriter());
- return javascript_bundle_container;
-}
-
-pub fn metadataStringPointer(this: *GenerateNodeModuleBundle, ptr: Api.StringPointer) string {
- return this.header_string_buffer.list.items[ptr.offset .. ptr.offset + ptr.length];
-}
-
-// Since we trim the prefixes, we must also compare the package name and version
-pub fn sortJavascriptModuleByPath(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledModule, b: Api.JavascriptBundledModule) bool {
- return switch (std.mem.order(
- u8,
- ctx.metadataStringPointer(
- ctx.package_list.items[a.package_id].name,
- ),
- ctx.metadataStringPointer(
- ctx.package_list.items[b.package_id].name,
- ),
- )) {
- .eq => switch (std.mem.order(
- u8,
- ctx.metadataStringPointer(
- ctx.package_list.items[a.package_id].version,
- ),
- ctx.metadataStringPointer(
- ctx.package_list.items[b.package_id].version,
- ),
- )) {
- .eq => std.mem.order(
- u8,
- ctx.metadataStringPointer(a.path),
- ctx.metadataStringPointer(b.path),
- ) == .lt,
- .lt => true,
- else => false,
- },
- .lt => true,
- else => false,
- };
-}
-
-// pub fn sortJavascriptPackageByName(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledPackage, b: Api.JavascriptBundledPackage) bool {
-// return std.mem.order(u8, ctx.metadataStringPointer(a.name), ctx.metadataStringPointer(b.name)) == .lt;
-// }
-
-pub fn appendBytes(generator: *GenerateNodeModuleBundle, bytes: anytype) !void {
- try generator.tmpfile.writeAll(bytes);
- generator.tmpfile_byte_offset += @truncate(u32, bytes.len);
-}
-
-const BundledModuleData = struct {
- import_path: string,
- package_path: string,
- package: *const PackageJSON,
- module_id: u32,
-
- pub fn getForceBundle(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData {
- return _get(this, resolve_result, true, false);
- }
-
- pub fn getForceBundleForMain(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData {
- return _get(this, resolve_result, true, true);
- }
-
- threadlocal var normalized_package_path: [512]u8 = undefined;
- threadlocal var normalized_package_path2: [512]u8 = undefined;
- inline fn _get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result, comptime force: bool, comptime is_main: bool) ?BundledModuleData {
- const path = resolve_result.pathConst() orelse return null;
- if (strings.eqlComptime(path.namespace, "node")) {
- const _import_path = path.text["/bun-vfs/node_modules/".len..][resolve_result.package_json.?.name.len + 1 ..];
- return BundledModuleData{
- .import_path = _import_path,
- .package_path = path.text["/bun-vfs/node_modules/".len..],
- .package = resolve_result.package_json.?,
- .module_id = resolve_result.package_json.?.hashModule(_import_path),
- };
- }
-
- var import_path = path.text;
- var package_path = path.text;
- var file_path = path.text;
-
- if (resolve_result.package_json) |pkg_| {
- var pkg: *const PackageJSON = pkg_;
- if (this.package_bundle_map.get(pkg.name)) |result| {
- if (result == .never) return null;
- }
-
- if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash)) |pkg_i| {
- pkg = this.always_bundled_package_jsons[pkg_i];
- if (!this.always_bundled_booleans[pkg_i]) return null;
- const key_path_source_dir = pkg.source.key_path.sourceDir();
- const default_source_dir = pkg.source.path.sourceDir();
-
- if (strings.startsWith(path.text, key_path_source_dir)) {
- import_path = path.text[key_path_source_dir.len..];
- } else if (strings.startsWith(path.text, default_source_dir)) {
- import_path = path.text[default_source_dir.len..];
- } else if (strings.startsWith(path.pretty, pkg.name)) {
- import_path = path.pretty[pkg.name.len + 1 ..];
- }
-
- var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path;
-
- bun.copy(u8, buf_to_use, pkg.name);
- buf_to_use[pkg.name.len] = '/';
- bun.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path);
- package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1];
- return BundledModuleData{
- .import_path = import_path,
- .package_path = package_path,
- .package = pkg,
- .module_id = pkg.hashModule(package_path),
- };
- }
- }
-
- const root: _resolver.RootPathPair = this.bundler.resolver.rootNodeModulePackageJSON(
- resolve_result,
- ) orelse return null;
-
- var base_path = root.base_path;
- const package_json = root.package_json;
-
- if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, package_json.hash)) |pkg_i| {
- var pkg = this.always_bundled_package_jsons[pkg_i];
- if (!this.always_bundled_booleans[pkg_i]) return null;
- const key_path_source_dir = pkg.source.key_path.sourceDir();
- const default_source_dir = pkg.source.path.sourceDir();
-
- if (strings.startsWith(path.text, key_path_source_dir)) {
- import_path = path.text[key_path_source_dir.len..];
- } else if (strings.startsWith(path.text, default_source_dir)) {
- import_path = path.text[default_source_dir.len..];
- } else if (strings.startsWith(path.pretty, pkg.name)) {
- import_path = path.pretty[pkg.name.len + 1 ..];
- }
-
- var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path;
-
- bun.copy(u8, buf_to_use, pkg.name);
- buf_to_use[pkg.name.len] = '/';
- bun.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path);
- package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1];
- return BundledModuleData{
- .import_path = import_path,
- .package_path = package_path,
- .package = pkg,
- .module_id = pkg.hashModule(package_path),
- };
- }
-
- // Easymode: the file path doesn't need to be remapped.
- if (strings.startsWith(file_path, base_path)) {
- import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/");
- package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/");
- std.debug.assert(import_path.len > 0);
- return BundledModuleData{
- .import_path = import_path,
- .package_path = package_path,
- .package = package_json,
- .module_id = package_json.hashModule(package_path),
- };
- }
-
- if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| {
- package_path = file_path[i..];
- import_path = package_path[package_json.name.len + 1 ..];
- std.debug.assert(import_path.len > 0);
- return BundledModuleData{
- .import_path = import_path,
- .package_path = package_path,
- .package = package_json,
- .module_id = package_json.hashModule(package_path),
- };
- }
-
- if (comptime force) {
- if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, root.package_json.hash)) |pkg_json_i| {
- const pkg_json = this.always_bundled_package_jsons[pkg_json_i];
-
- base_path = pkg_json.source.key_path.sourceDir();
-
- if (strings.startsWith(file_path, base_path)) {
- import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/");
- package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/");
- std.debug.assert(import_path.len > 0);
- return BundledModuleData{
- .import_path = import_path,
- .package_path = package_path,
- .package = package_json,
- .module_id = package_json.hashModule(package_path),
- };
- }
-
- if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| {
- package_path = file_path[i..];
- import_path = package_path[package_json.name.len + 1 ..];
- std.debug.assert(import_path.len > 0);
- return BundledModuleData{
- .import_path = import_path,
- .package_path = package_path,
- .package = package_json,
- .module_id = package_json.hashModule(package_path),
- };
- }
-
- // This is our last resort.
- // The package is supposed to be bundled
- // package.json says its' in packages/@foo/bar/index.js
- // The file really is in packages/bar/index.js
- // But we need the import path to contain the package path for bundling to work
- // so we fake it
- // we say ""
- if (package_json.name[0] == '@') {
- if (std.mem.indexOfScalar(u8, package_json.name, '/')) |at| {
- const package_subpath = package_json.name[at + 1 ..];
- if (std.mem.lastIndexOf(u8, file_path, package_subpath)) |i| {
- package_path = this.bundler.fs.dirname_store.print("{s}/{s}", .{ package_json.name, file_path[i + package_subpath.len ..] }) catch unreachable;
- import_path = package_path[package_json.name.len + 1 ..];
- return BundledModuleData{
- .import_path = import_path,
- .package_path = package_path,
- .package = package_json,
- .module_id = package_json.hashModule(package_path),
- };
- }
- }
- }
- }
- }
-
- return null;
- }
-
- pub fn get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData {
- return _get(this, resolve_result, false, false);
- }
-};
-
-fn writeEmptyModule(this: *GenerateNodeModuleBundle, package_relative_path: string, module_id: u32) !u32 {
- this.write_lock.lock();
- defer this.write_lock.unlock();
- var code_offset = @truncate(u32, try this.tmpfile.getPos());
- var writer = this.tmpfile.writer();
- var buffered = std.io.bufferedWriter(writer);
-
- var bufwriter = buffered.writer();
- try bufwriter.writeAll("// ");
- try bufwriter.writeAll(package_relative_path);
- try bufwriter.writeAll(" (disabled/empty)\nexport var $");
- std.fmt.formatInt(module_id, 16, .lower, .{}, bufwriter) catch unreachable;
- try bufwriter.writeAll(" = () => { var obj = {}; Object.defineProperty(obj, 'default', { value: obj, enumerable: false, configurable: true }, obj); return obj; }; \n");
- try buffered.flush();
- this.tmpfile_byte_offset = @truncate(u32, try this.tmpfile.getPos());
- return code_offset;
-}
-
-fn processImportRecord(_: *GenerateNodeModuleBundle, _: ImportRecord) !void {}
-var json_ast_symbols = [_]js_ast.Symbol{
- js_ast.Symbol{ .original_name = "$$m" },
- js_ast.Symbol{ .original_name = "exports" },
- js_ast.Symbol{ .original_name = "module" },
- js_ast.Symbol{ .original_name = "CONGRATS_YOU_FOUND_A_BUG" },
- js_ast.Symbol{ .original_name = "$$bun_runtime_json_parse" },
-};
-const json_parse_string = "parse";
-
-threadlocal var override_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
-
-pub fn appendToModuleList(
- this: *GenerateNodeModuleBundle,
- package: *const PackageJSON,
- module_id: u32,
- code_offset: u32,
- package_relative_path: string,
-) !void {
- this.list_lock.lock();
- defer this.list_lock.unlock();
-
- const code_length = @atomicLoad(u32, &this.tmpfile_byte_offset, .SeqCst) - code_offset;
-
- if (comptime Environment.isDebug) {
- std.debug.assert(code_length > 0);
- std.debug.assert(package.hash != 0);
- std.debug.assert(package.version.len > 0);
- std.debug.assert(package.name.len > 0);
- std.debug.assert(module_id > 0);
- }
-
- var package_get_or_put_entry = try this.package_list_map.getOrPut(package.hash);
-
- if (!package_get_or_put_entry.found_existing) {
- package_get_or_put_entry.value_ptr.* = @truncate(u32, this.package_list.items.len);
- try this.package_list.append(
- Api.JavascriptBundledPackage{
- .name = try this.appendHeaderString(package.name),
- .version = try this.appendHeaderString(package.version),
- .hash = package.hash,
- },
- );
- this.has_jsx = this.has_jsx or strings.eql(package.name, this.bundler.options.jsx.package_name);
- }
-
- var path_extname_length = @truncate(u8, std.fs.path.extension(package_relative_path).len);
- try this.module_list.append(
- Api.JavascriptBundledModule{
- .path = try this.appendHeaderString(
- package_relative_path,
- ),
- .path_extname_length = path_extname_length,
- .package_id = package_get_or_put_entry.value_ptr.*,
- .id = module_id,
- .code = Api.StringPointer{
- .length = @truncate(u32, code_length),
- .offset = @truncate(u32, code_offset),
- },
- },
- );
-}
-threadlocal var json_e_string: js_ast.E.String = undefined;
-threadlocal var json_e_call: js_ast.E.Call = undefined;
-threadlocal var json_e_identifier: js_ast.E.Identifier = undefined;
-threadlocal var json_call_args: [1]js_ast.Expr = undefined;
-const PendingImports = std.AutoArrayHashMap(u32, ThreadPool.Worker.ProcessFileTask);
-pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, bundler: *Bundler, _resolve: _resolver.Result) !void {
- const resolve = _resolve;
- if (resolve.is_external) return;
-
- var shared_buffer = &worker.data.shared_buffer;
- var scan_pass_result = &worker.data.scan_pass_result;
- var file_path = (resolve.pathConst() orelse unreachable).*;
-
- var add_to_bundle = brk: {
- if (resolve.package_json) |package_json| {
- if (this.package_bundle_map.get(package_json.name)) |result| {
- break :brk result == .always;
- }
- }
-
- break :brk resolve.isLikelyNodeModule();
- };
-
- const source_dir = file_path.sourceDir();
- const loader = bundler.options.loader(file_path.name.ext);
- const platform = bundler.options.platform;
-
- defer scan_pass_result.reset();
- defer shared_buffer.reset();
- defer this.bundler.resetStore();
- var log = worker.data.log;
- var queue = PendingImports.init(worker.allocator);
- var __module_data: ?BundledModuleData = null;
- if (add_to_bundle) {
- __module_data = BundledModuleData.getForceBundleForMain(this, &resolve);
- }
- // If we're in a node_module, build that almost normally
- if (add_to_bundle and __module_data != null) {
- const module_data = __module_data.?;
-
- var code_offset: u32 = 0;
-
- const module_id = module_data.module_id;
- const package = module_data.package;
- const package_relative_path = module_data.import_path;
-
- file_path.pretty = module_data.package_path;
-
- const entry: CacheEntry = brk: {
- if (this.bundler.options.framework) |framework| {
- if (framework.override_modules_hashes.len > 0) {
- const package_relative_path_hash = std.hash.Wyhash.hash(0, module_data.package_path);
- if (std.mem.indexOfScalar(
- u64,
- framework.override_modules_hashes,
- package_relative_path_hash,
- )) |index| {
- const relative_path = [_]string{
- framework.resolved_dir,
- framework.override_modules.values[index],
- };
- var override_path = this.bundler.fs.absBuf(
- &relative_path,
- &override_file_path_buf,
- );
- override_file_path_buf[override_path.len] = 0;
- var override_pathZ = override_file_path_buf[0..override_path.len :0];
- break :brk try bundler.resolver.caches.fs.readFileShared(
- bundler.fs,
- override_pathZ,
- 0,
- null,
- shared_buffer,
- );
- }
- }
- }
-
- if (!strings.eqlComptime(file_path.namespace, "node"))
- break :brk try bundler.resolver.caches.fs.readFileShared(
- bundler.fs,
- file_path.textZ(),
- resolve.dirname_fd,
- if (resolve.file_fd != 0) resolve.file_fd else null,
- shared_buffer,
- );
-
- break :brk CacheEntry{
- .contents = NodeFallbackModules.contentsFromPath(file_path.text) orelse "",
- };
- };
-
- var approximate_newline_count: usize = 0;
- defer worker.data.estimated_input_lines_of_code += approximate_newline_count;
-
- // Handle empty files
- // We can't just ignore them. Sometimes code will try to import it. Often because of TypeScript types.
- // So we just say it's an empty object. Empty object mimicks what "browser": false does as well.
- // TODO: optimize this so that all the exports for these are done in one line instead of writing repeatedly
- if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) {
- code_offset = try this.writeEmptyModule(module_data.package_path, module_id);
- } else {
- var ast: js_ast.Ast = undefined;
-
- const source = logger.Source.initRecycledFile(
- Fs.File{
- .path = file_path,
- .contents = entry.contents,
- },
- bundler.allocator,
- ) catch return null;
-
- switch (loader) {
- .jsx,
- .tsx,
- .js,
- .ts,
- => {
- var jsx = _resolve.jsx;
- jsx.parse = loader.isJSX();
-
- var opts = js_parser.Parser.Options.init(jsx, loader);
- opts.legacy_transform_require_to_import = false;
- opts.features.dynamic_require = bundler.options.platform.isBun();
-
- opts.enable_legacy_bundling = true;
- opts.warn_about_unbundled_modules = false;
- opts.macro_context = &worker.data.macro_context;
- opts.features.auto_import_jsx = jsx.parse;
- opts.features.trim_unused_imports = this.bundler.options.trim_unused_imports orelse loader.isTypeScript();
- opts.tree_shaking = this.bundler.options.tree_shaking;
- ast = (bundler.resolver.caches.js.parse(
- bundler.allocator,
- opts,
- bundler.options.define,
- log,
- &source,
- ) catch null) orelse return;
-
- approximate_newline_count = ast.approximate_newline_count;
- if (ast.import_records.len > 0) {
- for (ast.import_records) |*import_record| {
-
- // Don't resolve the runtime
- if (import_record.is_internal or import_record.is_unused) {
- continue;
- }
-
- if (bundler.options.platform.isBun()) {
- if (JSC.DisabledModule.has(import_record.path.text)) {
- import_record.path.is_disabled = true;
- import_record.do_commonjs_transform_in_printer = true;
- import_record.is_legacy_bundled = true;
- continue;
- }
-
- if (JSC.HardcodedModule.Aliases.get(import_record.path.text)) |remapped| {
- import_record.path.text = remapped.path;
- import_record.tag = remapped.tag;
- if (remapped.tag != .none) {
- import_record.is_legacy_bundled = false;
- continue;
- }
- }
- }
-
- if (bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |tmp| {
- var _resolved_import = tmp;
- if (_resolved_import.is_external) {
- continue;
- }
- var path = _resolved_import.path() orelse {
- import_record.path.is_disabled = true;
-
- continue;
- };
-
- // if (_resolved_import.package_json == null) |pkg_json| {
- // _resolved_import.package_json = if (pkg_json.hash == resolve.package_json.?.hash)
- // resolve.package_json
- // else
- // _resolved_import.package_json;
- // }
-
- const resolved_import: *const _resolver.Result = &_resolved_import;
-
- const _module_data = BundledModuleData.getForceBundle(this, resolved_import) orelse {
- // if a macro imports code that cannot be bundled
- // we just silently disable it
- // because...we need some kind of hook to say "don't bundle this"
- import_record.path.is_disabled = true;
- import_record.tag = .macro;
-
- continue;
- };
- import_record.module_id = _module_data.module_id;
- std.debug.assert(import_record.module_id != 0);
-
- path.* = try path.dupeAlloc(this.allocator);
-
- import_record.path = path.*;
- _ = queue.getOrPutValue(
- _module_data.module_id,
- .{
- .resolution = _resolved_import,
- },
- ) catch unreachable;
- } else |err| {
- if (comptime Environment.isDebug) {
- if (!import_record.handles_import_errors) {
- Output.prettyErrorln("\n<r><red>{s}<r> resolving \"{s}\" from \"{s}\"", .{
- @errorName(err),
- import_record.path.text,
- file_path.text,
- });
- }
- }
-
- // Disable failing packages from being printed.
- // This may cause broken code to write.
- // However, doing this means we tell them all the resolve errors
- // Rather than just the first one.
- import_record.path.is_disabled = true;
-
- switch (err) {
- error.ModuleNotFound => {
- const addError = logger.Log.addResolveErrorWithTextDupeMaybeWarn;
-
- if (!import_record.handles_import_errors) {
- if (isPackagePath(import_record.path.text)) {
- if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve Node.js builtin: \"{s}\".",
- .{import_record.path.text},
- import_record.kind,
- platform.isBun(),
- );
- } else {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?",
- .{import_record.path.text},
- import_record.kind,
- platform.isBun(),
- );
- }
- } else if (!platform.isBun()) {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve: \"{s}\"",
- .{
- import_record.path.text,
- },
- import_record.kind,
- platform.isBun(),
- );
- }
- }
- },
- // assume other errors are already in the log
- else => {},
- }
- }
- }
- }
- },
- .json => {
- // parse the JSON _only_ to catch errors at build time.
- const json_parse_result = json_parser.ParseJSONForBundling(&source, worker.data.log, worker.allocator) catch return;
-
- if (json_parse_result.tag != .empty) {
- const expr = brk: {
- // If it's an ascii string, we just print it out with a big old JSON.parse()
- if (json_parse_result.tag == .ascii) {
- json_call_args[0] = js_ast.Expr.init(
- js_ast.E.String,
- js_ast.E.String{
- .data = source.contents,
- },
- .{ .start = 0 },
- );
-
- break :brk js_ast.Expr.init(
- js_ast.E.Call,
- js_ast.E.Call{
- .target = js_ast.Expr{
- .data = .{
- .e_identifier = js_ast.E.Identifier{
- .ref = Ref.atIndex(json_ast_symbols.len - 1),
- },
- },
- .loc = .{ .start = 0 },
- },
- .args = js_ast.ExprNodeList.init(json_call_args[0..]),
- },
- .{ .start = 0 },
- );
- } else {
- break :brk json_parse_result.expr;
- }
- };
-
- var stmt = js_ast.Stmt.alloc(
- js_ast.S.ExportDefault,
- js_ast.S.ExportDefault{
- .value = js_ast.StmtOrExpr{ .expr = expr },
- .default_name = js_ast.LocRef{
- .loc = .{},
- .ref = null,
- },
- },
- .{ .start = 0 },
- );
- var stmts = worker.allocator.alloc(js_ast.Stmt, 1) catch unreachable;
- stmts[0] = stmt;
- var parts = worker.allocator.alloc(js_ast.Part, 1) catch unreachable;
- parts[0] = js_ast.Part{ .stmts = stmts };
- ast = js_ast.Ast.initTest(parts);
-
- ast.runtime_imports = runtime.Runtime.Imports{};
- ast.runtime_imports.@"$$m" = .{ .ref = Ref.atIndex(0), .primary = Ref.None, .backup = Ref.None };
- ast.runtime_imports.__export = .{ .ref = Ref.atIndex(1), .primary = Ref.None, .backup = Ref.None };
- ast.symbols = js_ast.Symbol.List.init(&json_ast_symbols);
- ast.module_ref = Ref.atIndex(2);
- ast.exports_ref = ast.runtime_imports.__export.?.ref;
- ast.bundle_export_ref = Ref.atIndex(3);
- } else {
- var parts = &[_]js_ast.Part{};
- ast = js_ast.Ast.initTest(parts);
- }
- },
- else => {
- return;
- },
- }
-
- switch (ast.parts.len) {
- // It can be empty after parsing too
- // A file like this is an example:
- // "//# sourceMappingURL=validator.js.map"
- 0 => {
- code_offset = try this.writeEmptyModule(module_data.package_path, module_id);
- },
- else => {
- const register_ref = ast.runtime_imports.@"$$m".?.ref;
- const E = js_ast.E;
- const Expr = js_ast.Expr;
- const Stmt = js_ast.Stmt;
-
- var prepend_part: js_ast.Part = undefined;
- var needs_prepend_part = false;
- if (ast.parts.len > 1) {
- for (ast.parts) |part| {
- if (part.tag != .none and part.stmts.len > 0) {
- prepend_part = part;
- needs_prepend_part = true;
- break;
- }
- }
- }
-
- var package_path = js_ast.E.String{ .data = module_data.package_path };
-
- var target_identifier = E.Identifier{ .ref = register_ref };
- var module_binding = js_ast.B.Identifier{ .ref = ast.module_ref.? };
- var exports_binding = js_ast.B.Identifier{ .ref = ast.exports_ref.? };
-
- var part = &ast.parts[ast.parts.len - 1];
-
- var new_stmts: [1]Stmt = undefined;
- var register_args: [1]Expr = undefined;
-
- var cjs_args: [2]js_ast.G.Arg = [_]js_ast.G.Arg{
- .{
- .binding = .{
- .loc = logger.Loc.Empty,
- .data = .{ .b_identifier = &module_binding },
- },
- },
- .{
- .binding = js_ast.Binding{
- .loc = logger.Loc.Empty,
- .data = .{ .b_identifier = &exports_binding },
- },
- },
- };
-
- const module_path_str = js_ast.Expr{ .data = .{ .e_string = &package_path }, .loc = logger.Loc.Empty };
-
- var closure = E.Arrow{
- .args = &cjs_args,
- .body = .{
- .loc = logger.Loc.Empty,
- .stmts = part.stmts,
- },
- };
-
- var properties: [1]js_ast.G.Property = [_]js_ast.G.Property{
- .{
- .key = module_path_str,
- .value = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_arrow = &closure } },
- },
- };
-
- var e_object = E.Object{
- .properties = js_ast.G.Property.List.init(&properties),
- };
- // if (!ast.uses_module_ref) {
- // var symbol = &ast.symbols[ast.module_ref.?.innerIndex()];
- // symbol.original_name = "_$$";
- // }
-
- // $$m(12345, "react", "index.js", function(module, exports) {
-
- // })
- var accessor = js_ast.E.Index{ .index = module_path_str, .target = js_ast.Expr{
- .data = .{ .e_object = &e_object },
- .loc = logger.Loc.Empty,
- } };
- register_args[0] = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_index = &accessor } };
-
- var call_register = E.Call{
- .target = Expr{
- .data = .{ .e_identifier = target_identifier },
- .loc = logger.Loc{ .start = 0 },
- },
- .args = js_ast.ExprNodeList.init(&register_args),
- };
- var register_expr = Expr{ .loc = call_register.target.loc, .data = .{ .e_call = &call_register } };
- var decls: [1]js_ast.G.Decl = undefined;
- var bundle_export_binding = js_ast.B.Identifier{ .ref = ast.runtime_imports.@"$$m".?.ref };
- var binding = js_ast.Binding{
- .loc = register_expr.loc,
- .data = .{ .b_identifier = &bundle_export_binding },
- };
- decls[0] = js_ast.G.Decl{
- .value = register_expr,
- .binding = binding,
- };
- var export_var = js_ast.S.Local{
- .decls = &decls,
- .is_export = true,
- };
- new_stmts[0] = Stmt{ .loc = register_expr.loc, .data = .{ .s_local = &export_var } };
- part.stmts = &new_stmts;
-
- var writer = js_printer.NewFileWriter(this.tmpfile);
- var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
-
- // It should only have one part.
- ast.parts = ast.parts[ast.parts.len - 1 ..];
- const write_result =
- // The difference between these two is `ascii_only`
- // We try our best to print UTF-8.
- // However, JavaScriptCore does not accept UTF-8.
- // It accepts either latin1 characters as unsigned char
- // or UTF-16 chars as uint16
- // We don't want to add a UTF decoding pass to the .bun files
- // because it's potentially 15 MB of code.
- // If we store it as UTF-16 directly, then that 15 MB of code becomes 30 MB of code!
- // lots of code!
- //
-
- if (!bundler.options.platform.isBun())
- try js_printer.printCommonJSThreaded(
- @TypeOf(writer),
- writer,
- ast,
- js_ast.Symbol.Map.initList(symbols),
- &source,
- false,
- js_printer.Options{
- .bundle_export_ref = ast.runtime_imports.@"$$m".?.ref,
- .source_path = file_path,
- .externals = ast.externals,
- .indent = 0,
- .require_ref = ast.require_ref,
- .module_hash = module_id,
- .runtime_imports = ast.runtime_imports,
- .prepend_part_value = &prepend_part,
- .prepend_part_key = if (needs_prepend_part) closure.body.stmts.ptr else null,
- },
- Linker,
- &bundler.linker,
- &this.write_lock,
- std.fs.File,
- this.tmpfile,
- std.fs.File.getPos,
- &this.tmpfile_byte_offset,
- )
- else
- try js_printer.printCommonJSThreaded(
- @TypeOf(writer),
- writer,
- ast,
- js_ast.Symbol.Map.initList(symbols),
- &source,
- true,
- js_printer.Options{
- .bundle_export_ref = ast.runtime_imports.@"$$m".?.ref,
- .source_path = file_path,
- .externals = ast.externals,
- .indent = 0,
- .require_ref = ast.require_ref,
- .module_hash = module_id,
- .runtime_imports = ast.runtime_imports,
- .prepend_part_value = &prepend_part,
- .prepend_part_key = if (needs_prepend_part) closure.body.stmts.ptr else null,
- },
- Linker,
- &bundler.linker,
- &this.write_lock,
- std.fs.File,
- this.tmpfile,
- std.fs.File.getPos,
- &this.tmpfile_byte_offset,
- );
-
- code_offset = write_result.off;
- },
- }
- }
-
- if (comptime Environment.isDebug) {
- Output.prettyln("{s}@{s}/{s} - {d}:{d} \n", .{ package.name, package.version, package_relative_path, package.hash, module_id });
- Output.flush();
- std.debug.assert(package_relative_path.len > 0);
- }
-
- try this.appendToModuleList(
- package,
- module_id,
- code_offset,
- package_relative_path,
- );
- } else {
- // If it's app code, scan but do not fully parse.
- switch (loader) {
- .jsx,
- .tsx,
- .js,
- .ts,
- => {
- const entry = bundler.resolver.caches.fs.readFileShared(
- bundler.fs,
- file_path.textZ(),
- resolve.dirname_fd,
- if (resolve.file_fd != 0) resolve.file_fd else null,
- shared_buffer,
- ) catch return;
- if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) return;
-
- const source = logger.Source.initRecycledFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null;
-
- var jsx = bundler.options.jsx;
-
- jsx.parse = loader.isJSX();
- var opts = js_parser.Parser.Options.init(jsx, loader);
- opts.macro_context = &worker.data.macro_context;
- opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript();
-
- try bundler.resolver.caches.js.scan(
- bundler.allocator,
- scan_pass_result,
- opts,
- bundler.options.define,
- log,
- &source,
- );
- worker.data.estimated_input_lines_of_code += scan_pass_result.approximate_newline_count;
-
- {
- for (scan_pass_result.import_records.items) |*import_record| {
- if (import_record.is_internal or import_record.is_unused) {
- continue;
- }
-
- if (bundler.options.platform.isBun()) {
- if (JSC.DisabledModule.has(import_record.path.text)) {
- import_record.path.is_disabled = true;
- import_record.is_legacy_bundled = true;
- continue;
- }
-
- if (JSC.HardcodedModule.Aliases.get(import_record.path.text)) |remapped| {
- import_record.path.text = remapped.path;
- import_record.tag = remapped.tag;
- if (remapped.tag != .none) {
- import_record.is_legacy_bundled = false;
- continue;
- }
- }
- }
-
- var tmp = bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind);
- if (tmp) |*_resolved_import| {
- if (_resolved_import.is_external) {
- continue;
- }
-
- var path = _resolved_import.path() orelse continue;
-
- const loader_ = this.bundler.options.loader(path.name.ext);
-
- path.* = try path.dupeAlloc(this.allocator);
-
- if (BundledModuleData.get(this, _resolved_import)) |mod| {
- if (comptime !FeatureFlags.bundle_dynamic_import) {
- if (import_record.kind == .dynamic)
- continue;
- } else {
- // When app code dynamically imports a large file
- // Don't bundle it. Leave it as a separate file.
- // The main value from bundling in development is to minimize tiny, waterfall http requests
- // If you're importing > 100 KB file dynamically, developer is probably explicitly trying to do that.
- // There's a tradeoff between "I want to minimize page load time"
- if (import_record.kind == .dynamic) {
- this.dynamic_import_file_size_store_lock.lock();
- defer this.dynamic_import_file_size_store_lock.unlock();
- var dynamic_import_file_size = this.dynamic_import_file_size_store.getOrPut(mod.module_id) catch unreachable;
- if (!dynamic_import_file_size.found_existing) {
- var fd = _resolved_import.file_fd;
- var can_close = false;
- if (fd == 0) {
- dynamic_import_file_size.value_ptr.* = 0;
- fd = (std.fs.openFileAbsolute(path.textZ(), .{}) catch |err| {
- this.log.addRangeWarningFmt(
- &source,
- import_record.range,
- worker.allocator,
- "{s} opening file: \"{s}\"",
- .{ @errorName(err), path.text },
- ) catch unreachable;
- continue;
- }).handle;
- can_close = true;
- Fs.FileSystem.setMaxFd(fd);
- }
-
- defer {
- if (can_close and bundler.fs.fs.needToCloseFiles()) {
- var _file = std.fs.File{ .handle = fd };
- _file.close();
- _resolved_import.file_fd = 0;
- } else if (FeatureFlags.store_file_descriptors) {
- _resolved_import.file_fd = fd;
- }
- }
-
- var file = std.fs.File{ .handle = fd };
- var stat = file.stat() catch |err| {
- this.log.addRangeWarningFmt(
- &source,
- import_record.range,
- worker.allocator,
- "{s} stat'ing file: \"{s}\"",
- .{ @errorName(err), path.text },
- ) catch unreachable;
- dynamic_import_file_size.value_ptr.* = 0;
- continue;
- };
-
- dynamic_import_file_size.value_ptr.* = @truncate(u32, stat.size);
- }
-
- if (dynamic_import_file_size.value_ptr.* > 1024 * 100)
- continue;
- }
- }
-
- std.debug.assert(mod.module_id != 0);
- _ = queue.getOrPutValue(
- mod.module_id,
- .{
- .resolution = _resolved_import.*,
- },
- ) catch unreachable;
- } else {
- _ = queue.getOrPutValue(
- _resolved_import.hash(
- this.bundler.fs.top_level_dir,
- loader_,
- ),
- .{
- .resolution = _resolved_import.*,
- },
- ) catch unreachable;
- }
- } else |err| {
- switch (err) {
- error.ModuleNotFound => {
- if (!import_record.handles_import_errors) {
- const addError = logger.Log.addResolveErrorWithTextDupeMaybeWarn;
- if (isPackagePath(import_record.path.text)) {
- if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve Node.js builtin: \"{s}\".",
- .{import_record.path.text},
- import_record.kind,
- platform.isBun(),
- );
- } else {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?",
- .{import_record.path.text},
- import_record.kind,
- platform.isBun(),
- );
- }
- } else if (!platform.isBun()) {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve: \"{s}\"",
- .{
- import_record.path.text,
- },
- import_record.kind,
- platform.isBun(),
- );
- }
- }
- },
- // assume other errors are already in the log
- else => {},
- }
- }
- }
- }
- },
- else => {},
- }
- }
-
- if (queue.count() > 0) {
- try this.queue.writeItem(queue);
- }
-}
diff --git a/src/cli.zig b/src/cli.zig
index e5065fd79..61e236bad 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -165,6 +165,10 @@ pub const Arguments = struct {
clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx, ts, css") catch unreachable,
clap.parseParam("-u, --origin <STR> Rewrite import URLs to start with --origin. Default: \"\"") catch unreachable,
clap.parseParam("-p, --port <STR> Port to serve bun's dev server on. Default: \"3000\"") catch unreachable,
+ clap.parseParam("--minify Minify (experimental)") catch unreachable,
+ clap.parseParam("--minify-syntax Minify syntax and inline data (experimental)") catch unreachable,
+ clap.parseParam("--minify-whitespace Minify whitespace (experimental)") catch unreachable,
+ clap.parseParam("--minify-identifiers Minify identifiers") catch unreachable,
clap.parseParam("<POS>... ") catch unreachable,
};
@@ -208,8 +212,6 @@ pub const Arguments = struct {
clap.parseParam("--server-components Enable React Server Components (experimental)") catch unreachable,
clap.parseParam("--splitting Split up code!") catch unreachable,
clap.parseParam("--transform Do not bundle") catch unreachable,
- clap.parseParam("--minify-syntax Minify syntax and inline data (experimental)") catch unreachable,
- clap.parseParam("--minify-whitespace Minify whitespace (experimental)") catch unreachable,
};
// TODO: update test completions
@@ -476,10 +478,14 @@ pub const Arguments = struct {
const production = false;
var output_file: ?string = null;
+ const minify_flag = args.flag("--minify");
+ ctx.bundler_options.minify_syntax = minify_flag or args.flag("--minify-syntax");
+ ctx.bundler_options.minify_whitespace = minify_flag or args.flag("--minify-whitespace");
+ ctx.bundler_options.minify_identifiers = minify_flag or args.flag("--minify-identifiers");
+
if (cmd == .BuildCommand) {
ctx.bundler_options.transform_only = args.flag("--transform");
- ctx.bundler_options.minify_syntax = args.flag("--minify-syntax");
- ctx.bundler_options.minify_whitespace = args.flag("--minify-whitespace");
+
if (args.option("--outdir")) |outdir| {
if (outdir.len > 0) {
ctx.bundler_options.outdir = outdir;
@@ -922,6 +928,7 @@ pub const Command = struct {
transform_only: bool = false,
minify_syntax: bool = false,
minify_whitespace: bool = false,
+ minify_identifiers: bool = false,
};
const _ctx = Command.Context{
diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig
index 0f0cd2609..33ab05632 100644
--- a/src/cli/build_command.zig
+++ b/src/cli/build_command.zig
@@ -27,7 +27,6 @@ const resolve_path = @import("../resolver/resolve_path.zig");
const configureTransformOptionsForBun = @import("../bun.js/config.zig").configureTransformOptionsForBun;
const bundler = bun.bundler;
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
-const GenerateNodeModuleBundle = @import("../bundler/generate_node_modules_bundle.zig");
const DotEnv = @import("../env_loader.zig");
const fs = @import("../fs.zig");
@@ -53,12 +52,16 @@ pub const BuildCommand = struct {
this_bundler.resolver.opts.react_server_components = ctx.bundler_options.react_server_components;
this_bundler.options.code_splitting = ctx.bundler_options.code_splitting;
this_bundler.resolver.opts.code_splitting = ctx.bundler_options.code_splitting;
+
this_bundler.options.minify_syntax = ctx.bundler_options.minify_syntax;
- this_bundler.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace;
+ this_bundler.resolver.opts.minify_syntax = ctx.bundler_options.minify_syntax;
this_bundler.options.minify_whitespace = ctx.bundler_options.minify_whitespace;
this_bundler.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace;
+ this_bundler.options.minify_identifiers = ctx.bundler_options.minify_identifiers;
+ this_bundler.resolver.opts.minify_identifiers = ctx.bundler_options.minify_identifiers;
+
this_bundler.configureLinker();
// This step is optional
@@ -99,6 +102,8 @@ pub const BuildCommand = struct {
if (ctx.bundler_options.transform_only) {
this_bundler.linker.options.resolve_mode = .lazy;
this_bundler.options.import_path_format = .relative;
+ this_bundler.options.allow_runtime = false;
+ this_bundler.resolver.opts.allow_runtime = false;
// TODO: refactor this .transform function
const result = try this_bundler.transform(
diff --git a/src/env_loader.zig b/src/env_loader.zig
index 76a8d293d..30c9dc2cd 100644
--- a/src/env_loader.zig
+++ b/src/env_loader.zig
@@ -924,15 +924,9 @@ pub const Map = struct {
pub fn cloneToEnvMap(this: *Map, allocator: std.mem.Allocator) !std.process.EnvMap {
var env_map = std.process.EnvMap.init(allocator);
- const Convert = struct {
- pub fn constStrToU8(s: string) []u8 {
- return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
- }
- };
-
var iter_ = this.map.iterator();
while (iter_.next()) |entry| {
- try env_map.putMove(Convert.constStrToU8(entry.key_ptr.*), Convert.constStrToU8(entry.value_ptr.*));
+ try env_map.putMove(bun.constStrToU8(entry.key_ptr.*), bun.constStrToU8(entry.value_ptr.*));
}
return env_map;
diff --git a/src/http.zig b/src/http.zig
index a030aed80..08ecbda92 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -44,9 +44,7 @@ const ObjectPool = @import("./pool.zig").ObjectPool;
const Lock = @import("./lock.zig").Lock;
const RequestDataPool = ObjectPool([32_000]u8, null, false, 1);
const ResolveWatcher = @import("./resolver/resolver.zig").ResolveWatcher;
-pub fn constStrToU8(s: string) []u8 {
- return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
-}
+const constStrToU8 = bun.constStrToU8;
pub const MutableStringAPIWriter = NewApiWriter(*MutableString);
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 40675805f..451d01a1b 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -552,10 +552,190 @@ pub const ClauseItem = struct {
pub const default_alias: string = "default";
};
+pub const SlotCounts = struct {
+    slots: Symbol.SlotNamespace.CountsArray = Symbol.SlotNamespace.CountsArray.initFill(0),
+
+    /// Element-wise maximum: raise each of our per-namespace slot counts to
+    /// at least the corresponding count in `other`.
+    pub fn unionMax(this: *SlotCounts, other: SlotCounts) void {
+        for (&this.slots.values, 0..) |*mine, i| {
+            const theirs = other.slots.values[i];
+            if (mine.* < theirs) mine.* = theirs;
+        }
+    }
+};
+
+pub const CharAndCount = struct {
+    char: u8 = 0,
+    count: i32 = 0,
+    index: usize = 0,
+
+    pub const Array = [64]CharAndCount;
+
+    /// Sort order: higher count first; ties broken by lower original index
+    /// so the ordering is stable and deterministic.
+    pub fn lessThan(_: void, a: CharAndCount, b: CharAndCount) bool {
+        if (a.count != b.count) return a.count > b.count;
+        return a.index < b.index;
+    }
+};
+
+pub const CharFreq = struct {
+    const Vector = @Vector(64, i32);
+    const Buffer = [64]i32;
+
+    /// Histogram of identifier-character usage: one bucket per character of
+    /// `NameMinifier.default_tail` (a-z, A-Z, 0-9, '_', '$'), in that order.
+    freqs: Buffer align(@alignOf(Vector)) = undefined,
+
+    const scan_big_chunk_size = 32;
+
+    /// Add (`delta` = +1) or subtract (`delta` = -1) every identifier
+    /// character of `text` to/from the histogram. Non-identifier bytes are
+    /// ignored.
+    pub fn scan(this: *CharFreq, text: string, delta: i32) void {
+        if (delta == 0)
+            return;
+
+        if (text.len < scan_big_chunk_size) {
+            scanSmall(&this.freqs, text, delta);
+        } else {
+            scanBig(&this.freqs, text, delta);
+        }
+    }
+
+    fn scanBig(out: *align(@alignOf(Vector)) Buffer, text: string, delta: i32) void {
+        // https://zig.godbolt.org/z/P5dPojWGK
+        var freqs = out.*;
+        defer out.* = freqs;
+        // 256 buckets so that every possible byte value (0-255) is a valid
+        // index; was [255]i32, which is out of bounds for byte 0xFF. Only the
+        // 64 identifier buckets are copied into `freqs` at the end.
+        var deltas: [256]i32 = [_]i32{0} ** 256;
+        var remain = text;
+
+        std.debug.assert(remain.len >= scan_big_chunk_size);
+
+        const unrolled = remain.len - (remain.len % scan_big_chunk_size);
+        var remain_end = remain.ptr + unrolled;
+        var unrolled_ptr = remain.ptr;
+        remain = remain[unrolled..];
+
+        while (unrolled_ptr != remain_end) : (unrolled_ptr += scan_big_chunk_size) {
+            const chunk = unrolled_ptr[0..scan_big_chunk_size].*;
+            comptime var i: usize = 0;
+            // Unroll one iteration per byte of the chunk. The increment must
+            // be 1; stepping by scan_big_chunk_size would visit only i == 0
+            // and count just the first byte of every chunk.
+            inline while (i < scan_big_chunk_size) : (i += 1) {
+                deltas[@as(usize, chunk[i])] += delta;
+            }
+        }
+
+        // Leftover tail that didn't fill a whole chunk.
+        for (remain) |c| {
+            deltas[@as(usize, c)] += delta;
+        }
+
+        // Project the raw byte histogram onto the 64 identifier buckets.
+        freqs[0..26].* = deltas['a' .. 'a' + 26].*;
+        freqs[26 .. 26 * 2].* = deltas['A' .. 'A' + 26].*;
+        freqs[26 * 2 .. 62].* = deltas['0' .. '0' + 10].*;
+        freqs[62] = deltas['_'];
+        freqs[63] = deltas['$'];
+    }
+
+    fn scanSmall(out: *align(@alignOf(Vector)) [64]i32, text: string, delta: i32) void {
+        var freqs: [64]i32 = out.*;
+        defer out.* = freqs;
+
+        for (text) |c| {
+            // Map the byte to its bucket, skipping non-identifier bytes.
+            const i: usize = switch (c) {
+                'a'...'z' => @intCast(usize, c) - 'a',
+                'A'...'Z' => @intCast(usize, c) - ('A' - 26),
+                '0'...'9' => @intCast(usize, c) + (53 - '0'),
+                '_' => 62,
+                '$' => 63,
+                else => continue,
+            };
+            freqs[i] += delta;
+        }
+    }
+
+    /// Merge another histogram into this one (element-wise sum, vectorized).
+    pub fn include(this: *CharFreq, other: CharFreq) void {
+        // https://zig.godbolt.org/z/Mq8eK6K9s
+        var left: @Vector(64, i32) = this.freqs;
+        defer this.freqs = left;
+        const right: @Vector(64, i32) = other.freqs;
+
+        left += right;
+    }
+
+    /// Build a NameMinifier whose alphabets are ordered by descending
+    /// frequency, so the most common characters get the shortest names.
+    /// Caller owns the returned minifier's allocations.
+    pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier {
+        var array: CharAndCount.Array = brk: {
+            var _array: CharAndCount.Array = undefined;
+            const freqs = this.freqs;
+
+            for (&_array, NameMinifier.default_tail, &freqs, 0..) |*dest, char, freq, i| {
+                dest.* = CharAndCount{
+                    .char = char,
+                    .index = i,
+                    .count = freq,
+                };
+            }
+            break :brk _array;
+        };
+
+        std.sort.sort(CharAndCount, &array, {}, CharAndCount.lessThan);
+
+        var minifier = NameMinifier.init(allocator);
+        minifier.head.ensureTotalCapacityPrecise(NameMinifier.default_head.len) catch unreachable;
+        minifier.tail.ensureTotalCapacityPrecise(NameMinifier.default_tail.len) catch unreachable;
+        // TODO: investigate counting number of < 0 and > 0 and pre-allocating
+        for (array) |item| {
+            // Digits cannot start an identifier, so they are excluded from
+            // the "head" alphabet.
+            if (item.char < '0' or item.char > '9') {
+                minifier.head.append(item.char) catch unreachable;
+            }
+            minifier.tail.append(item.char) catch unreachable;
+        }
+
+        return minifier;
+    }
+};
+
+pub const NameMinifier = struct {
+    /// Characters allowed in the first position of a minified name
+    /// (no digits), ordered most-frequent-first after `CharFreq.compile`.
+    head: std.ArrayList(u8),
+    /// Characters allowed in every later position, same ordering.
+    tail: std.ArrayList(u8),
+
+    pub const default_head = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_$";
+    pub const default_tail = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$";
+
+    pub fn init(allocator: std.mem.Allocator) NameMinifier {
+        return .{
+            .head = std.ArrayList(u8).init(allocator),
+            .tail = std.ArrayList(u8).init(allocator),
+        };
+    }
+
+    /// Encode slot number `_i` as a minified identifier into `name`,
+    /// clearing any previous contents. First character comes from the
+    /// 54-entry head alphabet, the rest from the 64-entry tail alphabet.
+    pub fn numberToMinifiedName(this: *NameMinifier, name: *std.ArrayList(u8), _i: isize) !void {
+        name.clearRetainingCapacity();
+        var remainder = _i;
+        try name.append(this.head.items[@intCast(usize, @mod(remainder, 54))]);
+        remainder = @divFloor(remainder, 54);
+
+        while (remainder > 0) {
+            remainder -= 1;
+            try name.append(this.tail.items[@intCast(usize, @mod(remainder, 64))]);
+            remainder = @divFloor(remainder, 64);
+        }
+    }
+
+    /// Same encoding as `numberToMinifiedName`, but using the default
+    /// (frequency-agnostic) alphabets. Caller owns the returned slice.
+    pub fn defaultNumberToMinifiedName(allocator: std.mem.Allocator, _i: isize) !string {
+        var remainder = _i;
+        var name = std.ArrayList(u8).init(allocator);
+        try name.append(default_head[@intCast(usize, @mod(remainder, 54))]);
+        remainder = @divFloor(remainder, 54);
+
+        while (remainder > 0) {
+            remainder -= 1;
+            try name.append(default_tail[@intCast(usize, @mod(remainder, 64))]);
+            remainder = @divFloor(remainder, 64);
+        }
+
+        return name.items;
+    }
+};
+
pub const G = struct {
pub const Decl = struct {
binding: BindingNodeIndex,
value: ?ExprNodeIndex = null,
+
+ pub const List = BabyList(Decl);
};
pub const NamespaceAlias = struct {
@@ -708,6 +888,10 @@ pub const Symbol = struct {
/// Do not use this directly. Use `nestedScopeSlot()` instead.
nested_scope_slot: u32 = invalid_nested_scope_slot,
+ did_keep_name: bool = true,
+
+ must_start_with_capital_letter_for_jsx: bool = false,
+
/// The kind of symbol. This is used to determine how to print the symbol
/// and how to deal with conflicts, renaming, etc.
kind: Kind = Kind.other,
@@ -804,14 +988,16 @@ pub const Symbol = struct {
u0 = 0,
const invalid_chunk_index = std.math.maxInt(u32);
- const invalid_nested_scope_slot = std.math.maxInt(u32);
+ pub const invalid_nested_scope_slot = std.math.maxInt(u32);
pub const SlotNamespace = enum {
+ must_not_be_renamed,
default,
label,
private_name,
mangled_prop,
- must_not_be_renamed,
+
+ pub const CountsArray = std.EnumArray(SlotNamespace, u32);
};
/// This is for generating cross-chunk imports and exports for code splitting.
@@ -5071,7 +5257,7 @@ pub const S = struct {
pub const Local = struct {
kind: Kind = Kind.k_var,
- decls: []G.Decl,
+ decls: G.Decl.List = .{},
is_export: bool = false,
// The TypeScript compiler doesn't generate code for "import foo = bar"
// statements where the import is never used.
@@ -5408,6 +5594,8 @@ pub const Ast = struct {
has_lazy_export: bool = false,
runtime_imports: Runtime.Imports = .{},
+ nested_scope_slot_counts: SlotCounts = SlotCounts{},
+
runtime_import_record_id: ?u32 = null,
needs_runtime: bool = false,
externals: []u32 = &[_]u32{},
@@ -5439,7 +5627,7 @@ pub const Ast = struct {
// This list may be mutated later, so we should store the capacity
symbols: Symbol.List = Symbol.List{},
module_scope: Scope = Scope{},
- // char_freq: *CharFreq,
+ char_freq: ?CharFreq = null,
exports_ref: Ref = Ref.None,
module_ref: Ref = Ref.None,
wrapper_ref: Ref = Ref.None,
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 8ad3cdbba..8f285396d 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -144,7 +144,6 @@ fn NewLexer_(
is_legacy_octal_literal: bool = false,
is_log_disabled: bool = false,
comments_to_preserve_before: std.ArrayList(js_ast.G.Comment),
- all_original_comments: ?[]js_ast.G.Comment = null,
code_point: CodePoint = -1,
identifier: []const u8 = "",
jsx_pragma: JSXPragma = .{},
@@ -165,6 +164,8 @@ fn NewLexer_(
/// Only used for JSON stringification when bundling
/// This is a zero-bit type unless we're parsing JSON.
is_ascii_only: JSONBool = JSONBoolDefault,
+ track_comments: bool = false,
+ all_comments: std.ArrayList(logger.Range),
pub fn clone(self: *const LexerType) LexerType {
return LexerType{
@@ -183,7 +184,6 @@ fn NewLexer_(
.is_legacy_octal_literal = self.is_legacy_octal_literal,
.is_log_disabled = self.is_log_disabled,
.comments_to_preserve_before = self.comments_to_preserve_before,
- .all_original_comments = self.all_original_comments,
.code_point = self.code_point,
.identifier = self.identifier,
.regex_flags_start = self.regex_flags_start,
@@ -198,6 +198,7 @@ fn NewLexer_(
.string_literal = self.string_literal,
.string_literal_is_ascii = self.string_literal_is_ascii,
.is_ascii_only = self.is_ascii_only,
+ .all_comments = self.all_comments,
};
}
@@ -276,7 +277,10 @@ fn NewLexer_(
return @enumToInt(lexer.token) >= @enumToInt(T.t_identifier);
}
- pub fn deinit(_: *LexerType) void {}
+ pub fn deinit(this: *LexerType) void {
+ this.all_comments.clearAndFree();
+ this.comments_to_preserve_before.clearAndFree();
+ }
fn decodeEscapeSequences(lexer: *LexerType, start: usize, text: string, comptime BufType: type, buf_: *BufType) !void {
var buf = buf_.*;
@@ -1816,6 +1820,11 @@ fn NewLexer_(
const has_legal_annotation = text.len > 2 and text[2] == '!';
const is_multiline_comment = text.len > 1 and text[1] == '*';
+ if (lexer.track_comments)
+ // Save the original comment text so we can subtract comments from the
+ // character frequency analysis used by symbol minification
+ lexer.all_comments.append(lexer.range()) catch unreachable;
+
// Omit the trailing "*/" from the checks below
const end_comment_text =
if (is_multiline_comment)
@@ -1974,6 +1983,7 @@ fn NewLexer_(
.string_literal_is_ascii = true,
.allocator = allocator,
.comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator),
+ .all_comments = std.ArrayList(logger.Range).init(allocator),
};
lex.step();
try lex.next();
@@ -1991,6 +2001,7 @@ fn NewLexer_(
.prev_error_loc = logger.Loc.Empty,
.allocator = allocator,
.comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator),
+ .all_comments = std.ArrayList(logger.Range).init(allocator),
};
lex.step();
try lex.next();
@@ -2008,6 +2019,7 @@ fn NewLexer_(
.prev_error_loc = logger.Loc.Empty,
.allocator = allocator,
.comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator),
+ .all_comments = std.ArrayList(logger.Range).init(allocator),
};
}
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 8147ec947..34db0e5ac 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -1126,7 +1126,7 @@ pub const ImportScanner = struct {
},
.s_local => |st| {
if (st.is_export) {
- for (st.decls) |decl| {
+ for (st.decls.slice()) |decl| {
p.recordExportedBinding(decl.binding);
}
}
@@ -1134,7 +1134,7 @@ pub const ImportScanner = struct {
// Remove unused import-equals statements, since those likely
// correspond to types instead of values
if (st.was_ts_import_equals and !st.is_export and st.decls.len > 0) {
- var decl = st.decls[0];
+ var decl = st.decls.ptr[0];
// Skip to the underlying reference
var value = decl.value;
@@ -1234,7 +1234,7 @@ pub const ImportScanner = struct {
decls[0] = G.Decl{ .binding = p.b(B.Identifier{ .ref = st.default_name.ref.? }, stmt.loc), .value = ex };
stmt = p.s(S.Local{
- .decls = decls,
+ .decls = G.Decl.List.init(decls),
.kind = S.Local.Kind.k_var,
.is_export = false,
}, ex.loc);
@@ -1254,7 +1254,7 @@ pub const ImportScanner = struct {
decls[0] = G.Decl{ .binding = p.b(B.Identifier{ .ref = st.default_name.ref.? }, stmt.loc), .value = p.newExpr(E.Function{ .func = func.func }, stmt.loc) };
stmt = p.s(S.Local{
- .decls = decls,
+ .decls = Decl.List.init(decls),
.kind = S.Local.Kind.k_var,
.is_export = false,
}, stmt.loc);
@@ -1283,7 +1283,7 @@ pub const ImportScanner = struct {
};
stmt = p.s(S.Local{
- .decls = decls,
+ .decls = Decl.List.init(decls),
.kind = S.Local.Kind.k_var,
.is_export = false,
}, stmt.loc);
@@ -1787,18 +1787,18 @@ pub const SideEffects = enum(u1) {
// Omit everything except the identifiers
// common case: single var foo = blah, don't need to allocate
- if (local.decls.len == 1 and local.decls[0].binding.data == .b_identifier) {
- const prev = local.decls[0];
- stmt.data.s_local.decls[0] = G.Decl{ .binding = prev.binding };
+ if (local.decls.len == 1 and local.decls.ptr[0].binding.data == .b_identifier) {
+ const prev = local.decls.ptr[0];
+ stmt.data.s_local.decls.ptr[0] = G.Decl{ .binding = prev.binding };
return true;
}
var decls = std.ArrayList(G.Decl).initCapacity(allocator, local.decls.len) catch unreachable;
- for (local.decls) |decl| {
+ for (local.decls.slice()) |decl| {
findIdentifiers(decl.binding, &decls);
}
- local.decls = decls.toOwnedSlice() catch @panic("TODO");
+ local.decls.update(decls);
return true;
},
@@ -2194,7 +2194,7 @@ const IdentifierOpts = packed struct {
};
fn statementCaresAboutScope(stmt: Stmt) bool {
- switch (stmt.data) {
+ return switch (stmt.data) {
.s_block,
.s_empty,
.s_debugger,
@@ -2213,20 +2213,12 @@ fn statementCaresAboutScope(stmt: Stmt) bool {
.s_break,
.s_continue,
.s_directive,
- => {
- return false;
- },
- // This is technically incorrect.
- // var does not care about the scope
- // However, we are choosing _not_ to relocate vars to the top level
+ .s_label,
+ => false,
- .s_local => |local| {
- return local.kind != .k_var;
- },
- else => {
- return true;
- },
- }
+ .s_local => |local| local.kind != .k_var,
+ else => true,
+ };
}
const ExprIn = struct {
@@ -2672,6 +2664,8 @@ pub const Parser = struct {
module_type: options.ModuleType = .unknown,
+ transform_only: bool = false,
+
pub fn init(jsx: options.JSX.Pragma, loader: options.Loader) Options {
var opts = Options{
.ts = loader.isTypeScript(),
@@ -2767,6 +2761,7 @@ pub const Parser = struct {
pub fn toLazyExportAST(this: *Parser, expr: Expr, comptime runtime_api_call: []const u8) !js_ast.Result {
var p: JavaScriptParser = undefined;
try JavaScriptParser.init(this.allocator, this.log, this.source, this.define, this.lexer, this.options, &p);
+ p.lexer.track_comments = this.options.features.minify_identifiers;
p.should_fold_typescript_constant_expressions = this.options.features.should_fold_typescript_constant_expressions;
defer p.lexer.deinit();
var result: js_ast.Result = undefined;
@@ -2925,14 +2920,14 @@ pub const Parser = struct {
switch (stmt.data) {
.s_local => |local| {
if (local.decls.len > 1) {
- for (local.decls) |decl| {
+ for (local.decls.slice()) |decl| {
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
sliced.items.len = 1;
var _local = local.*;
var list = try ListManaged(G.Decl).initCapacity(p.allocator, 1);
list.items.len = 1;
list.items[0] = decl;
- _local.decls = list.items;
+ _local.decls.update(list);
sliced.items[0] = p.s(_local, stmt.loc);
try p.appendPart(&parts, sliced.items);
}
@@ -3051,7 +3046,7 @@ pub const Parser = struct {
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
part_stmts[0] = p.s(S.Local{
.kind = .k_var,
- .decls = decls,
+ .decls = Decl.List.init(decls),
}, logger.Loc.Empty);
before.append(js_ast.Part{
.stmts = part_stmts,
@@ -3116,7 +3111,7 @@ pub const Parser = struct {
.kind = .k_var,
.is_export = false,
.was_commonjs_export = true,
- .decls = decls,
+ .decls = Decl.List.init(decls),
},
export_ref.loc_ref.loc,
);
@@ -3289,7 +3284,7 @@ pub const Parser = struct {
exports_kind = .esm;
} else if (uses_exports_ref or uses_module_ref or p.has_top_level_return) {
exports_kind = .cjs;
- if (!p.options.bundle) {
+ if (!p.options.bundle and (!p.options.transform_only or p.options.features.dynamic_require)) {
if (p.options.legacy_transform_require_to_import or (p.options.features.dynamic_require and !p.options.enable_legacy_bundling)) {
var args = p.allocator.alloc(Expr, 2) catch unreachable;
@@ -5240,7 +5235,7 @@ fn NewParser_(
switch (stmt.data) {
.s_local => |local| {
if (local.is_export) break :can_remove_part false;
- for (local.decls) |decl| {
+ for (local.decls.slice()) |decl| {
if (isBindingUsed(p, decl.binding, default_export_ref))
break :can_remove_part false;
}
@@ -5386,6 +5381,60 @@ fn NewParser_(
}
}
+ fn computeCharacterFrequency(p: *P) ?js_ast.CharFreq {
+ if (!p.options.features.minify_identifiers or (p.options.bundle and p.source.index.isRuntime())) {
+ return null;
+ }
+
+ // Add everything in the file to the histogram
+ var freq: js_ast.CharFreq = .{
+ .freqs = [_]i32{0} ** 64,
+ };
+
+ freq.scan(p.source.contents, 1);
+
+ // Subtract out all comments
+ for (p.lexer.all_comments.items) |comment_range| {
+ freq.scan(p.source.textForRange(comment_range), -1);
+ }
+
+ // Subtract out all import paths
+ for (p.import_records.items) |record| {
+ freq.scan(record.path.text, -1);
+ }
+
+ const ScopeVisitor = struct {
+ pub fn visit(symbols: []const js_ast.Symbol, char_freq: *js_ast.CharFreq, scope: *js_ast.Scope) void {
+ var iter = scope.members.iterator();
+
+ while (iter.next()) |entry| {
+ const symbol: *const Symbol = &symbols[entry.value_ptr.ref.innerIndex()];
+
+ if (symbol.slotNamespace() != .must_not_be_renamed) {
+ char_freq.scan(symbol.original_name, -@intCast(i32, symbol.use_count_estimate));
+ }
+ }
+
+ if (scope.label_ref) |ref| {
+ const symbol = &symbols[ref.innerIndex()];
+
+ if (symbol.slotNamespace() != .must_not_be_renamed) {
+ char_freq.scan(symbol.original_name, -@intCast(i32, symbol.use_count_estimate) - 1);
+ }
+ }
+
+ for (scope.children.slice()) |child| {
+ visit(symbols, char_freq, child);
+ }
+ }
+ };
+ ScopeVisitor.visit(p.symbols.items, &freq, p.module_scope);
+
+ // TODO: mangledProps
+
+ return freq;
+ }
+
pub fn newExpr(p: *P, t: anytype, loc: logger.Loc) Expr {
const Type = @TypeOf(t);
@@ -5808,7 +5857,7 @@ fn NewParser_(
},
.s_local => |local| {
if (local.decls.len > 0) {
- var first: *Decl = &local.decls[0];
+ var first: *Decl = &local.decls.ptr[0];
if (first.value) |*value| {
if (first.binding.data == .b_identifier) {
break :brk value;
@@ -8914,7 +8963,7 @@ fn NewParser_(
try p.lexer.next();
const decls = try p.parseAndDeclareDecls(.hoisted, opts);
try p.lexer.expectOrInsertSemicolon();
- return p.s(S.Local{ .kind = .k_var, .decls = decls, .is_export = opts.is_export }, loc);
+ return p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
},
.t_const => {
if (opts.lexical_decl != .allow_all) {
@@ -8932,12 +8981,12 @@ fn NewParser_(
try p.lexer.expectOrInsertSemicolon();
if (!opts.is_typescript_declare) {
- try p.requireInitializers(decls);
+ try p.requireInitializers(decls.items);
}
// When HMR is enabled, replace all const/let exports with var
const kind = if (p.options.features.hot_module_reloading and opts.is_export) S.Local.Kind.k_var else S.Local.Kind.k_const;
- return p.s(S.Local{ .kind = kind, .decls = decls, .is_export = opts.is_export }, loc);
+ return p.s(S.Local{ .kind = kind, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
},
.t_if => {
try p.lexer.next();
@@ -9168,7 +9217,7 @@ fn NewParser_(
bad_let_range = p.lexer.range();
}
- var decls: []G.Decl = &([_]G.Decl{});
+ var decls: G.Decl.List = .{};
var init_loc = p.lexer.loc();
var is_var = false;
switch (p.lexer.token) {
@@ -9177,15 +9226,15 @@ fn NewParser_(
is_var = true;
try p.lexer.next();
var stmtOpts = ParseStatementOptions{};
- decls = try p.parseAndDeclareDecls(.hoisted, &stmtOpts);
- init_ = p.s(S.Local{ .kind = .k_var, .decls = decls }, init_loc);
+ decls.update(try p.parseAndDeclareDecls(.hoisted, &stmtOpts));
+ init_ = p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls) }, init_loc);
},
// for (const )
.t_const => {
try p.lexer.next();
var stmtOpts = ParseStatementOptions{};
- decls = try p.parseAndDeclareDecls(.cconst, &stmtOpts);
- init_ = p.s(S.Local{ .kind = .k_const, .decls = decls }, init_loc);
+ decls.update(try p.parseAndDeclareDecls(.cconst, &stmtOpts));
+ init_ = p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls) }, init_loc);
},
// for (;)
.t_semicolon => {},
@@ -9226,7 +9275,7 @@ fn NewParser_(
}
}
- try p.forbidInitializers(decls, "of", false);
+ try p.forbidInitializers(decls.slice(), "of", false);
try p.lexer.next();
const value = try p.parseExpr(.comma);
try p.lexer.expect(.t_close_paren);
@@ -9237,7 +9286,7 @@ fn NewParser_(
// Detect for-in loops
if (p.lexer.token == .t_in) {
- try p.forbidInitializers(decls, "in", is_var);
+ try p.forbidInitializers(decls.slice(), "in", is_var);
try p.lexer.next();
const value = try p.parseExpr(.lowest);
try p.lexer.expect(.t_close_paren);
@@ -9251,7 +9300,7 @@ fn NewParser_(
switch (init_stmt.data) {
.s_local => {
if (init_stmt.data.s_local.kind == .k_const) {
- try p.requireInitializers(decls);
+ try p.requireInitializers(decls.slice());
}
},
else => {},
@@ -9662,14 +9711,14 @@ fn NewParser_(
// of the declared bindings. That "export var" statement will later
// cause identifiers to be transformed into property accesses.
if (opts.is_namespace_scope and opts.is_export) {
- var decls: []G.Decl = &([_]G.Decl{});
+ var decls: G.Decl.List = .{};
switch (stmt.data) {
.s_local => |local| {
var _decls = try ListManaged(G.Decl).initCapacity(p.allocator, local.decls.len);
- for (local.decls) |decl| {
+ for (local.decls.slice()) |decl| {
try extractDeclsForBinding(decl.binding, &_decls);
}
- decls = _decls.items;
+ decls.update(_decls);
},
else => {},
}
@@ -9954,7 +10003,7 @@ fn NewParser_(
.binding = p.b(B.Identifier{ .ref = ref }, default_name_loc),
.value = value,
};
- return p.s(S.Local{ .kind = kind, .decls = decls, .is_export = opts.is_export, .was_ts_import_equals = true }, loc);
+ return p.s(S.Local{ .kind = kind, .decls = Decl.List.init(decls), .is_export = opts.is_export, .was_ts_import_equals = true }, loc);
}
fn parseClauseAlias(p: *P, kind: string) !string {
@@ -10180,11 +10229,11 @@ fn NewParser_(
.stmt = p.s(S.Local{
// Replace all "export let" with "export var" when HMR is enabled
.kind = if (opts.is_export and p.options.features.hot_module_reloading) .k_var else .k_let,
- .decls = decls,
+ .decls = G.Decl.List.fromList(decls),
.is_export = opts.is_export,
}, let_range.loc),
},
- .decls = decls,
+ .decls = decls.items,
};
}
},
@@ -10443,7 +10492,7 @@ fn NewParser_(
};
}
- fn parseAndDeclareDecls(p: *P, kind: Symbol.Kind, opts: *ParseStatementOptions) anyerror![]G.Decl {
+ fn parseAndDeclareDecls(p: *P, kind: Symbol.Kind, opts: *ParseStatementOptions) anyerror!ListManaged(G.Decl) {
var decls = ListManaged(G.Decl).init(p.allocator);
while (true) {
@@ -10487,7 +10536,7 @@ fn NewParser_(
try p.lexer.next();
}
- return decls.items;
+ return decls;
}
pub fn parseTypescriptEnumStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt {
@@ -12505,14 +12554,15 @@ fn NewParser_(
},
.t_question_dot => {
try p.lexer.next();
- var optional_start = js_ast.OptionalChain.start;
+ var optional_start: ?js_ast.OptionalChain = js_ast.OptionalChain.start;
- // TODO: Remove unnecessary optional chains
- // if p.options.mangleSyntax {
- // if isNullOrUndefined, _, ok := toNullOrUndefinedWithSideEffects(left.Data); ok and !isNullOrUndefined {
- // optionalStart = js_ast.OptionalChainNone
- // }
- // }
+ // Remove unnecessary optional chains
+ if (p.options.features.minify_syntax) {
+ const result = SideEffects.toNullOrUndefined(left.data);
+ if (result.ok and !result.value) {
+ optional_start = null;
+ }
+ }
switch (p.lexer.token) {
.t_open_bracket => {
@@ -12609,7 +12659,7 @@ fn NewParser_(
}
// Only continue if we have started
- if (optional_start == .start) {
+ if ((optional_start orelse .ccontinue) == .start) {
optional_start = .ccontinue;
}
},
@@ -14386,7 +14436,7 @@ fn NewParser_(
decls[0] = Decl{
.binding = p.b(B.Identifier{ .ref = ref }, local.loc),
};
- try partStmts.append(p.s(S.Local{ .decls = decls }, local.loc));
+ try partStmts.append(p.s(S.Local{ .decls = G.Decl.List.init(decls) }, local.loc));
}
}
p.relocated_top_level_vars.clearRetainingCapacity();
@@ -14537,7 +14587,7 @@ fn NewParser_(
}
},
.s_local => |st| {
- for (st.decls) |*decl| {
+ for (st.decls.slice()) |*decl| {
if (!p.bindingCanBeRemovedIfUnused(decl.binding)) {
return false;
}
@@ -17797,20 +17847,20 @@ fn NewParser_(
p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
const decls_len = if (!(data.is_export and p.options.features.replace_exports.entries.len > 0))
- p.visitDecls(data.decls, data.kind == .k_const, false)
+ p.visitDecls(data.decls.slice(), data.kind == .k_const, false)
else
- p.visitDecls(data.decls, data.kind == .k_const, true);
+ p.visitDecls(data.decls.slice(), data.kind == .k_const, true);
const is_now_dead = data.decls.len > 0 and decls_len == 0;
if (is_now_dead) {
return;
}
- data.decls.len = decls_len;
+ data.decls.len = @truncate(u32, decls_len);
// Handle being exported inside a namespace
if (data.is_export and p.enclosing_namespace_arg_ref != null) {
- for (data.decls) |*d| {
+ for (data.decls.slice()) |*d| {
if (d.value) |val| {
p.recordUsage((p.enclosing_namespace_arg_ref orelse unreachable));
// TODO: is it necessary to lowerAssign? why does esbuild do it _most_ of the time?
@@ -17827,7 +17877,7 @@ fn NewParser_(
// Edgecase:
// `export var` is skipped because it's unnecessary. That *should* be a noop, but it loses the `is_export` flag if we're in HMR.
if (data.kind == .k_var and !data.is_export) {
- const relocated = p.maybeRelocateVarsToTopLevel(data.decls, .normal);
+ const relocated = p.maybeRelocateVarsToTopLevel(data.decls.slice(), .normal);
if (relocated.ok) {
if (relocated.stmt) |new_stmt| {
stmts.append(new_stmt) catch unreachable;
@@ -17892,7 +17942,7 @@ fn NewParser_(
.kind = .k_var,
.is_export = false,
.was_commonjs_export = true,
- .decls = decls,
+ .decls = G.Decl.List.init(decls),
},
stmt.loc,
),
@@ -18088,7 +18138,7 @@ fn NewParser_(
// must be done inside the scope of the for loop or they won't be relocated.
if (data.init) |init_| {
if (init_.data == .s_local and init_.data.s_local.kind == .k_var) {
- const relocate = p.maybeRelocateVarsToTopLevel(init_.data.s_local.decls, .normal);
+ const relocate = p.maybeRelocateVarsToTopLevel(init_.data.s_local.decls.slice(), .normal);
if (relocate.stmt) |relocated| {
data.init = relocated;
}
@@ -18111,7 +18161,7 @@ fn NewParser_(
// Lower for-in variable initializers in case the output is used in strict mode
var local = data.init.data.s_local;
if (local.decls.len == 1) {
- var decl: *G.Decl = &local.decls[0];
+ var decl: *G.Decl = &local.decls.ptr[0];
if (decl.binding.data == .b_identifier) {
if (decl.value) |val| {
stmts.append(
@@ -18128,7 +18178,7 @@ fn NewParser_(
}
if (data.init.data == .s_local and data.init.data.s_local.kind == .k_var) {
- const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls, RelocateVars.Mode.for_in_or_for_of);
+ const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls.slice(), RelocateVars.Mode.for_in_or_for_of);
if (relocate.stmt) |relocated_stmt| {
data.init = relocated_stmt;
}
@@ -18143,7 +18193,7 @@ fn NewParser_(
data.body = p.visitLoopBody(data.body);
if (data.init.data == .s_local and data.init.data.s_local.kind == .k_var) {
- const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls, RelocateVars.Mode.for_in_or_for_of);
+ const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls.slice(), RelocateVars.Mode.for_in_or_for_of);
if (relocate.stmt) |relocated_stmt| {
data.init = relocated_stmt;
}
@@ -18452,7 +18502,7 @@ fn NewParser_(
switch (child_stmt.data) {
.s_local => |local| {
if (local.is_export) {
- p.markExportedDeclsInsideNamespace(data.arg, local.decls);
+ p.markExportedDeclsInsideNamespace(data.arg, local.decls.slice());
}
},
else => {},
@@ -18599,7 +18649,7 @@ fn NewParser_(
var local = p.s(
S.Local{
.is_export = true,
- .decls = decls,
+ .decls = Decl.List.init(decls),
},
loc,
);
@@ -18620,7 +18670,7 @@ fn NewParser_(
var local = p.s(
S.Local{
.is_export = true,
- .decls = decls,
+ .decls = Decl.List.init(decls),
},
loc,
);
@@ -18832,7 +18882,7 @@ fn NewParser_(
p.s(
S.Local{
.kind = .k_var,
- .decls = decls,
+ .decls = G.Decl.List.init(decls),
.is_export = is_export,
},
stmt_loc,
@@ -18844,7 +18894,7 @@ fn NewParser_(
p.s(
S.Local{
.kind = .k_let,
- .decls = decls,
+ .decls = G.Decl.List.init(decls),
},
stmt_loc,
),
@@ -19161,7 +19211,7 @@ fn NewParser_(
st.value = p.visitExprInOut(st.value, ExprIn{ .assign_target = assign_target });
},
.s_local => |st| {
- for (st.decls) |*dec| {
+ for (st.decls.slice()) |*dec| {
p.visitBinding(dec.binding, null);
if (dec.value) |val| {
dec.value = p.visitExpr(val);
@@ -19396,7 +19446,7 @@ fn NewParser_(
p.popScope();
}
- return p.stmtsToSingleStmt(stmt.loc, stmts.toOwnedSlice() catch @panic("TODO"));
+ return p.stmtsToSingleStmt(stmt.loc, stmts.items);
}
// One statement could potentially expand to several statements
@@ -19405,7 +19455,7 @@ fn NewParser_(
return Stmt{ .data = Prefill.Data.SEmpty, .loc = loc };
}
- if (stmts.len == 1 and std.meta.activeTag(stmts[0].data) != .s_local or (std.meta.activeTag(stmts[0].data) == .s_local and stmts[0].data.s_local.kind == S.Local.Kind.k_var)) {
+ if (stmts.len == 1 and !statementCaresAboutScope(stmts[0])) {
// "let" and "const" must be put in a block when in a single-statement context
return stmts[0];
}
@@ -19884,7 +19934,7 @@ fn NewParser_(
before.appendAssumeCapacity(p.s(
S.Local{
.kind = .k_let,
- .decls = let_decls.items,
+ .decls = Decl.List.fromList(let_decls),
},
let_decls.items[0].value.?.loc,
));
@@ -19900,7 +19950,7 @@ fn NewParser_(
before.appendAssumeCapacity(p.s(
S.Local{
.kind = .k_var,
- .decls = var_decls.items,
+ .decls = Decl.List.fromList(var_decls),
},
var_decls.items[0].value.?.loc,
));
@@ -19953,7 +20003,7 @@ fn NewParser_(
// if this fails it means that scope pushing/popping is not balanced
assert(p.current_scope == initial_scope);
- if (!p.options.features.inlining) {
+ if (!p.options.features.minify_syntax) {
return;
}
@@ -19971,7 +20021,7 @@ fn NewParser_(
.s_empty, .s_comment, .s_directive, .s_debugger, .s_type_script => continue,
.s_local => |local| {
if (!local.is_export and local.kind == .k_const and !local.was_commonjs_export) {
- var decls: []Decl = local.decls;
+ var decls: []Decl = local.decls.slice();
var end: usize = 0;
for (decls) |decl| {
if (decl.binding.data == .b_identifier) {
@@ -19982,7 +20032,7 @@ fn NewParser_(
decls[end] = decl;
end += 1;
}
- local.decls.len = end;
+ local.decls.len = @truncate(u32, end);
if (end == 0) {
stmt.* = stmt.*.toEmpty();
}
@@ -19997,6 +20047,8 @@ fn NewParser_(
}
}
+ var is_control_flow_dead = false;
+
// Inline single-use variable declarations where possible:
//
// // Before
@@ -20018,6 +20070,10 @@ fn NewParser_(
var output = ListManaged(Stmt).initCapacity(p.allocator, stmts.items.len) catch unreachable;
for (stmts.items) |stmt| {
+ if (is_control_flow_dead and !SideEffects.shouldKeepStmtInDeadControlFlow(stmt, p.allocator)) {
+ // Strip unnecessary statements if the control flow is dead here
+ continue;
+ }
// Keep inlining variables until a failure or until there are none left.
// That handles cases like this:
@@ -20044,7 +20100,7 @@ fn NewParser_(
break;
}
- var last: *Decl = &local.decls[local.decls.len - 1];
+ var last: *Decl = local.decls.last().?;
// The variable must be initialized, since we will be substituting
// the value into the usage.
if (last.value == null)
@@ -20088,11 +20144,101 @@ fn NewParser_(
break;
}
- if (stmt.data != .s_empty) {
- output.appendAssumeCapacity(
- stmt,
- );
+ switch (stmt.data) {
+ .s_empty => continue,
+
+ // skip directives for now
+ .s_directive => continue,
+
+ .s_local => |local| {
+ // Merge adjacent local statements
+ if (output.items.len > 0) {
+ var prev_stmt = &output.items[output.items.len - 1];
+ if (prev_stmt.data == .s_local and local.kind == prev_stmt.data.s_local.kind and local.is_export == prev_stmt.data.s_local.is_export) {
+ prev_stmt.data.s_local.decls.append(p.allocator, local.decls.slice()) catch unreachable;
+ continue;
+ }
+ }
+ },
+
+ .s_expr => |s_expr| {
+ // Merge adjacent expression statements
+ if (output.items.len > 0) {
+ var prev_stmt = &output.items[output.items.len - 1];
+ if (prev_stmt.data == .s_expr) {
+ prev_stmt.data.s_expr.does_not_affect_tree_shaking = prev_stmt.data.s_expr.does_not_affect_tree_shaking and
+ s_expr.does_not_affect_tree_shaking;
+ prev_stmt.data.s_expr.value = prev_stmt.data.s_expr.value.joinWithComma(
+ s_expr.value,
+ p.allocator,
+ );
+ continue;
+ }
+ }
+ },
+ .s_switch => |s_switch| {
+ // Absorb a previous expression statement
+ if (output.items.len > 0) {
+ var prev_stmt = &output.items[output.items.len - 1];
+ if (prev_stmt.data == .s_expr) {
+ s_switch.test_ = prev_stmt.data.s_expr.value.joinWithComma(s_switch.test_, p.allocator);
+ output.items.len -= 1;
+ }
+ }
+ },
+ .s_if => |s_if| {
+ // Absorb a previous expression statement
+ if (output.items.len > 0) {
+ var prev_stmt = &output.items[output.items.len - 1];
+ if (prev_stmt.data == .s_expr) {
+ s_if.test_ = prev_stmt.data.s_expr.value.joinWithComma(s_if.test_, p.allocator);
+ output.items.len -= 1;
+ }
+ }
+
+ // TODO: optimize jump
+ },
+
+ .s_return => |ret| {
+ // Merge return statements with the previous expression statement
+ if (output.items.len > 0 and ret.value != null) {
+ var prev_stmt = &output.items[output.items.len - 1];
+ if (prev_stmt.data == .s_expr) {
+ ret.value = prev_stmt.data.s_expr.value.joinWithComma(ret.value.?, p.allocator);
+ prev_stmt.* = stmt;
+ continue;
+ }
+ }
+
+ is_control_flow_dead = true;
+ },
+
+ .s_break, .s_continue => {
+ is_control_flow_dead = true;
+ },
+
+ .s_throw => {
+ // Merge throw statements with the previous expression statement
+ if (output.items.len > 0) {
+ var prev_stmt = &output.items[output.items.len - 1];
+ if (prev_stmt.data == .s_expr) {
+ prev_stmt.* = p.s(S.Throw{
+ .value = prev_stmt.data.s_expr.value.joinWithComma(
+ stmt.data.s_throw.value,
+ p.allocator,
+ ),
+ }, stmt.loc);
+ continue;
+ }
+ }
+
+ is_control_flow_dead = true;
+ },
+
+ else => {},
}
+
+ output.append(stmt) catch unreachable;
}
stmts.deinit();
stmts.* = output;
@@ -20815,7 +20961,7 @@ fn NewParser_(
toplevel_stmts[toplevel_stmts_i] = p.s(
S.Local{
- .decls = first_decl,
+ .decls = G.Decl.List.init(first_decl),
},
logger.Loc.Empty,
);
@@ -20873,7 +21019,7 @@ fn NewParser_(
if (named_export_i > 0) {
toplevel_stmts[toplevel_stmts_i] = p.s(
S.Local{
- .decls = exports_decls[0..named_export_i],
+ .decls = G.Decl.List.init(exports_decls[0..named_export_i]),
},
logger.Loc.Empty,
);
@@ -21023,6 +21169,17 @@ fn NewParser_(
.import_keyword = p.esm_import_keyword,
.export_keyword = p.esm_export_keyword,
.top_level_symbols_to_parts = top_level_symbols_to_parts,
+ .char_freq = p.computeCharacterFrequency(),
+
+ // Assign slots to symbols in nested scopes. This is some precomputation for
+ // the symbol renaming pass that will happen later in the linker. It's done
+ // now in the parser because we want it to be done in parallel per file and
+ // we're already executing code in parallel here
+ .nested_scope_slot_counts = if (p.options.features.minify_identifiers)
+ renamer.assignNestedScopeSlots(p.allocator, p.module_scope, p.symbols.items)
+ else
+ js_ast.SlotCounts{},
+
.require_ref = if (p.runtime_imports.__require != null)
p.runtime_imports.__require.?.ref
else
diff --git a/src/js_printer.zig b/src/js_printer.zig
index acc778b18..2fc790c79 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -494,6 +494,9 @@ pub const Options = struct {
commonjs_named_exports_ref: Ref = Ref.None,
minify_whitespace: bool = false,
+ minify_identifiers: bool = false,
+ minify_syntax: bool = false,
+ transform_only: bool = false,
require_or_import_meta_for_source_callback: RequireOrImportMeta.Callback = .{},
@@ -690,7 +693,6 @@ fn NewPrinter(
comptime Writer: type,
comptime rewrite_esm_to_cjs: bool,
comptime is_bun_platform: bool,
- comptime is_inside_bundle: bool,
comptime is_json: bool,
comptime generate_source_map: bool,
) type {
@@ -950,12 +952,20 @@ fn NewPrinter(
return .comma;
}
- pub inline fn printUndefined(p: *Printer, _: Level) void {
- // void 0 is more efficient in output size
- // however, "void 0" is the same as "undefined" is a point of confusion for many
- // since we are optimizing for development, undefined is more clear.
- // an ideal development bundler would output very readable code, even without source maps.
- p.print("undefined");
+ pub inline fn printUndefined(p: *Printer, loc: logger.Loc, level: Level) void {
+ if (p.options.minify_syntax) {
+ if (level.gte(Level.prefix)) {
+ p.addSourceMapping(loc);
+ p.print("(void 0)");
+ } else {
+ p.printSpaceBeforeIdentifier();
+ p.addSourceMapping(loc);
+ p.print("void 0");
+ }
+ } else {
+ p.addSourceMapping(loc);
+ p.print("undefined");
+ }
}
pub fn printBody(p: *Printer, stmt: Stmt) void {
@@ -1904,8 +1914,7 @@ fn NewPrinter(
switch (expr.data) {
.e_missing => {},
.e_undefined => {
- p.addSourceMapping(expr.loc);
- p.printUndefined(level);
+ p.printUndefined(expr.loc, level);
},
.e_super => {
p.printSpaceBeforeIdentifier();
@@ -2245,10 +2254,12 @@ fn NewPrinter(
}
p.printExpr(e.test_, .conditional, flags);
p.printSpace();
- p.print("? ");
+ p.print("?");
+ p.printSpace();
p.printExpr(e.yes, .yield, ExprFlag.None());
p.printSpace();
- p.print(": ");
+ p.print(":");
+ p.printSpace();
flags.insert(.forbid_in);
p.printExpr(e.no, .yield, flags);
if (wrap) {
@@ -2449,9 +2460,17 @@ fn NewPrinter(
}
},
.e_boolean => |e| {
- p.printSpaceBeforeIdentifier();
p.addSourceMapping(expr.loc);
- p.print(if (e.value) "true" else "false");
+ if (p.options.minify_syntax) {
+ if (level.gte(Level.prefix)) {
+ p.print(if (e.value) "(!0)" else "(!1)");
+ } else {
+ p.print(if (e.value) "!0" else "!1");
+ }
+ } else {
+ p.printSpaceBeforeIdentifier();
+ p.print(if (e.value) "true" else "false");
+ }
},
.e_string => |e| {
e.resovleRopeIfNeeded(p.options.allocator);
@@ -2586,13 +2605,12 @@ fn NewPrinter(
const symbol = p.symbols().get(ref).?;
if (symbol.import_item_status == .missing) {
- p.addSourceMapping(expr.loc);
- p.printUndefined(level);
+ p.printUndefined(expr.loc, level);
didPrint = true;
} else if (symbol.namespace_alias) |namespace| {
if (namespace.import_record_index < p.import_records.len) {
const import_record = p.importRecord(namespace.import_record_index);
- if ((comptime is_inside_bundle) or import_record.is_legacy_bundled or namespace.was_originally_property_access) {
+ if (import_record.is_legacy_bundled or namespace.was_originally_property_access) {
var wrap = false;
didPrint = true;
@@ -2604,6 +2622,7 @@ fn NewPrinter(
if (wrap) {
p.printWhitespacer(ws("(0, "));
}
+ p.printSpaceBeforeIdentifier();
p.addSourceMapping(expr.loc);
p.printNamespaceAlias(import_record.*, namespace);
@@ -2667,6 +2686,7 @@ fn NewPrinter(
}
if (!didPrint) {
+ p.printSpaceBeforeIdentifier();
p.addSourceMapping(expr.loc);
p.printSymbol(e.ref);
}
@@ -2835,7 +2855,7 @@ fn NewPrinter(
p.printSpaceBeforeIdentifier();
p.print(entry.text);
} else {
- p.printSpaceBeforeIdentifier();
+ p.printSpaceBeforeOperator(e.op);
p.print(entry.text);
p.prev_op = e.op;
p.prev_op_end = p.writer.written;
@@ -3576,20 +3596,10 @@ fn NewPrinter(
.s_export_default => |s| {
p.printIndent();
p.printSpaceBeforeIdentifier();
-
- if (!is_inside_bundle) {
- p.print("export default");
- }
-
- p.printSpace();
+ p.print("export default ");
switch (s.value) {
.expr => |expr| {
- // this is still necessary for JSON
- if (is_inside_bundle) {
- p.printModuleExportSymbol();
- p.@"print = "();
- }
// Functions and classes must be wrapped to avoid confusion with their statement forms
p.export_default_start = p.writer.written;
@@ -3602,12 +3612,6 @@ fn NewPrinter(
switch (s2.data) {
.s_function => |func| {
p.printSpaceBeforeIdentifier();
- if (is_inside_bundle) {
- if (func.func.name == null) {
- p.printModuleExportSymbol();
- p.@"print = "();
- }
- }
if (func.func.flags.contains(.is_async)) {
p.print("async ");
@@ -3627,30 +3631,11 @@ fn NewPrinter(
p.printFunc(func.func);
- if (is_inside_bundle) {
- p.printSemicolonAfterStatement();
- }
-
- if (is_inside_bundle) {
- if (func.func.name) |name| {
- p.printIndent();
- p.printBundledExport("default", p.renamer.nameForSymbol(name.ref.?));
- p.printSemicolonAfterStatement();
- }
- } else {
- p.printNewline();
- }
+ p.printNewline();
},
.s_class => |class| {
p.printSpaceBeforeIdentifier();
- if (is_inside_bundle) {
- if (class.class.class_name == null) {
- p.printModuleExportSymbol();
- p.@"print = "();
- }
- }
-
if (class.class.class_name) |name| {
p.print("class ");
p.printSymbol(name.ref orelse Global.panic("Internal error: Expected class to have a name ref\n{any}", .{class}));
@@ -3660,19 +3645,7 @@ fn NewPrinter(
p.printClass(class.class);
- if (is_inside_bundle) {
- p.printSemicolonAfterStatement();
- }
-
- if (is_inside_bundle) {
- if (class.class.class_name) |name| {
- p.printIndent();
- p.printBundledExport("default", p.renamer.nameForSymbol(name.ref.?));
- p.printSemicolonAfterStatement();
- }
- } else {
- p.printNewline();
- }
+ p.printNewline();
},
else => {
Global.panic("Internal error: unexpected export default stmt data {any}", .{s});
@@ -3682,30 +3655,6 @@ fn NewPrinter(
}
},
.s_export_star => |s| {
- if (is_inside_bundle) {
- p.printIndent();
- p.printSpaceBeforeIdentifier();
-
- // module.exports.react = $react();
- if (s.alias) |alias| {
- p.printBundledRexport(alias.original_name, s.import_record_index);
- p.printSemicolonAfterStatement();
-
- return;
- // module.exports = $react();
- } else {
- p.printSymbol(p.options.runtime_imports.__reExport.?.ref);
- p.print("(");
- p.printModuleExportSymbol();
- p.print(",");
-
- p.printLoadFromBundle(s.import_record_index);
-
- p.print(")");
- p.printSemicolonAfterStatement();
- return;
- }
- }
// Give an extra newline for readability
if (!prev_stmt_tag.isExportLike()) {
@@ -3737,11 +3686,7 @@ fn NewPrinter(
// Object.assign(__export, {prop1, prop2, prop3});
else => {
- if (comptime is_inside_bundle) {
- p.printSymbol(p.options.runtime_imports.__export.?.ref);
- } else {
- p.print("Object.assign");
- }
+ p.print("Object.assign");
p.print("(");
p.printModuleExportSymbol();
@@ -3757,7 +3702,7 @@ fn NewPrinter(
if (symbol.namespace_alias) |namespace| {
const import_record = p.importRecord(namespace.import_record_index);
- if (import_record.is_legacy_bundled or (comptime is_inside_bundle) or namespace.was_originally_property_access) {
+ if (import_record.is_legacy_bundled or namespace.was_originally_property_access) {
p.printIdentifier(name);
p.print(": () => ");
p.printNamespaceAlias(import_record.*, namespace);
@@ -3767,13 +3712,7 @@ fn NewPrinter(
if (!did_print) {
p.printClauseAlias(item.alias);
- if (comptime is_inside_bundle) {
- p.print(":");
- p.printSpace();
- p.print("() => ");
-
- p.printIdentifier(name);
- } else if (!strings.eql(name, item.alias)) {
+ if (!strings.eql(name, item.alias)) {
p.print(":");
p.printSpaceBeforeIdentifier();
p.printIdentifier(name);
@@ -3828,7 +3767,7 @@ fn NewPrinter(
if (p.symbols().get(item.name.ref.?)) |symbol| {
if (symbol.namespace_alias) |namespace| {
const import_record = p.importRecord(namespace.import_record_index);
- if (import_record.is_legacy_bundled or (comptime is_inside_bundle) or namespace.was_originally_property_access) {
+ if (import_record.is_legacy_bundled or namespace.was_originally_property_access) {
p.print("var ");
p.printSymbol(item.name.ref.?);
p.@"print = "();
@@ -3895,49 +3834,6 @@ fn NewPrinter(
p.printSemicolonAfterStatement();
},
.s_export_from => |s| {
- if (is_inside_bundle) {
- p.printIndent();
- // $$lz(export, $React(), {default: "React"});
- if (s.items.len == 1) {
- const item = s.items[0];
- p.printSymbol(p.options.runtime_imports.@"$$lzy".?.ref);
- p.print("(");
- p.printModuleExportSymbol();
- p.print(",");
- // Avoid initializing an entire component library because you imported one icon
- p.printLoadFromBundleWithoutCall(s.import_record_index);
- p.print(",{");
- p.printClauseAlias(item.alias);
- p.print(":");
- const name = p.renamer.nameForSymbol(item.name.ref.?);
- p.printQuotedUTF8(name, true);
- p.print("})");
-
- p.printSemicolonAfterStatement();
- // $$lz(export, $React(), {createElement: "React"});
- } else {
- p.printSymbol(p.options.runtime_imports.@"$$lzy".?.ref);
- p.print("(");
- p.printModuleExportSymbol();
- p.print(",");
-
- // Avoid initializing an entire component library because you imported one icon
- p.printLoadFromBundleWithoutCall(s.import_record_index);
- p.print(",{");
- for (s.items, 0..) |item, i| {
- p.printClauseAlias(item.alias);
- p.print(":");
- p.printQuotedUTF8(p.renamer.nameForSymbol(item.name.ref.?), true);
- if (i < s.items.len - 1) {
- p.print(",");
- }
- }
- p.print("})");
- p.printSemicolonAfterStatement();
- }
-
- return;
- }
p.printIndent();
p.printSpaceBeforeIdentifier();
@@ -4037,13 +3933,13 @@ fn NewPrinter(
.s_local => |s| {
switch (s.kind) {
.k_const => {
- p.printDeclStmt(s.is_export, "const", s.decls);
+ p.printDeclStmt(s.is_export, "const", s.decls.slice());
},
.k_let => {
- p.printDeclStmt(s.is_export, "let", s.decls);
+ p.printDeclStmt(s.is_export, "let", s.decls.slice());
},
.k_var => {
- p.printDeclStmt(s.is_export, "var", s.decls);
+ p.printDeclStmt(s.is_export, "var", s.decls.slice());
},
}
},
@@ -4353,10 +4249,6 @@ fn NewPrinter(
}
}
- if (is_inside_bundle) {
- return p.printBundledImport(record.*, s);
- }
-
if (record.do_commonjs_transform_in_printer or record.path.is_disabled) {
const require_ref = p.options.require_ref;
@@ -4428,9 +4320,11 @@ fn NewPrinter(
}
if (!record.path.is_disabled and std.mem.indexOfScalar(u32, p.imported_module_ids.items, module_id) == null) {
- p.printWhitespacer(ws("import * as "));
+ p.printWhitespacer(ws("import * as"));
+ p.print(" ");
p.printModuleId(module_id);
- p.printWhitespacer(ws(" from "));
+ p.print(" ");
+ p.printWhitespacer(ws("from "));
p.print("\"");
p.print(record.path.text);
p.print("\"");
@@ -4612,13 +4506,15 @@ fn NewPrinter(
}
p.printSpace();
- p.printWhitespacer(ws("* as "));
+ p.printWhitespacer(ws("* as"));
+ p.print(" ");
p.printSymbol(s.namespace_ref);
item_count += 1;
}
if (item_count > 0) {
- p.printWhitespacer(ws(" from "));
+ p.print(" ");
+ p.printWhitespacer(ws("from "));
}
p.printImportRecordPath(record);
@@ -4875,13 +4771,7 @@ fn NewPrinter(
return;
}
- // the require symbol may not exist in bundled code
- // it is included at the top of the file.
- if (comptime is_inside_bundle) {
- p.print("__require");
- } else {
- p.printSymbol(p.options.runtime_imports.__require.?.ref);
- }
+ p.printSymbol(p.options.runtime_imports.__require.?.ref);
// d is for default
p.print(".d(");
@@ -4928,13 +4818,13 @@ fn NewPrinter(
.s_local => |s| {
switch (s.kind) {
.k_var => {
- p.printDecls("var", s.decls, ExprFlag.Set.init(.{ .forbid_in = true }));
+ p.printDecls("var", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
.k_let => {
- p.printDecls("let", s.decls, ExprFlag.Set.init(.{ .forbid_in = true }));
+ p.printDecls("let", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
.k_const => {
- p.printDecls("const", s.decls, ExprFlag.Set.init(.{ .forbid_in = true }));
+ p.printDecls("const", s.decls.slice(), ExprFlag.Set.init(.{ .forbid_in = true }));
},
}
},
@@ -5054,23 +4944,6 @@ fn NewPrinter(
}
pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl) void {
- if (rewrite_esm_to_cjs and keyword[0] == 'v' and is_export and is_inside_bundle) {
- // this is a top-level export
- if (decls.len == 1 and
- std.meta.activeTag(decls[0].binding.data) == .b_identifier and
- decls[0].binding.data.b_identifier.ref.eql(p.options.bundle_export_ref.?))
- {
- p.print("// ");
- p.print(p.options.source_path.?.pretty);
- p.print("\nexport var $");
- std.fmt.formatInt(p.options.module_hash, 16, .lower, .{}, p) catch unreachable;
- p.@"print = "();
- p.printExpr(decls[0].value.?, .comma, ExprFlag.None());
- p.printSemicolonAfterStatement();
- return;
- }
- }
-
p.printIndent();
p.printSpaceBeforeIdentifier();
@@ -5710,7 +5583,84 @@ pub fn printAst(
opts: Options,
comptime generate_source_map: bool,
) !usize {
- var renamer = rename.NoOpRenamer.init(symbols, source);
+ var renamer: rename.Renamer = undefined;
+ var no_op_renamer: rename.NoOpRenamer = undefined;
+ var module_scope = tree.module_scope;
+ if (opts.minify_identifiers) {
+ const allocator = opts.allocator;
+ var reserved_names = try rename.computeInitialReservedNames(allocator);
+ for (module_scope.children.slice()) |child| {
+ child.parent = &module_scope;
+ }
+
+ rename.computeReservedNamesForScope(&module_scope, &symbols, &reserved_names, allocator);
+ var minify_renamer = try rename.MinifyRenamer.init(allocator, symbols, tree.nested_scope_slot_counts, reserved_names);
+
+ var top_level_symbols = rename.StableSymbolCount.Array.init(allocator);
+ defer top_level_symbols.deinit();
+
+ const uses_exports_ref = tree.uses_exports_ref;
+ const uses_module_ref = tree.uses_module_ref;
+ const exports_ref = tree.exports_ref;
+ const module_ref = tree.module_ref;
+ const parts = tree.parts;
+
+ const dont_break_the_code = .{
+ tree.module_ref,
+ tree.exports_ref,
+ tree.require_ref,
+ };
+
+ inline for (dont_break_the_code) |ref| {
+ if (symbols.get(ref)) |symbol| {
+ symbol.must_not_be_renamed = true;
+ }
+ }
+
+ for (tree.named_exports.values()) |named_export| {
+ if (symbols.get(named_export.ref)) |symbol| {
+ symbol.must_not_be_renamed = true;
+ }
+ }
+
+ if (uses_exports_ref) {
+ try minify_renamer.accumulateSymbolUseCount(&top_level_symbols, exports_ref, 1, &.{source.index.value});
+ }
+
+ if (uses_module_ref) {
+ try minify_renamer.accumulateSymbolUseCount(&top_level_symbols, module_ref, 1, &.{source.index.value});
+ }
+
+ for (parts.slice()) |part| {
+ try minify_renamer.accumulateSymbolUseCounts(&top_level_symbols, part.symbol_uses, &.{source.index.value});
+
+ for (part.declared_symbols.refs()) |declared_ref| {
+ try minify_renamer.accumulateSymbolUseCount(&top_level_symbols, declared_ref, 1, &.{source.index.value});
+ }
+ }
+
+ std.sort.sort(rename.StableSymbolCount, top_level_symbols.items, {}, rename.StableSymbolCount.lessThan);
+
+ try minify_renamer.allocateTopLevelSymbolSlots(top_level_symbols);
+ var minifier = tree.char_freq.?.compile(allocator);
+ try minify_renamer.assignNamesByFrequency(&minifier);
+
+ renamer = minify_renamer.toRenamer();
+ } else {
+ no_op_renamer = rename.NoOpRenamer.init(symbols, source);
+ renamer = no_op_renamer.toRenamer();
+ }
+
+ defer {
+ if (opts.minify_identifiers) {
+ for (&renamer.MinifyRenamer.slots.values) |*val| {
+ val.deinit();
+ }
+ renamer.MinifyRenamer.reserved_names.deinit(opts.allocator);
+ renamer.MinifyRenamer.top_level_symbol_to_slot.deinit(opts.allocator);
+ opts.allocator.destroy(renamer.MinifyRenamer);
+ }
+ }
const PrinterType = NewPrinter(
ascii_only,
@@ -5719,7 +5669,6 @@ pub fn printAst(
// if it's ascii_only, it is also bun
ascii_only,
false,
- false,
generate_source_map,
);
var writer = _writer;
@@ -5728,7 +5677,7 @@ pub fn printAst(
writer,
tree.import_records.slice(),
opts,
- renamer.toRenamer(),
+ renamer,
getSourceMapBuilder(generate_source_map, ascii_only, opts, source, &tree),
);
defer {
@@ -5736,21 +5685,21 @@ pub fn printAst(
}
if (tree.prepend_part) |part| {
for (part.stmts) |stmt| {
- printer.printSemicolonIfNeeded();
try printer.printStmt(stmt);
if (printer.writer.getError()) {} else |err| {
return err;
}
+ printer.printSemicolonIfNeeded();
}
}
for (tree.parts.slice()) |part| {
for (part.stmts) |stmt| {
- printer.printSemicolonIfNeeded();
try printer.printStmt(stmt);
if (printer.writer.getError()) {} else |err| {
return err;
}
+ printer.printSemicolonIfNeeded();
}
}
@@ -5771,7 +5720,7 @@ pub fn printJSON(
expr: Expr,
source: *const logger.Source,
) !usize {
- const PrinterType = NewPrinter(false, Writer, false, false, false, true, false);
+ const PrinterType = NewPrinter(false, Writer, false, false, true, false);
var writer = _writer;
var s_expr = S.SExpr{ .value = expr };
var stmt = Stmt{ .loc = logger.Loc.Empty, .data = .{
@@ -5862,7 +5811,6 @@ pub fn printWithWriterAndPlatform(
is_bun_platform,
false,
false,
- false,
);
var writer = _writer;
var printer = PrinterType.init(
@@ -5880,13 +5828,13 @@ pub fn printWithWriterAndPlatform(
for (parts) |part| {
for (part.stmts) |stmt| {
- printer.printSemicolonIfNeeded();
printer.printStmt(stmt) catch |err| {
return .{ .err = err };
};
if (printer.writer.getError()) {} else |err| {
return .{ .err = err };
}
+ printer.printSemicolonIfNeeded();
}
}
@@ -5910,7 +5858,7 @@ pub fn printCommonJS(
opts: Options,
comptime generate_source_map: bool,
) !usize {
- const PrinterType = NewPrinter(ascii_only, Writer, true, false, false, false, generate_source_map);
+ const PrinterType = NewPrinter(ascii_only, Writer, true, false, false, generate_source_map);
var writer = _writer;
var renamer = rename.NoOpRenamer.init(symbols, source);
var printer = PrinterType.init(
@@ -5926,20 +5874,20 @@ pub fn printCommonJS(
if (tree.prepend_part) |part| {
for (part.stmts) |stmt| {
- printer.printSemicolonIfNeeded();
try printer.printStmt(stmt);
if (printer.writer.getError()) {} else |err| {
return err;
}
+ printer.printSemicolonIfNeeded();
}
}
for (tree.parts.slice()) |part| {
for (part.stmts) |stmt| {
- printer.printSemicolonIfNeeded();
try printer.printStmt(stmt);
if (printer.writer.getError()) {} else |err| {
return err;
}
+ printer.printSemicolonIfNeeded();
}
}
@@ -5989,7 +5937,7 @@ pub fn printCommonJSThreaded(
comptime getPos: fn (ctx: GetPosType) anyerror!u64,
end_off_ptr: *u32,
) !WriteResult {
- const PrinterType = NewPrinter(ascii_only, Writer, true, ascii_only, true, false, false);
+ const PrinterType = NewPrinter(ascii_only, Writer, true, ascii_only, false, false);
var writer = _writer;
var renamer = rename.NoOpRenamer.init(symbols, source);
var printer = PrinterType.init(
@@ -6005,21 +5953,21 @@ pub fn printCommonJSThreaded(
}
if (tree.prepend_part) |part| {
for (part.stmts) |stmt| {
- printer.printSemicolonIfNeeded();
try printer.printStmt(stmt);
if (printer.writer.getError()) {} else |err| {
return err;
}
+ printer.printSemicolonIfNeeded();
}
}
for (tree.parts.slice()) |part| {
for (part.stmts) |stmt| {
- printer.printSemicolonIfNeeded();
try printer.printStmt(stmt);
if (printer.writer.getError()) {} else |err| {
return err;
}
+ printer.printSemicolonIfNeeded();
}
}
diff --git a/src/options.zig b/src/options.zig
index 4d03e8abf..48b580746 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -1375,6 +1375,7 @@ pub const BundleOptions = struct {
env: Env = Env{},
transform_options: Api.TransformOptions,
polyfill_node_globals: bool = true,
+ transform_only: bool = false,
rewrite_jest_for_tests: bool = false,
@@ -1395,6 +1396,7 @@ pub const BundleOptions = struct {
inlining: bool = false,
minify_whitespace: bool = false,
minify_syntax: bool = false,
+ minify_identifiers: bool = false,
/// This is a list of packages which even when require() is used, we will
/// instead convert to ESM import statements.
diff --git a/src/renamer.zig b/src/renamer.zig
index 777b3beb5..2775986ff 100644
--- a/src/renamer.zig
+++ b/src/renamer.zig
@@ -11,6 +11,7 @@ const default_allocator = bun.default_allocator;
const C = bun.C;
const std = @import("std");
const Ref = @import("./ast/base.zig").Ref;
+const RefCtx = @import("./ast/base.zig").RefCtx;
const logger = @import("root").bun.logger;
const JSLexer = @import("./js_lexer.zig");
@@ -48,6 +49,7 @@ pub const NoOpRenamer = struct {
pub const Renamer = union(enum) {
NumberRenamer: *NumberRenamer,
NoOpRenamer: *NoOpRenamer,
+ MinifyRenamer: *MinifyRenamer,
pub fn symbols(this: Renamer) js_ast.Symbol.Map {
return switch (this) {
@@ -70,11 +72,394 @@ pub const Renamer = union(enum) {
pub fn deinit(renamer: Renamer) void {
switch (renamer) {
.NumberRenamer => |r| r.deinit(),
+ .MinifyRenamer => |r| r.deinit(),
else => {},
}
}
};
+pub const SymbolSlot = struct {
+ // Most minified names are under 15 bytes
+ // Instead of allocating a string for every symbol slot
+ // We can store the string inline!
+ // But we have to be very careful of where it's used.
+ // Or we WILL run into memory bugs.
+ name: TinyString = TinyString{ .string = "" },
+ count: u32 = 0,
+ needs_capital_for_jsx: bool = false,
+
+ pub const List = std.EnumArray(js_ast.Symbol.SlotNamespace, std.ArrayList(SymbolSlot));
+
+ pub const InlineString = struct {
+ bytes: [15]u8 = [_]u8{0} ** 15,
+ len: u8 = 0,
+
+ pub fn init(str: []const u8) InlineString {
+ var this: InlineString = .{};
+ this.len = @intCast(u8, @min(str.len, 15));
+ for (this.bytes[0..this.len], str[0..this.len]) |*b, c| {
+ b.* = c;
+ }
+ return this;
+ }
+
+ // do not make this *const or you will run into memory bugs.
+ // we cannot let the compiler decide to copy this struct because
+ // that would cause this to become a pointer to stack memory.
+ pub fn slice(this: *InlineString) string {
+ return this.bytes[0..this.len];
+ }
+ };
+
+ pub const TinyString = union(enum) {
+ inline_string: InlineString,
+ string: string,
+
+ pub fn init(input: string, allocator: std.mem.Allocator) !TinyString {
+ if (input.len <= 15) {
+ return TinyString{ .inline_string = InlineString.init(input) };
+ } else {
+ return TinyString{ .string = try allocator.dupe(u8, input) };
+ }
+ }
+
+ // do not make this *const or you will run into memory bugs.
+ // we cannot let the compiler decide to copy this struct because
+ // that would cause this to become a pointer to stack memory.
+ pub fn slice(this: *TinyString) string {
+ return switch (this.*) {
+ .inline_string => this.inline_string.slice(),
+ .string => this.string,
+ };
+ }
+ };
+};
+
+pub const MinifyRenamer = struct {
+ reserved_names: bun.StringHashMapUnmanaged(u32),
+ allocator: std.mem.Allocator,
+ slots: SymbolSlot.List = undefined,
+ top_level_symbol_to_slot: TopLevelSymbolSlotMap,
+ symbols: js_ast.Symbol.Map,
+
+ pub const TopLevelSymbolSlotMap = std.HashMapUnmanaged(Ref, usize, RefCtx, 80);
+
+ pub fn init(
+ allocator: std.mem.Allocator,
+ symbols: js_ast.Symbol.Map,
+ first_top_level_slots: js_ast.SlotCounts,
+ reserved_names: bun.StringHashMapUnmanaged(u32),
+ ) !*MinifyRenamer {
+ var renamer = try allocator.create(MinifyRenamer);
+ var slots = SymbolSlot.List.initUndefined();
+
+ for (first_top_level_slots.slots.values, 0..) |count, ns| {
+ slots.values[ns] = try std.ArrayList(SymbolSlot).initCapacity(allocator, count);
+ slots.values[ns].items.len = count;
+ std.mem.set(SymbolSlot, slots.values[ns].items[0..count], SymbolSlot{});
+ }
+
+ renamer.* = MinifyRenamer{
+ .symbols = symbols,
+ .reserved_names = reserved_names,
+ .slots = slots,
+ .top_level_symbol_to_slot = TopLevelSymbolSlotMap{},
+ .allocator = allocator,
+ };
+
+ return renamer;
+ }
+
+ pub fn deinit(this: *MinifyRenamer) void {
+ _ = this;
+ }
+
+ pub fn toRenamer(this: *MinifyRenamer) Renamer {
+ return .{
+ .MinifyRenamer = this,
+ };
+ }
+
+ pub fn nameForSymbol(this: *MinifyRenamer, _ref: Ref) string {
+ const ref = this.symbols.follow(_ref);
+ const symbol = this.symbols.get(ref).?;
+
+ const ns = symbol.slotNamespace();
+ if (ns == .must_not_be_renamed) {
+ return symbol.original_name;
+ }
+
+ const i = symbol.nestedScopeSlot() orelse
+ this.top_level_symbol_to_slot.get(ref) orelse
+ return symbol.original_name;
+
+ // This has to be a pointer because the string might be stored inline
+ return this.slots.getPtr(ns).items[i].name.slice();
+ }
+
+ pub fn originalName(this: *MinifyRenamer, ref: Ref) ?string {
+ _ = ref;
+ _ = this;
+ return null;
+ }
+
+ pub fn accumulateSymbolUseCounts(
+ this: *MinifyRenamer,
+ top_level_symbols: *StableSymbolCount.Array,
+ symbol_uses: js_ast.Part.SymbolUseMap,
+ stable_source_indices: []const u32,
+ ) !void {
+ // NOTE: This function is run in parallel. Make sure to avoid data races.
+ var iter = symbol_uses.iterator();
+ while (iter.next()) |value| {
+ try this.accumulateSymbolUseCount(top_level_symbols, value.key_ptr.*, value.value_ptr.*.count_estimate, stable_source_indices);
+ }
+ }
+
+ pub fn accumulateSymbolUseCount(
+ this: *MinifyRenamer,
+ top_level_symbols: *StableSymbolCount.Array,
+ _ref: Ref,
+ count: u32,
+ stable_source_indices: []const u32,
+ ) !void {
+ var ref = this.symbols.follow(_ref);
+ var symbol = this.symbols.get(ref).?;
+
+ while (symbol.namespace_alias != null) {
+ const ref_ = this.symbols.follow(symbol.namespace_alias.?.namespace_ref);
+ if (ref_.eql(ref)) break;
+ ref = ref_;
+ symbol = this.symbols.get(ref_).?;
+ }
+
+ const ns = symbol.slotNamespace();
+ if (ns == .must_not_be_renamed) return;
+
+ if (symbol.nestedScopeSlot()) |i| {
+ var slot = &this.slots.getPtr(ns).items[i];
+ slot.count += count;
+ if (symbol.must_start_with_capital_letter_for_jsx) {
+ slot.needs_capital_for_jsx = true;
+ }
+ return;
+ }
+
+ try top_level_symbols.append(StableSymbolCount{
+ .stable_source_index = stable_source_indices[ref.sourceIndex()],
+ .ref = ref,
+ .count = count,
+ });
+ }
+
+ pub fn allocateTopLevelSymbolSlots(this: *MinifyRenamer, top_level_symbols: StableSymbolCount.Array) !void {
+ for (top_level_symbols.items) |stable| {
+ const symbol = this.symbols.get(stable.ref).?;
+ var slots = this.slots.getPtr(symbol.slotNamespace());
+
+ var existing = try this.top_level_symbol_to_slot.getOrPut(this.allocator, stable.ref);
+ if (existing.found_existing) {
+ var slot = &slots.items[existing.value_ptr.*];
+ slot.count += stable.count;
+ if (symbol.must_start_with_capital_letter_for_jsx) {
+ slot.needs_capital_for_jsx = true;
+ }
+ } else {
+ existing.value_ptr.* = slots.items.len;
+ try slots.append(SymbolSlot{
+ .count = stable.count,
+ .needs_capital_for_jsx = symbol.must_start_with_capital_letter_for_jsx,
+ });
+ }
+ }
+ }
+
+ pub fn assignNamesByFrequency(this: *MinifyRenamer, name_minifier: *js_ast.NameMinifier) !void {
+ var name_buf = try std.ArrayList(u8).initCapacity(this.allocator, 64);
+ defer name_buf.deinit();
+
+ var sorted = SlotAndCount.Array.init(this.allocator);
+ defer sorted.deinit();
+
+ inline for (comptime std.enums.values(js_ast.Symbol.SlotNamespace)) |ns| {
+ var slots = this.slots.getPtr(ns);
+ sorted.clearRetainingCapacity();
+ try sorted.ensureUnusedCapacity(slots.items.len);
+ sorted.items.len = slots.items.len;
+
+ for (sorted.items, 0..) |*slot, i| {
+ slot.* = SlotAndCount{
+ .slot = @intCast(u32, i),
+ .count = slot.count,
+ };
+ }
+ std.sort.sort(SlotAndCount, sorted.items, {}, SlotAndCount.lessThan);
+
+ var next_name: isize = 0;
+
+ for (sorted.items) |data| {
+ var slot = &slots.items[data.slot];
+
+ try name_minifier.numberToMinifiedName(&name_buf, next_name);
+ next_name += 1;
+
+ // Make sure we never generate a reserved name. We only have to worry
+ // about collisions with reserved identifiers for normal symbols, and we
+ // only have to worry about collisions with keywords for labels. We do
+ // not have to worry about either for private names because they start
+ // with a "#" character.
+ switch (comptime ns) {
+ .default => {
+ while (this.reserved_names.contains(name_buf.items)) {
+ try name_minifier.numberToMinifiedName(&name_buf, next_name);
+ next_name += 1;
+ }
+
+ if (slot.needs_capital_for_jsx) {
+ while (name_buf.items[0] >= 'a' and name_buf.items[0] <= 'z') {
+ try name_minifier.numberToMinifiedName(&name_buf, next_name);
+ next_name += 1;
+ }
+ }
+ },
+ .label => {
+ while (JSLexer.Keywords.get(name_buf.items)) |_| {
+ try name_minifier.numberToMinifiedName(&name_buf, next_name);
+ next_name += 1;
+ }
+ },
+ .private_name => {
+ try name_buf.insert(0, '#');
+ },
+ else => {},
+ }
+
+ slot.name = SymbolSlot.TinyString.init(name_buf.items, this.allocator) catch unreachable;
+ }
+ }
+ }
+};
+
/// Walk every scope nested under `module_scope` and assign each renamable
/// symbol a per-namespace slot number. Returns the maximum slot count seen
/// in each namespace. Module-scope symbols themselves are never slotted.
pub fn assignNestedScopeSlots(allocator: std.mem.Allocator, module_scope: *js_ast.Scope, symbols: []js_ast.Symbol) js_ast.SlotCounts {
    var counts = js_ast.SlotCounts{};
    var scratch = std.ArrayList(u32).init(allocator);
    defer scratch.deinit();

    // Temporarily mark every top-level symbol's nested scope slot as valid so
    // nested scopes skip them. This prevents us from accidentally assigning
    // nested scope slots to variables declared with "var" inside a nested
    // scope that are actually hoisted up to the module scope and become
    // top-level symbols.
    const valid_slot: u32 = 0;
    {
        var it = module_scope.members.valueIterator();
        while (it.next()) |member| {
            symbols[member.ref.innerIndex()].nested_scope_slot = valid_slot;
        }
        for (module_scope.generated.slice()) |ref| {
            symbols[ref.innerIndex()].nested_scope_slot = valid_slot;
        }
    }

    for (module_scope.children.slice()) |child_scope| {
        counts.unionMax(assignNestedScopeSlotsHelper(&scratch, child_scope, symbols, js_ast.SlotCounts{}));
    }

    // Undo the temporary marker: top-level symbols are not supposed to carry
    // a nested scope slot.
    {
        var it = module_scope.members.valueIterator();
        while (it.next()) |member| {
            symbols[member.ref.innerIndex()].nested_scope_slot = js_ast.Symbol.invalid_nested_scope_slot;
        }
        for (module_scope.generated.slice()) |ref| {
            symbols[ref.innerIndex()].nested_scope_slot = js_ast.Symbol.invalid_nested_scope_slot;
        }
    }

    return counts;
}
+
/// Recursive worker for `assignNestedScopeSlots`. Assigns slots to this
/// scope's members, generated symbols, and label (in that order), then
/// recurses into child scopes. Sibling scopes restart from the same counts;
/// the returned value is the element-wise maximum over all of them.
pub fn assignNestedScopeSlotsHelper(sorted_members: *std.ArrayList(u32), scope: *js_ast.Scope, symbols: []js_ast.Symbol, slot_to_copy: js_ast.SlotCounts) js_ast.SlotCounts {
    var slot = slot_to_copy;

    {
        // Copy the member map's inner indices into a scratch buffer and sort
        // them so slot assignment is deterministic across runs.
        const member_count = scope.members.count();
        sorted_members.clearRetainingCapacity();
        sorted_members.ensureUnusedCapacity(member_count) catch unreachable;
        sorted_members.items.len = member_count;

        var it = scope.members.valueIterator();
        var next: usize = 0;
        while (it.next()) |member| : (next += 1) {
            sorted_members.items[next] = member.ref.innerIndex();
        }
        std.sort.sort(u32, sorted_members.items, {}, std.sort.asc(u32));

        // Only assign when the symbol has no slot yet: nested scopes hold
        // copies of symbols from parent scopes, and the parent's slot must
        // win over any child scope's.
        for (sorted_members.items) |inner_index| {
            var symbol = &symbols[inner_index];
            const ns = symbol.slotNamespace();
            if (ns != .must_not_be_renamed and symbol.nestedScopeSlot() == null) {
                symbol.nested_scope_slot = slot.slots.get(ns);
                slot.slots.getPtr(ns).* += 1;
            }
        }
    }

    for (scope.generated.slice()) |generated_ref| {
        var symbol = &symbols[generated_ref.innerIndex()];
        const ns = symbol.slotNamespace();
        if (ns != .must_not_be_renamed and symbol.nestedScopeSlot() == null) {
            symbol.nested_scope_slot = slot.slots.get(ns);
            slot.slots.getPtr(ns).* += 1;
        }
    }

    // Labels are always declared in a nested scope, so there is no
    // "already assigned" check to make here.
    if (scope.label_ref) |label_ref| {
        var symbol = &symbols[label_ref.innerIndex()];
        const ns = js_ast.Symbol.SlotNamespace.label;
        symbol.nested_scope_slot = slot.slots.get(ns);
        slot.slots.getPtr(ns).* += 1;
    }

    // Recurse into children; each child restarts from this scope's counts.
    var result = slot;
    for (scope.children.slice()) |child_scope| {
        result.unionMax(assignNestedScopeSlotsHelper(sorted_members, child_scope, symbols, slot));
    }

    return result;
}
+
/// A symbol reference paired with its use count and a stable source index,
/// used to order symbols deterministically when assigning minified names.
pub const StableSymbolCount = struct {
    stable_source_index: u32,
    ref: Ref,
    count: u32,

    pub const Array = std.ArrayList(StableSymbolCount);

    /// Sort order: higher use count first; ties break toward the lower
    /// stable source index, then toward the lower inner ref index.
    pub fn lessThan(_: void, i: StableSymbolCount, j: StableSymbolCount) bool {
        if (i.count != j.count) return i.count > j.count;
        if (i.stable_source_index != j.stable_source_index)
            return i.stable_source_index < j.stable_source_index;
        return i.ref.innerIndex() < j.ref.innerIndex();
    }
};
+
/// A slot index paired with its total symbol use count, used to sort the
/// most-used slots first so they receive the shortest minified names.
const SlotAndCount = packed struct {
    slot: u32,
    count: u32,

    pub const Array = std.ArrayList(SlotAndCount);

    /// Higher count sorts first; equal counts break the tie toward the
    /// lower slot index so the ordering is deterministic.
    pub fn lessThan(_: void, a: SlotAndCount, b: SlotAndCount) bool {
        if (a.count != b.count) return a.count > b.count;
        return a.slot < b.slot;
    }
};
+
pub const NumberRenamer = struct {
symbols: js_ast.Symbol.Map,
names: []bun.BabyList(string) = &.{},
@@ -429,6 +814,7 @@ pub const NumberRenamer = struct {
pub const ExportRenamer = struct {
string_buffer: bun.MutableString,
used: bun.StringHashMap(u32),
+ count: isize = 0,
pub fn init(allocator: std.mem.Allocator) ExportRenamer {
return ExportRenamer{
@@ -474,6 +860,12 @@ pub const ExportRenamer = struct {
return entry.key_ptr.*;
}
+
    /// Returns a minified name derived from the current counter value and
    /// advances the counter, so successive calls yield distinct names.
    /// The name is allocated from `allocator` — presumably the caller owns
    /// and frees it; confirm against `defaultNumberToMinifiedName`.
    pub fn nextMinifiedName(this: *ExportRenamer, allocator: std.mem.Allocator) !string {
        const name = try js_ast.NameMinifier.defaultNumberToMinifiedName(allocator, this.count);
        this.count += 1;
        return name;
    }
};
pub fn computeInitialReservedNames(
diff --git a/src/runtime.zig b/src/runtime.zig
index fe95ba2ba..96a51699e 100644
--- a/src/runtime.zig
+++ b/src/runtime.zig
@@ -297,6 +297,7 @@ pub const Runtime = struct {
commonjs_named_exports: bool = true,
minify_syntax: bool = false,
+ minify_identifiers: bool = false,
/// Instead of jsx("div", {}, void 0)
/// ->
diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts
index 5a0802d13..94d476a0c 100644
--- a/test/bundler/expectBundled.ts
+++ b/test/bundler/expectBundled.ts
@@ -322,9 +322,6 @@ export function expectBundled(
if (!ESBUILD && keepNames) {
throw new Error("keepNames not implemented in bun build");
}
- if (!ESBUILD && minifyIdentifiers) {
- throw new Error("minifyIdentifiers not implemented in bun build");
- }
if (!ESBUILD && mainFields) {
throw new Error("mainFields not implemented in bun build");
}