const Bundler = bun.Bundler; const GenerateNodeModulesBundle = @This(); const bun = @import("bun"); const string = bun.string; const Output = bun.Output; const Global = bun.Global; const Environment = bun.Environment; const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const FeatureFlags = bun.FeatureFlags; const C = bun.C; const std = @import("std"); const lex = bun.js_lexer; const logger = @import("bun").logger; const options = @import("../options.zig"); const js_parser = bun.js_parser; const json_parser = bun.JSON; const js_printer = bun.js_printer; const js_ast = bun.JSAst; const linker = @import("../linker.zig"); const Ref = @import("../ast/base.zig").Ref; const Define = @import("../defines.zig").Define; const DebugOptions = @import("../cli.zig").Command.DebugOptions; const ThreadPoolLib = @import("../thread_pool.zig"); const Fs = @import("../fs.zig"); const schema = @import("../api/schema.zig"); const Api = schema.Api; const _resolver = @import("../resolver/resolver.zig"); const sync = @import("../sync.zig"); const ImportRecord = @import("../import_record.zig").ImportRecord; const allocators = @import("../allocators.zig"); const MimeType = @import("../http/mime_type.zig"); const resolve_path = @import("../resolver/resolve_path.zig"); const runtime = @import("../runtime.zig"); const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; const MacroRemap = @import("../resolver/package_json.zig").MacroMap; const DebugLogs = _resolver.DebugLogs; const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle; const Router = @import("../router.zig"); const isPackagePath = _resolver.isPackagePath; const Lock = @import("../lock.zig").Lock; const NodeFallbackModules = @import("../node_fallbacks.zig"); const CacheEntry = @import("../cache.zig").FsCacheEntry; const Analytics = @import("../analytics/analytics_thread.zig"); const URL = @import("../url.zig").URL; const Report = @import("../report.zig"); const Linker = linker.Linker; const Resolver = _resolver.Resolver; const TOML = @import("../toml/toml_parser.zig").TOML; const EntryPoints = @import("./entry_points.zig"); const BunQueue = sync.Channel(PendingImports, .Dynamic); const GenerateNodeModuleBundle = @This(); const ThisBundler = bun.Bundler; const JSC = @import("bun").JSC; pub const ThreadPool = struct { pool: ThreadPoolLib = undefined, // Hardcode 512 as max number of threads for now. workers: [512]Worker = undefined, workers_used: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), cpu_count: u32 = 0, started_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), stopped_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), completed_count: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), pending_count: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), generator: *GenerateNodeModuleBundle = undefined, pub fn start(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void { generator.bundler.env.loadProcess(); this.generator = generator; this.cpu_count = @truncate(u32, @divFloor((try std.Thread.getCpuCount()) + 1, 2)); if (generator.bundler.env.map.get("GOMAXPROCS")) |max_procs| { if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count| { this.cpu_count = std.math.min(this.cpu_count, cpu_count); } else |_| {} } this.pool = ThreadPoolLib.init(.{ .max_threads = this.cpu_count, }); this.pool.on_thread_spawn = Worker.onSpawn; this.pool.threadpool_context = this; var workers_used: u32 = 0; while (workers_used < this.cpu_count) : (workers_used += 1) { try this.workers[workers_used].init(generator); } if (workers_used > 0) this.pool.forceSpawn(); } pub fn wait(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void { while (true) { while (this.generator.queue.tryReadItem() catch null) |queue| { var iter = queue.iterator(); var batch = ThreadPoolLib.Batch{}; var count: u32 = 0; while (iter.next()) |entry| { const module_id: u32 = entry.key_ptr.*; const exists = generator.enqueued_map.getOrPut(module_id) catch unreachable; if (exists.found_existing) { continue; } batch.push(ThreadPoolLib.Batch.from(&entry.value_ptr.*.task)); count += 1; } _ = this.pending_count.fetchAdd(count, .Monotonic); this.pool.schedule(batch); } if (this.completed_count.load(.Monotonic) > 0 and this.completed_count.load(.Monotonic) == this.pending_count.load(.Monotonic)) { break; } std.atomic.spinLoopHint(); } const workers: []const Worker = this.workers[0..this.workers_used.loadUnchecked()]; for (workers) |worker| { this.generator.estimated_input_lines_of_code += worker.data.estimated_input_lines_of_code; try worker.data.log.appendTo(this.generator.log); } } pub const Task = struct { result: _resolver.Result, generator: *GenerateNodeModuleBundle, }; pub const Worker = struct { thread_id: std.Thread.Id, thread: std.Thread, allocator: std.mem.Allocator, generator: *GenerateNodeModuleBundle, data: *WorkerData = undefined, quit: bool = false, has_notify_started: bool = false, pub const WorkerData = struct { shared_buffer: MutableString = undefined, scan_pass_result: js_parser.ScanPassResult = undefined, log: *logger.Log, estimated_input_lines_of_code: usize = 0, macro_context: js_ast.Macro.MacroContext, bundler: Bundler = undefined, pub fn deinit(this: *WorkerData, allocator: std.mem.Allocator) void { this.shared_buffer.deinit(); this.scan_pass_result.named_imports.deinit(); this.scan_pass_result.import_records.deinit(); allocator.destroy(this); } }; pub fn init(worker: *Worker, generator: *GenerateNodeModuleBundle) !void { worker.generator = generator; worker.allocator = generator.allocator; } pub fn onSpawn(ctx: ?*anyopaque) ?*anyopaque { var pool = @ptrCast(*ThreadPool, @alignCast(@alignOf(*ThreadPool), ctx.?)); const id = pool.workers_used.fetchAdd(1, .Monotonic); pool.workers[id].run(); return &pool.workers[id]; } pub fn notifyStarted(this: *Worker) void { if (!this.has_notify_started) { this.has_notify_started = true; _ = this.generator.pool.started_workers.fetchAdd(1, .Release); std.Thread.Futex.wake(&this.generator.pool.started_workers, std.math.maxInt(u32)); } } pub fn run(this: *Worker) void { Output.Source.configureThread(); this.thread_id = std.Thread.getCurrentId(); if (Environment.isDebug) { Output.prettyln("Thread started.\n", .{}); } js_ast.Expr.Data.Store.create(this.generator.allocator); js_ast.Stmt.Data.Store.create(this.generator.allocator); this.data = this.generator.allocator.create(WorkerData) catch unreachable; this.data.* = WorkerData{ .log = this.generator.allocator.create(logger.Log) catch unreachable, .estimated_input_lines_of_code = 0, .macro_context = js_ast.Macro.MacroContext.init(this.generator.bundler), }; this.data.log.* = logger.Log.init(this.generator.allocator); this.data.shared_buffer = MutableString.init(this.generator.allocator, 0) catch unreachable; this.data.scan_pass_result = js_parser.ScanPassResult.init(this.generator.allocator); this.data.bundler = this.generator.bundler.*; var bundler_ptr = &this.data.bundler; const CacheSet = @import("../cache.zig"); // no funny business mr. cache bundler_ptr.resolver.caches = CacheSet.Set.init(this.allocator); bundler_ptr.linker.resolver = &bundler_ptr.resolver; bundler_ptr.log = this.data.log; bundler_ptr.linker.log = this.data.log; bundler_ptr.linker.resolver.log = this.data.log; } pub const ProcessFileTask = struct { resolution: _resolver.Result, task: ThreadPoolLib.Task = .{ .callback = &callback }, pub fn callback(task: *ThreadPoolLib.Task) void { var worker = @ptrCast( *ThreadPool.Worker, @alignCast( @alignOf(*ThreadPool.Worker), ThreadPoolLib.Thread.current.?.ctx.?, ), ); var process: *ProcessFileTask = @fieldParentPtr(ProcessFileTask, "task", task); worker.generator.processFile( worker, &worker.data.bundler, process.resolution, ) catch {}; _ = worker.generator.pool.completed_count.fetchAdd(1, .Monotonic); } }; }; }; write_lock: Lock, log_lock: Lock = Lock.init(), module_list: std.ArrayList(Api.JavascriptBundledModule), package_list: std.ArrayList(Api.JavascriptBundledPackage), header_string_buffer: MutableString, // Just need to know if we've already enqueued this one package_list_map: std.AutoHashMap(u64, u32), enqueued_map: std.AutoHashMap(u32, void), queue: BunQueue, bundler: *ThisBundler, allocator: std.mem.Allocator, tmpfile: std.fs.File, log: *logger.Log, pool: *ThreadPool, tmpfile_byte_offset: u32 = 0, code_end_byte_offset: u32 = 0, has_jsx: bool = false, estimated_input_lines_of_code: usize = 0, work_waiter: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), list_lock: Lock = Lock.init(), dynamic_import_file_size_store: U32Map, dynamic_import_file_size_store_lock: Lock, always_bundled_package_hashes: []u32 = &[_]u32{}, always_bundled_package_jsons: []*const PackageJSON = &.{}, always_bundled_booleans: []bool = &.{}, package_bundle_map: options.BundlePackage.Map = options.BundlePackage.Map{}, const U32Map = std.AutoHashMap(u32, u32); pub const current_version: u32 = 1; const dist_index_js_string_pointer = Api.StringPointer{ .length = "dist/index.js".len }; const index_js_string_pointer = Api.StringPointer{ .length = "index.js".len, .offset = "dist/".len }; fn upsert(this: *GenerateNodeModuleBundle, module_id: u32, resolve: _resolver.Result) !void { var dedupe = try this.enqueued_map.getOrPut(module_id); if (dedupe.found_existing) return; var task = try this.allocator.create(ThreadPool.Worker.ProcessFileTask); task.* = ThreadPool.Worker.ProcessFileTask{ .resolution = resolve, }; _ = this.pool.pending_count.fetchAdd(1, .Monotonic); this.pool.pool.schedule(ThreadPoolLib.Batch.from(&task.task)); } pub fn ensurePathIsAllocated(this: *GenerateNodeModuleBundle, path_: ?*Fs.Path) !void { var path = path_ orelse return; const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file; if (!loader.isJavaScriptLikeOrJSON()) return; path.* = try path.dupeAlloc(this.allocator); } pub fn enqueueItem(this: *GenerateNodeModuleBundle, resolve: _resolver.Result) !void { var result = resolve; var path = result.path() orelse return; const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file; if (!loader.isJavaScriptLikeOrJSON()) return; path.* = try path.dupeAlloc(this.allocator); if (BundledModuleData.get(this, &result)) |mod| { try this.upsert(mod.module_id, result); } else { try this.upsert(result.hash(this.bundler.fs.top_level_dir, loader), result); } } // The bun Bundle Format // All the node_modules your app uses in a single compact file with metadata // A binary JavaScript bundle format prioritizing generation time and deserialization time pub const magic_bytes = "#!/usr/bin/env bun\n\n"; // This makes it possible to do ./path-to-bundle on posix systems so you can see the raw JS contents // https://en.wikipedia.org/wiki/Magic_number_(programming)#In_files // Immediately after the magic bytes, the next character is a uint32 followed by a newline // 0x00000000\n // That uint32 denotes the byte offset in the file where the code for the bundle ends // - If the value is 0, that means the file did not finish writing or there are no modules // - This imposes a maximum bundle size of around 4,294,967,295 bytes. If your JS is more than 4 GB, it won't work. // The raw JavaScript is encoded as a UTF-8 string starting from the current position + 1 until the above byte offset. // This uint32 is useful for HTTP servers to separate: // - Which part of the bundle is the JS code? // - Which part is the metadata? // Without needing to do a full pass through the file, or necessarily care about the metadata. // The metadata is at the bottom of the file instead of the top because the metadata is written after all JS code in the bundle is written. // The rationale there is: // 1. We cannot prepend to a file without rewriting the entire file // 2. The metadata is variable-length and that format will change often. // 3. We won't have all the metadata until after all JS is finished writing // If you have 32 MB of JavaScript dependencies, you really want to avoid reading the code in memory. // - This lets you seek to the specific position in the file. // - HTTP servers should use sendfile() instead of copying the file to userspace memory. // So instead, we append metadata to the file after printing each node_module // When there are no more modules to process, we generate the metadata // To find the metadata, you look at the byte offset: initial_header[magic_bytes.len..initial_header.len - 1] // Then, you add that number to initial_header.len const initial_header = brk: { var buf = std.mem.zeroes([magic_bytes.len + 5]u8); // std.mem.copy(u8, &buf, magic_bytes); // var remainder = buf[magic_bytes.len..]; // // Write an invalid byte offset to be updated after we finish generating the code // std.mem.writeIntNative(u32, remainder[0 .. remainder.len - 1], 0); // buf[buf.len - 1] = '\n'; break :brk buf; }; const code_start_byte_offset: u32 = initial_header.len; // The specifics of the metadata is not documented here. You can find it in src/api/schema.peechy. pub fn appendHeaderString(generator: *GenerateNodeModuleBundle, str: string) !Api.StringPointer { // This is so common we might as well just reuse it // Plus this is one machine word so it's a quick comparison if (strings.eqlComptime(str, "index.js")) { return index_js_string_pointer; } else if (strings.eqlComptime(str, "dist/index.js")) { return dist_index_js_string_pointer; } var offset = generator.header_string_buffer.list.items.len; try generator.header_string_buffer.append(str); return Api.StringPointer{ .offset = @truncate(u32, offset), .length = @truncate(u32, str.len), }; } pub fn generate( bundler: *ThisBundler, allocator: std.mem.Allocator, framework_config: ?Api.LoadedFramework, route_config: ?Api.LoadedRouteConfig, destination: [*:0]const u8, estimated_input_lines_of_code: *usize, package_bundle_map: options.BundlePackage.Map, ) !?Api.JavascriptBundleContainer { _ = try bundler.fs.fs.openTmpDir(); var tmpname_buf: [64]u8 = undefined; bundler.resetStore(); try bundler.configureDefines(); const tmpname = try bundler.fs.tmpname( ".bun", std.mem.span(&tmpname_buf), std.hash.Wyhash.hash(@intCast(usize, std.time.milliTimestamp()) % std.math.maxInt(u32), std.mem.span(destination)), ); var tmpfile = Fs.FileSystem.RealFS.Tmpfile{}; try tmpfile.create(&bundler.fs.fs, tmpname); errdefer tmpfile.closeAndDelete(tmpname); var generator = try allocator.create(GenerateNodeModuleBundle); var queue = BunQueue.init(allocator); defer allocator.destroy(generator); generator.* = GenerateNodeModuleBundle{ .module_list = std.ArrayList(Api.JavascriptBundledModule).init(allocator), .package_list = std.ArrayList(Api.JavascriptBundledPackage).init(allocator), .header_string_buffer = try MutableString.init(allocator, "dist/index.js".len), .allocator = allocator, .enqueued_map = std.AutoHashMap(u32, void).init(allocator), .queue = queue, .estimated_input_lines_of_code = 0, // .resolve_queue = queue, .bundler = bundler, .tmpfile = tmpfile.file(), .dynamic_import_file_size_store = U32Map.init(allocator), .dynamic_import_file_size_store_lock = Lock.init(), .log = bundler.log, .package_list_map = std.AutoHashMap(u64, u32).init(allocator), .pool = undefined, .write_lock = Lock.init(), .package_bundle_map = package_bundle_map, }; // dist/index.js appears more common than /index.js // but this means we can store both "dist/index.js" and "index.js" in one. try generator.header_string_buffer.append("dist/index.js"); try generator.package_list_map.ensureTotalCapacity(128); var pool = try allocator.create(ThreadPool); pool.* = ThreadPool{}; generator.pool = pool; var this = generator; // Always inline the runtime into the bundle try generator.appendBytes(&initial_header); if (bundler.log.level == .verbose) { bundler.resolver.debug_logs = try DebugLogs.init(allocator); } Analytics.Features.bun_bun = true; always_bundled: { const root_package_json_resolved: _resolver.Result = bundler.resolver.resolve(bundler.fs.top_level_dir, "./package.json", .stmt) catch { generator.log.addWarning(null, logger.Loc.Empty, "Please run `bun bun` from a directory containing a package.json.") catch unreachable; break :always_bundled; }; const root_package_json = root_package_json_resolved.package_json orelse brk: { const read_dir = (bundler.resolver.readDirInfo(bundler.fs.top_level_dir) catch unreachable).?; Analytics.Features.tsconfig = Analytics.Features.tsconfig or read_dir.tsconfig_json != null; break :brk read_dir.package_json.?; }; Analytics.setProjectID(std.fs.path.dirname(root_package_json.source.path.text) orelse "/", root_package_json.name); if (bundler.macro_context) |macro_ctx| { Analytics.Features.macros = macro_ctx.remap.count() > 0; } const bundle_keys = package_bundle_map.keys(); const do_always_bundle = package_bundle_map.values(); var always_bundle_count: u32 = @truncate(u32, bundle_keys.len); if (always_bundle_count != 0) { Analytics.Features.always_bundle = true; var always_bundled_package_jsons = bundler.allocator.alloc(*PackageJSON, always_bundle_count) catch unreachable; var always_bundled_package_hashes = bundler.allocator.alloc(u32, always_bundle_count) catch unreachable; var always_bundled_booleans = bundler.allocator.alloc(bool, always_bundle_count) catch unreachable; var i: u16 = 0; inner: for (bundle_keys) |name, k| { std.mem.copy(u8, &Bundler.tmp_buildfile_buf, name); std.mem.copy(u8, Bundler.tmp_buildfile_buf[name.len..], "/package.json"); const package_json_import = Bundler.tmp_buildfile_buf[0 .. name.len + "/package.json".len]; const result = bundler.resolver.resolve(bundler.fs.top_level_dir, package_json_import, .stmt) catch |err| { generator.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving always bundled module \"{s}\"", .{ @errorName(err), name }) catch unreachable; continue :inner; }; var package_json: *PackageJSON = result.package_json orelse brk: { const read_dir = (bundler.resolver.readDirInfo(package_json_import) catch unreachable).?; if (read_dir.package_json == null) { generator.log.addWarningFmt(null, logger.Loc.Empty, bundler.allocator, "{s} missing package.json. It will not be bundled", .{name}) catch unreachable; continue :inner; } break :brk read_dir.package_json.?; }; package_json.source.key_path = result.path_pair.primary; // if (!strings.contains(result.path_pair.primary.text, package_json.name)) { // generator.log.addErrorFmt( // null, // logger.Loc.Empty, // bundler.allocator, // "Bundling \"{s}\" is not supported because the package isn.\n To fix this, move the package's code to a directory containing the name.\n Location: \"{s}\"", // .{ // name, // name, // result.path_pair.primary.text, // }, // ) catch unreachable; // continue :inner; // } always_bundled_package_jsons[i] = package_json; always_bundled_package_hashes[i] = package_json.hash; always_bundled_booleans[i] = do_always_bundle[k] == .always; i += 1; } generator.always_bundled_package_hashes = always_bundled_package_hashes[0..i]; generator.always_bundled_package_jsons = always_bundled_package_jsons[0..i]; generator.always_bundled_booleans = always_bundled_booleans[0..i]; } } if (generator.log.errors > 0) return error.BundleFailed; this.bundler.macro_context = js_ast.Macro.MacroContext.init(bundler); const include_refresh_runtime = !this.bundler.options.production and this.bundler.options.jsx.supports_fast_refresh and bundler.options.platform.isWebLike(); if (framework_config != null) { defer this.bundler.resetStore(); try this.bundler.configureFramework(true); if (bundler.options.framework) |framework| { Analytics.Features.framework = true; if (framework.override_modules.keys.len > 0) { bundler.options.framework.?.override_modules_hashes = allocator.alloc(u64, framework.override_modules.keys.len) catch unreachable; for (framework.override_modules.keys) |key, i| { bundler.options.framework.?.override_modules_hashes[i] = std.hash.Wyhash.hash(0, key); } } } } else {} this.pool.start(this) catch |err| { Analytics.enqueue(Analytics.EventName.bundle_fail); return err; }; // The ordering matters here The runtime must appear at the top of // the file But, we don't know which version of the runtime is // necessary until we know if they'll use JSX and if they'll use // react refresh var _new_jsx_runtime_resolve_result: ?_resolver.Result = null; var fast_refresh_resolve_result: ?_resolver.Result = null; // Normally, this is automatic // However, since we only do the parsing pass, it may not get imported automatically. if (bundler.options.jsx.parse) { defer this.bundler.resetStore(); if (this.bundler.resolver.resolve( this.bundler.fs.top_level_dir, this.bundler.options.jsx.import_source, .require, )) |new_jsx_runtime| { _new_jsx_runtime_resolve_result = new_jsx_runtime; this.ensurePathIsAllocated(_new_jsx_runtime_resolve_result.?.path()) catch unreachable; Analytics.Features.jsx = true; } else |_| {} } const include_fast_refresh_in_bundle = Analytics.Features.jsx and include_refresh_runtime and !Analytics.Features.fast_refresh and !bundler.options.platform.isServerSide(); var refresh_runtime_module_id: u32 = 0; if (include_refresh_runtime) { defer this.bundler.resetStore(); if (this.bundler.resolver.resolve( this.bundler.fs.top_level_dir, this.bundler.options.jsx.refresh_runtime, .require, )) |refresh_runtime| { fast_refresh_resolve_result = refresh_runtime; this.ensurePathIsAllocated(fast_refresh_resolve_result.?.path()) catch unreachable; Analytics.Features.fast_refresh = true; if (BundledModuleData.get(this, &refresh_runtime)) |mod| { refresh_runtime_module_id = mod.module_id; } } else |_| {} } if (Environment.isDebug) { switch (bundler.options.platform) { .node => try generator.appendBytes(runtime.Runtime.sourceContentNode()), .bun_macro, .bun => try generator.appendBytes(runtime.Runtime.sourceContentBun()), else => try generator.appendBytes(runtime.Runtime.sourceContent(include_fast_refresh_in_bundle)), } try generator.appendBytes("\n\n"); } else if (include_fast_refresh_in_bundle) { try generator.appendBytes(comptime runtime.Runtime.sourceContent(true) ++ "\n\n"); } else { try generator.appendBytes( switch (bundler.options.platform) { .bun_macro, .bun => comptime @as(string, runtime.Runtime.sourceContentBun() ++ "\n\n"), .node => comptime @as(string, runtime.Runtime.sourceContentNode() ++ "\n\n"), else => comptime @as(string, runtime.Runtime.sourceContentWithoutRefresh() ++ "\n\n"), }, ); } if (_new_jsx_runtime_resolve_result) |new_jsx_runtime| { try this.enqueueItem(new_jsx_runtime); _new_jsx_runtime_resolve_result = null; } if (fast_refresh_resolve_result) |fast_refresh| { try this.enqueueItem(fast_refresh); fast_refresh_resolve_result = null; } if (bundler.router) |router| { defer this.bundler.resetStore(); Analytics.Features.filesystem_router = true; const entry_points = try router.getEntryPoints(); for (entry_points) |entry_point| { const resolved = bundler.resolveEntryPoint(entry_point) catch continue; try this.enqueueItem(resolved); } this.bundler.resetStore(); } else {} if (bundler.options.framework) |framework| { if (bundler.options.platform.isBun()) { if (framework.server.isEnabled()) { Analytics.Features.bunjs = true; const resolved = try bundler.resolver.resolve( bundler.fs.top_level_dir, framework.server.path, .entry_point, ); try this.enqueueItem(resolved); } } else { if (framework.client.isEnabled()) { const resolved = try bundler.resolver.resolve( bundler.fs.top_level_dir, framework.client.path, .entry_point, ); try this.enqueueItem(resolved); } if (framework.fallback.isEnabled()) { const resolved = try bundler.resolver.resolve( bundler.fs.top_level_dir, framework.fallback.path, .entry_point, ); try this.enqueueItem(resolved); } } } for (bundler.options.entry_points) |entry_point| { defer this.bundler.resetStore(); const entry_point_path = bundler.normalizeEntryPointPath(entry_point); const resolved = bundler.resolveEntryPoint(entry_point_path) catch continue; try this.enqueueItem(resolved); } Analytics.enqueue(Analytics.EventName.bundle_start); this.bundler.resetStore(); this.pool.wait(this) catch |err| { Analytics.enqueue(Analytics.EventName.bundle_fail); return err; }; Analytics.enqueue(Analytics.EventName.bundle_success); estimated_input_lines_of_code.* = generator.estimated_input_lines_of_code; // if (comptime !isRelease) { // this.queue.checkDuplicatesSlow(); // } if (this.log.errors > 0) { tmpfile.closeAndDelete(std.mem.span(tmpname)); // We stop here because if there are errors we don't know if the bundle is valid // This manifests as a crash when sorting through the module list because we may have added files to the bundle which were never actually finished being added. return null; } if (this.module_list.items.len == 0) { tmpfile.closeAndDelete(std.mem.span(tmpname)); Output.prettyErrorln( \\error: no dependencies to bundle! \\ \\"bun bun" currently only bundles dependencies in node_modules. \\ , .{}, ); Global.crash(); } if (include_refresh_runtime and refresh_runtime_module_id > 0) { var refresh_runtime_injector_buf: [1024]u8 = undefined; var fixed_buffer = std.io.fixedBufferStream(&refresh_runtime_injector_buf); var fixed_buffer_writer = fixed_buffer.writer(); const hex_bytes = bun.fmt.hexIntLower(refresh_runtime_module_id); fixed_buffer_writer.print( \\if ('window' in globalThis) {{ \\ (function() {{ \\ BUN_RUNTIME.__injectFastRefresh(${any}()); \\ }})(); \\}} , .{hex_bytes}, ) catch unreachable; try this.tmpfile.writeAll(fixed_buffer.buffer[0..fixed_buffer.pos]); } // Ensure we never overflow this.code_end_byte_offset = @truncate( u32, // Doing this math ourself seems to not necessarily produce correct results (try this.tmpfile.getPos()), ); var javascript_bundle_container = std.mem.zeroes(Api.JavascriptBundleContainer); std.sort.sort( Api.JavascriptBundledModule, this.module_list.items, this, GenerateNodeModuleBundle.sortJavascriptModuleByPath, ); if (comptime Environment.isDebug) { const SeenHash = std.AutoHashMap(u64, void); var map = SeenHash.init(this.allocator); var ids = SeenHash.init(this.allocator); try map.ensureTotalCapacity(@truncate(u32, this.module_list.items.len)); try ids.ensureTotalCapacity(@truncate(u32, this.module_list.items.len)); for (this.module_list.items) |a| { const a_pkg: Api.JavascriptBundledPackage = this.package_list.items[a.package_id]; const a_name = this.metadataStringPointer(a_pkg.name); const a_version = this.metadataStringPointer(a_pkg.version); const a_path = this.metadataStringPointer(a.path); std.debug.assert(a_name.len > 0); std.debug.assert(a_version.len > 0); std.debug.assert(a_path.len > 0); var hash_print = std.mem.zeroes([4096]u8); const hash = std.hash.Wyhash.hash(0, std.fmt.bufPrint(&hash_print, "{s}@{s}/{s}", .{ a_name, a_version, a_path }) catch unreachable); var result1 = map.getOrPutAssumeCapacity(hash); std.debug.assert(!result1.found_existing); var result2 = ids.getOrPutAssumeCapacity(a.id); std.debug.assert(!result2.found_existing); } } var hasher = std.hash.Wyhash.init(0); // We want to sort the packages as well as the files // The modules sort the packages already // So can just copy it in the below loop. var sorted_package_list = try allocator.alloc(Api.JavascriptBundledPackage, this.package_list.items.len); // At this point, the module_list is sorted. if (this.module_list.items.len > 0) { var package_id_i: u32 = 0; var i: usize = 0; // Assumption: node_modules are immutable // Assumption: module files are immutable // (They're not. But, for our purposes that's okay) // The etag is: // - The hash of each module's path in sorted order // - The hash of each module's code size in sorted order // - hash(hash(package_name, package_version)) // If this doesn't prove strong enough, we will do a proper content hash // But I want to avoid that overhead unless proven necessary. // There's a good chance we don't even strictly need an etag here. var bytes: [4]u8 = undefined; while (i < this.module_list.items.len) { var current_package_id = this.module_list.items[i].package_id; this.module_list.items[i].package_id = package_id_i; var offset = @truncate(u32, i); i += 1; while (i < this.module_list.items.len and this.module_list.items[i].package_id == current_package_id) : (i += 1) { this.module_list.items[i].package_id = package_id_i; // Hash the file path hasher.update(this.metadataStringPointer(this.module_list.items[i].path)); // Then the length of the code std.mem.writeIntNative(u32, &bytes, this.module_list.items[i].code.length); hasher.update(&bytes); } this.package_list.items[current_package_id].modules_offset = offset; this.package_list.items[current_package_id].modules_length = @truncate(u32, i) - offset; // Hash the hash of the package name // it's hash(hash(package_name, package_version)) std.mem.writeIntNative(u32, &bytes, this.package_list.items[current_package_id].hash); hasher.update(&bytes); sorted_package_list[package_id_i] = this.package_list.items[current_package_id]; package_id_i += 1; } } var javascript_bundle = std.mem.zeroes(Api.JavascriptBundle); javascript_bundle.modules = this.module_list.items; javascript_bundle.packages = sorted_package_list; javascript_bundle.manifest_string = this.header_string_buffer.list.items; const etag_u64 = hasher.final(); // We store the etag as a ascii hex encoded u64 // This is so we can send the bytes directly in the HTTP server instead of formatting it as hex each time. javascript_bundle.etag = try std.fmt.allocPrint(allocator, "{any}", .{bun.fmt.hexIntLower(etag_u64)}); javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp())); const basename = std.fs.path.basename(std.mem.span(destination)); const extname = std.fs.path.extension(basename); javascript_bundle.import_from_name = if (bundler.options.platform.isBun()) "/node_modules.server.bun" else try std.fmt.allocPrint( this.allocator, "/{s}.{s}.bun", .{ basename[0 .. basename.len - extname.len], javascript_bundle.etag, }, ); javascript_bundle_container.bundle_format_version = current_version; javascript_bundle_container.bundle = javascript_bundle; javascript_bundle_container.code_length = this.code_end_byte_offset; javascript_bundle_container.framework = framework_config; javascript_bundle_container.routes = route_config; var start_pos = try this.tmpfile.getPos(); var tmpwriter = std.io.bufferedWriter(this.tmpfile.writer()); const SchemaWriter = schema.Writer(@TypeOf(tmpwriter.writer())); var schema_file_writer = SchemaWriter.init(tmpwriter.writer()); try javascript_bundle_container.encode(&schema_file_writer); try tmpwriter.flush(); // sanity check if (Environment.isDebug) { try this.tmpfile.seekTo(start_pos); var contents = try allocator.alloc(u8, (try this.tmpfile.getEndPos()) - start_pos); var read_bytes = try this.tmpfile.read(contents); var buf = contents[0..read_bytes]; var reader = schema.Reader.init(buf, allocator); var decoder = try Api.JavascriptBundleContainer.decode( &reader, ); std.debug.assert(decoder.code_length.? == javascript_bundle_container.code_length.?); } var code_length_bytes: [4]u8 = undefined; std.mem.writeIntNative(u32, &code_length_bytes, this.code_end_byte_offset); _ = try std.os.pwrite(this.tmpfile.handle, &code_length_bytes, magic_bytes.len); // Without his mutex, we get a crash at this location: // try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, destination); // ^ const top_dir = try std.fs.openDirAbsolute(Fs.FileSystem.instance.top_level_dir, .{}); _ = C.fchmod( this.tmpfile.handle, // chmod 777 0o0000010 | 0o0000100 | 0o0000001 | 0o0001000 | 0o0000040 | 0o0000004 | 0o0000002 | 0o0000400 | 0o0000200 | 0o0000020, ); try tmpfile.promote(tmpname, top_dir.fd, destination); // Print any errors at the end // try this.log.print(Output.errorWriter()); return javascript_bundle_container; } pub fn metadataStringPointer(this: *GenerateNodeModuleBundle, ptr: Api.StringPointer) string { return this.header_string_buffer.list.items[ptr.offset .. ptr.offset + ptr.length]; } // Since we trim the prefixes, we must also compare the package name and version pub fn sortJavascriptModuleByPath(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledModule, b: Api.JavascriptBundledModule) bool { return switch (std.mem.order( u8, ctx.metadataStringPointer( ctx.package_list.items[a.package_id].name, ), ctx.metadataStringPointer( ctx.package_list.items[b.package_id].name, ), )) { .eq => switch (std.mem.order( u8, ctx.metadataStringPointer( ctx.package_list.items[a.package_id].version, ), ctx.metadataStringPointer( ctx.package_list.items[b.package_id].version, ), )) { .eq => std.mem.order( u8, ctx.metadataStringPointer(a.path), ctx.metadataStringPointer(b.path), ) == .lt, .lt => true, else => false, }, .lt => true, else => false, }; } // pub fn sortJavascriptPackageByName(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledPackage, b: Api.JavascriptBundledPackage) bool { // return std.mem.order(u8, ctx.metadataStringPointer(a.name), ctx.metadataStringPointer(b.name)) == .lt; // } pub fn appendBytes(generator: *GenerateNodeModuleBundle, bytes: anytype) !void { try generator.tmpfile.writeAll(bytes); generator.tmpfile_byte_offset += @truncate(u32, bytes.len); } const BundledModuleData = struct { import_path: string, package_path: string, package: *const PackageJSON, module_id: u32, pub fn getForceBundle(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { return _get(this, resolve_result, true, false); } pub fn getForceBundleForMain(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { return _get(this, resolve_result, true, true); } threadlocal var normalized_package_path: [512]u8 = undefined; threadlocal var normalized_package_path2: [512]u8 = undefined; inline fn _get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result, comptime force: bool, comptime is_main: bool) ?BundledModuleData { const path = resolve_result.pathConst() orelse return null; if (strings.eqlComptime(path.namespace, "node")) { const _import_path = path.text["/bun-vfs/node_modules/".len..][resolve_result.package_json.?.name.len + 1 ..]; return BundledModuleData{ .import_path = _import_path, .package_path = path.text["/bun-vfs/node_modules/".len..], .package = resolve_result.package_json.?, .module_id = resolve_result.package_json.?.hashModule(_import_path), }; } var import_path = path.text; var package_path = path.text; var file_path = path.text; if (resolve_result.package_json) |pkg_| { var pkg: *const PackageJSON = pkg_; if (this.package_bundle_map.get(pkg.name)) |result| { if (result == .never) return null; } if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash)) |pkg_i| { pkg = this.always_bundled_package_jsons[pkg_i]; if (!this.always_bundled_booleans[pkg_i]) return null; const key_path_source_dir = pkg.source.key_path.sourceDir(); const default_source_dir = pkg.source.path.sourceDir(); if (strings.startsWith(path.text, key_path_source_dir)) { import_path = path.text[key_path_source_dir.len..]; } else if (strings.startsWith(path.text, default_source_dir)) { import_path = path.text[default_source_dir.len..]; } else if (strings.startsWith(path.pretty, pkg.name)) { import_path = path.pretty[pkg.name.len + 1 ..]; } var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path; std.mem.copy(u8, buf_to_use, pkg.name); buf_to_use[pkg.name.len] = '/'; std.mem.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1]; return BundledModuleData{ .import_path = import_path, .package_path = package_path, .package = pkg, .module_id = pkg.hashModule(package_path), }; } } const root: _resolver.RootPathPair = this.bundler.resolver.rootNodeModulePackageJSON( resolve_result, ) orelse return null; var base_path = root.base_path; const package_json = root.package_json; if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, package_json.hash)) |pkg_i| { var pkg = this.always_bundled_package_jsons[pkg_i]; if (!this.always_bundled_booleans[pkg_i]) return null; const key_path_source_dir = pkg.source.key_path.sourceDir(); const default_source_dir = pkg.source.path.sourceDir(); if (strings.startsWith(path.text, key_path_source_dir)) { import_path = path.text[key_path_source_dir.len..]; } else if (strings.startsWith(path.text, default_source_dir)) { import_path = path.text[default_source_dir.len..]; } else if (strings.startsWith(path.pretty, pkg.name)) { import_path = path.pretty[pkg.name.len + 1 ..]; } var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path; std.mem.copy(u8, buf_to_use, pkg.name); buf_to_use[pkg.name.len] = '/'; std.mem.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1]; return BundledModuleData{ .import_path = import_path, .package_path = package_path, .package = pkg, .module_id = pkg.hashModule(package_path), }; } // Easymode: the file path doesn't need to be remapped. if (strings.startsWith(file_path, base_path)) { import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/"); package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/"); std.debug.assert(import_path.len > 0); return BundledModuleData{ .import_path = import_path, .package_path = package_path, .package = package_json, .module_id = package_json.hashModule(package_path), }; } if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| { package_path = file_path[i..]; import_path = package_path[package_json.name.len + 1 ..]; std.debug.assert(import_path.len > 0); return BundledModuleData{ .import_path = import_path, .package_path = package_path, .package = package_json, .module_id = package_json.hashModule(package_path), }; } if (comptime force) { if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, root.package_json.hash)) |pkg_json_i| { const pkg_json = this.always_bundled_package_jsons[pkg_json_i]; base_path = pkg_json.source.key_path.sourceDir(); if (strings.startsWith(file_path, base_path)) { import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/"); package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/"); std.debug.assert(import_path.len > 0); return BundledModuleData{ .import_path = import_path, .package_path = package_path, .package = package_json, .module_id = package_json.hashModule(package_path), }; } if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| { package_path = file_path[i..]; import_path = package_path[package_json.name.len + 1 ..]; std.debug.assert(import_path.len > 0); return BundledModuleData{ .import_path = import_path, .package_path = package_path, .package = package_json, .module_id = package_json.hashModule(package_path), }; } // This is our last resort. // The package is supposed to be bundled // package.json says its' in packages/@foo/bar/index.js // The file really is in packages/bar/index.js // But we need the import path to contain the package path for bundling to work // so we fake it // we say "" if (package_json.name[0] == '@') { if (std.mem.indexOfScalar(u8, package_json.name, '/')) |at| { const package_subpath = package_json.name[at + 1 ..]; if (std.mem.lastIndexOf(u8, file_path, package_subpath)) |i| { package_path = this.bundler.fs.dirname_store.print("{s}/{s}", .{ package_json.name, file_path[i + package_subpath.len ..] }) catch unreachable; import_path = package_path[package_json.name.len + 1 ..]; return BundledModuleData{ .import_path = import_path, .package_path = package_path, .package = package_json, .module_id = package_json.hashModule(package_path), }; } } } } } return null; } pub fn get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { return _get(this, resolve_result, false, false); } }; fn writeEmptyModule(this: *GenerateNodeModuleBundle, package_relative_path: string, module_id: u32) !u32 { this.write_lock.lock(); defer this.write_lock.unlock(); var code_offset = @truncate(u32, try this.tmpfile.getPos()); var writer = this.tmpfile.writer(); var buffered = std.io.bufferedWriter(writer); var bufwriter = buffered.writer(); try bufwriter.writeAll("// "); try bufwriter.writeAll(package_relative_path); try bufwriter.writeAll(" (disabled/empty)\nexport var $"); std.fmt.formatInt(module_id, 16, .lower, .{}, bufwriter) catch unreachable; try bufwriter.writeAll(" = () => { var obj = {}; Object.defineProperty(obj, 'default', { value: obj, enumerable: false, configurable: true }, obj); return obj; }; \n"); try buffered.flush(); this.tmpfile_byte_offset = @truncate(u32, try this.tmpfile.getPos()); return code_offset; } fn processImportRecord(_: *GenerateNodeModuleBundle, _: ImportRecord) !void {} var json_ast_symbols = [_]js_ast.Symbol{ js_ast.Symbol{ .original_name = "$$m" }, js_ast.Symbol{ .original_name = "exports" }, js_ast.Symbol{ .original_name = "module" }, js_ast.Symbol{ .original_name = "CONGRATS_YOU_FOUND_A_BUG" }, js_ast.Symbol{ .original_name = "$$bun_runtime_json_parse" }, }; const json_parse_string = "parse"; threadlocal var override_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; pub fn appendToModuleList( this: *GenerateNodeModuleBundle, package: *const PackageJSON, module_id: u32, code_offset: u32, package_relative_path: string, ) !void { this.list_lock.lock(); defer this.list_lock.unlock(); const code_length = @atomicLoad(u32, &this.tmpfile_byte_offset, .SeqCst) - code_offset; if (comptime Environment.isDebug) { std.debug.assert(code_length > 0); std.debug.assert(package.hash != 0); std.debug.assert(package.version.len > 0); std.debug.assert(package.name.len > 0); std.debug.assert(module_id > 0); } var package_get_or_put_entry = try this.package_list_map.getOrPut(package.hash); if (!package_get_or_put_entry.found_existing) { package_get_or_put_entry.value_ptr.* = @truncate(u32, this.package_list.items.len); try this.package_list.append( Api.JavascriptBundledPackage{ .name = try this.appendHeaderString(package.name), .version = try this.appendHeaderString(package.version), .hash = package.hash, }, ); this.has_jsx = this.has_jsx or strings.eql(package.name, this.bundler.options.jsx.package_name); } var path_extname_length = @truncate(u8, std.fs.path.extension(package_relative_path).len); try this.module_list.append( Api.JavascriptBundledModule{ .path = try this.appendHeaderString( package_relative_path, ), .path_extname_length = path_extname_length, .package_id = package_get_or_put_entry.value_ptr.*, .id = module_id, .code = Api.StringPointer{ .length = @truncate(u32, code_length), .offset = @truncate(u32, code_offset), }, }, ); } threadlocal var json_e_string: js_ast.E.String = undefined; threadlocal var json_e_call: js_ast.E.Call = undefined; threadlocal var json_e_identifier: js_ast.E.Identifier = undefined; threadlocal var json_call_args: [1]js_ast.Expr = undefined; const PendingImports = std.AutoArrayHashMap(u32, ThreadPool.Worker.ProcessFileTask); pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, bundler: *Bundler, _resolve: _resolver.Result) !void { const resolve = _resolve; if (resolve.is_external) return; var shared_buffer = &worker.data.shared_buffer; var scan_pass_result = &worker.data.scan_pass_result; var file_path = (resolve.pathConst() orelse unreachable).*; var add_to_bundle = brk: { if (resolve.package_json) |package_json| { if (this.package_bundle_map.get(package_json.name)) |result| { break :brk result == .always; } } break :brk resolve.isLikelyNodeModule(); }; const source_dir = file_path.sourceDir(); const loader = bundler.options.loader(file_path.name.ext); const platform = bundler.options.platform; defer scan_pass_result.reset(); defer shared_buffer.reset(); defer this.bundler.resetStore(); var log = worker.data.log; var queue = PendingImports.init(worker.allocator); var __module_data: ?BundledModuleData = null; if (add_to_bundle) { __module_data = BundledModuleData.getForceBundleForMain(this, &resolve); } // If we're in a node_module, build that almost normally if (add_to_bundle and __module_data != null) { const module_data = __module_data.?; var code_offset: u32 = 0; const module_id = module_data.module_id; const package = module_data.package; const package_relative_path = module_data.import_path; file_path.pretty = module_data.package_path; const entry: CacheEntry = brk: { if (this.bundler.options.framework) |framework| { if (framework.override_modules_hashes.len > 0) { const package_relative_path_hash = std.hash.Wyhash.hash(0, module_data.package_path); if (std.mem.indexOfScalar( u64, framework.override_modules_hashes, package_relative_path_hash, )) |index| { const relative_path = [_]string{ framework.resolved_dir, framework.override_modules.values[index], }; var override_path = this.bundler.fs.absBuf( &relative_path, &override_file_path_buf, ); override_file_path_buf[override_path.len] = 0; var override_pathZ = override_file_path_buf[0..override_path.len :0]; break :brk try bundler.resolver.caches.fs.readFileShared( bundler.fs, override_pathZ, 0, null, shared_buffer, ); } } } if (!strings.eqlComptime(file_path.namespace, "node")) break :brk try bundler.resolver.caches.fs.readFileShared( bundler.fs, file_path.textZ(), resolve.dirname_fd, if (resolve.file_fd != 0) resolve.file_fd else null, shared_buffer, ); break :brk CacheEntry{ .contents = NodeFallbackModules.contentsFromPath(file_path.text) orelse "", }; }; var approximate_newline_count: usize = 0; defer worker.data.estimated_input_lines_of_code += approximate_newline_count; // Handle empty files // We can't just ignore them. Sometimes code will try to import it. Often because of TypeScript types. // So we just say it's an empty object. Empty object mimicks what "browser": false does as well. // TODO: optimize this so that all the exports for these are done in one line instead of writing repeatedly if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) { code_offset = try this.writeEmptyModule(module_data.package_path, module_id); } else { var ast: js_ast.Ast = undefined; const source = logger.Source.initRecycledFile( Fs.File{ .path = file_path, .contents = entry.contents, }, bundler.allocator, ) catch return null; switch (loader) { .jsx, .tsx, .js, .ts, => { var jsx = _resolve.jsx; jsx.parse = loader.isJSX(); var opts = js_parser.Parser.Options.init(jsx, loader); opts.transform_require_to_import = false; opts.features.dynamic_require = bundler.options.platform.isBun(); opts.enable_bundling = true; opts.warn_about_unbundled_modules = false; opts.macro_context = &worker.data.macro_context; opts.features.auto_import_jsx = jsx.parse; opts.features.trim_unused_imports = this.bundler.options.trim_unused_imports orelse loader.isTypeScript(); opts.tree_shaking = this.bundler.options.tree_shaking; ast = (bundler.resolver.caches.js.parse( bundler.allocator, opts, bundler.options.define, log, &source, ) catch null) orelse return; approximate_newline_count = ast.approximate_newline_count; if (ast.import_records.len > 0) { for (ast.import_records) |*import_record| { // Don't resolve the runtime if (import_record.is_internal or import_record.is_unused) { continue; } if (bundler.options.platform.isBun()) { if (JSC.DisabledModule.has(import_record.path.text)) { import_record.path.is_disabled = true; import_record.do_commonjs_transform_in_printer = true; import_record.is_bundled = true; continue; } if (JSC.HardcodedModule.Aliases.get(import_record.path.text)) |remapped| { import_record.path.text = remapped; import_record.tag = if (strings.eqlComptime(remapped, "bun")) ImportRecord.Tag.bun else ImportRecord.Tag.hardcoded; import_record.is_bundled = false; continue; } } if (bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |tmp| { var _resolved_import = tmp; if (_resolved_import.is_external) { continue; } var path = _resolved_import.path() orelse { import_record.path.is_disabled = true; import_record.is_bundled = true; continue; }; const loader_ = bundler.options.loader(path.name.ext); if (!loader_.isJavaScriptLikeOrJSON()) { import_record.path.is_disabled = true; import_record.is_bundled = true; continue; } // if (_resolved_import.package_json == null) |pkg_json| { // _resolved_import.package_json = if (pkg_json.hash == resolve.package_json.?.hash) // resolve.package_json // else // _resolved_import.package_json; // } const resolved_import: *const _resolver.Result = &_resolved_import; const _module_data = BundledModuleData.getForceBundle(this, resolved_import) orelse { // if a macro imports code that cannot be bundled // we just silently disable it // because...we need some kind of hook to say "don't bundle this" import_record.path.is_disabled = true; import_record.is_bundled = false; continue; }; import_record.module_id = _module_data.module_id; std.debug.assert(import_record.module_id != 0); import_record.is_bundled = true; path.* = try path.dupeAlloc(this.allocator); import_record.path = path.*; _ = queue.getOrPutValue( _module_data.module_id, .{ .resolution = _resolved_import, }, ) catch unreachable; } else |err| { if (comptime Environment.isDebug) { if (!import_record.handles_import_errors) { Output.prettyErrorln("\n{s} resolving \"{s}\" from \"{s}\"", .{ @errorName(err), import_record.path.text, file_path.text, }); } } // Disable failing packages from being printed. // This may cause broken code to write. // However, doing this means we tell them all the resolve errors // Rather than just the first one. import_record.path.is_disabled = true; switch (err) { error.ModuleNotFound => { const addError = logger.Log.addResolveErrorWithTextDupeMaybeWarn; if (!import_record.handles_import_errors) { if (isPackagePath(import_record.path.text)) { if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) { try addError( log, &source, import_record.range, this.allocator, "Could not resolve Node.js builtin: \"{s}\".", .{import_record.path.text}, import_record.kind, platform.isBun(), ); } else { try addError( log, &source, import_record.range, this.allocator, "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?", .{import_record.path.text}, import_record.kind, platform.isBun(), ); } } else if (!platform.isBun()) { try addError( log, &source, import_record.range, this.allocator, "Could not resolve: \"{s}\"", .{ import_record.path.text, }, import_record.kind, platform.isBun(), ); } } }, // assume other errors are already in the log else => {}, } } } } }, .json => { // parse the JSON _only_ to catch errors at build time. const json_parse_result = json_parser.ParseJSONForBundling(&source, worker.data.log, worker.allocator) catch return; if (json_parse_result.tag != .empty) { const expr = brk: { // If it's an ascii string, we just print it out with a big old JSON.parse() if (json_parse_result.tag == .ascii) { json_call_args[0] = js_ast.Expr.init( js_ast.E.String, js_ast.E.String{ .data = source.contents, }, .{ .start = 0 }, ); break :brk js_ast.Expr.init( js_ast.E.Call, js_ast.E.Call{ .target = js_ast.Expr{ .data = .{ .e_identifier = js_ast.E.Identifier{ .ref = Ref.atIndex(json_ast_symbols.len - 1), }, }, .loc = .{ .start = 0 }, }, .args = js_ast.ExprNodeList.init(std.mem.span(&json_call_args)), }, .{ .start = 0 }, ); } else { break :brk json_parse_result.expr; } }; var stmt = js_ast.Stmt.alloc( js_ast.S.ExportDefault, js_ast.S.ExportDefault{ .value = js_ast.StmtOrExpr{ .expr = expr }, .default_name = js_ast.LocRef{ .loc = .{}, .ref = null, }, }, .{ .start = 0 }, ); var stmts = worker.allocator.alloc(js_ast.Stmt, 1) catch unreachable; stmts[0] = stmt; var parts = worker.allocator.alloc(js_ast.Part, 1) catch unreachable; parts[0] = js_ast.Part{ .stmts = stmts }; ast = js_ast.Ast.initTest(parts); ast.runtime_imports = runtime.Runtime.Imports{}; ast.runtime_imports.@"$$m" = .{ .ref = Ref.atIndex(0), .primary = Ref.None, .backup = Ref.None }; ast.runtime_imports.__export = .{ .ref = Ref.atIndex(1), .primary = Ref.None, .backup = Ref.None }; ast.symbols = &json_ast_symbols; ast.module_ref = Ref.atIndex(2); ast.exports_ref = ast.runtime_imports.__export.?.ref; ast.bundle_export_ref = Ref.atIndex(3); } else { var parts = &[_]js_ast.Part{}; ast = js_ast.Ast.initTest(parts); } }, else => { return; }, } switch (ast.parts.len) { // It can be empty after parsing too // A file like this is an example: // "//# sourceMappingURL=validator.js.map" 0 => { code_offset = try this.writeEmptyModule(module_data.package_path, module_id); }, else => { const register_ref = ast.runtime_imports.@"$$m".?.ref; const E = js_ast.E; const Expr = js_ast.Expr; const Stmt = js_ast.Stmt; var prepend_part: js_ast.Part = undefined; var needs_prepend_part = false; if (ast.parts.len > 1) { for (ast.parts) |part| { if (part.tag != .none and part.stmts.len > 0) { prepend_part = part; needs_prepend_part = true; break; } } } var package_path = js_ast.E.String{ .data = module_data.package_path }; var target_identifier = E.Identifier{ .ref = register_ref }; var module_binding = js_ast.B.Identifier{ .ref = ast.module_ref.? }; var exports_binding = js_ast.B.Identifier{ .ref = ast.exports_ref.? }; var part = &ast.parts[ast.parts.len - 1]; var new_stmts: [1]Stmt = undefined; var register_args: [1]Expr = undefined; var cjs_args: [2]js_ast.G.Arg = [_]js_ast.G.Arg{ .{ .binding = .{ .loc = logger.Loc.Empty, .data = .{ .b_identifier = &module_binding }, }, }, .{ .binding = js_ast.Binding{ .loc = logger.Loc.Empty, .data = .{ .b_identifier = &exports_binding }, }, }, }; const module_path_str = js_ast.Expr{ .data = .{ .e_string = &package_path }, .loc = logger.Loc.Empty }; var closure = E.Arrow{ .args = &cjs_args, .body = .{ .loc = logger.Loc.Empty, .stmts = part.stmts, }, }; var properties: [1]js_ast.G.Property = [_]js_ast.G.Property{ .{ .key = module_path_str, .value = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_arrow = &closure } }, }, }; var e_object = E.Object{ .properties = js_ast.G.Property.List.init(&properties), }; // if (!ast.uses_module_ref) { // var symbol = &ast.symbols[ast.module_ref.?.innerIndex()]; // symbol.original_name = "_$$"; // } // $$m(12345, "react", "index.js", function(module, exports) { // }) var accessor = js_ast.E.Index{ .index = module_path_str, .target = js_ast.Expr{ .data = .{ .e_object = &e_object }, .loc = logger.Loc.Empty, } }; register_args[0] = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_index = &accessor } }; var call_register = E.Call{ .target = Expr{ .data = .{ .e_identifier = target_identifier }, .loc = logger.Loc{ .start = 0 }, }, .args = js_ast.ExprNodeList.init(®ister_args), }; var register_expr = Expr{ .loc = call_register.target.loc, .data = .{ .e_call = &call_register } }; var decls: [1]js_ast.G.Decl = undefined; var bundle_export_binding = js_ast.B.Identifier{ .ref = ast.runtime_imports.@"$$m".?.ref }; var binding = js_ast.Binding{ .loc = register_expr.loc, .data = .{ .b_identifier = &bundle_export_binding }, }; decls[0] = js_ast.G.Decl{ .value = register_expr, .binding = binding, }; var export_var = js_ast.S.Local{ .decls = &decls, .is_export = true, }; new_stmts[0] = Stmt{ .loc = register_expr.loc, .data = .{ .s_local = &export_var } }; part.stmts = &new_stmts; var writer = js_printer.NewFileWriter(this.tmpfile); var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols}); // It should only have one part. ast.parts = ast.parts[ast.parts.len - 1 ..]; const write_result = // The difference between these two is `ascii_only` // We try our best to print UTF-8. // However, JavaScriptCore does not accept UTF-8. // It accepts either latin1 characters as unsigned char // or UTF-16 chars as uint16 // We don't want to add a UTF decoding pass to the .bun files // because it's potentially 15 MB of code. // If we store it as UTF-16 directly, then that 15 MB of code becomes 30 MB of code! // lots of code! // if (!bundler.options.platform.isBun()) try js_printer.printCommonJSThreaded( @TypeOf(writer), writer, ast, js_ast.Symbol.Map.initList(symbols), &source, false, js_printer.Options{ .to_module_ref = Ref.RuntimeRef, .bundle_export_ref = ast.runtime_imports.@"$$m".?.ref, .source_path = file_path, .externals = ast.externals, .indent = 0, .require_ref = ast.require_ref, .module_hash = module_id, .runtime_imports = ast.runtime_imports, .prepend_part_value = &prepend_part, .prepend_part_key = if (needs_prepend_part) closure.body.stmts.ptr else null, }, Linker, &bundler.linker, &this.write_lock, std.fs.File, this.tmpfile, std.fs.File.getPos, &this.tmpfile_byte_offset, ) else try js_printer.printCommonJSThreaded( @TypeOf(writer), writer, ast, js_ast.Symbol.Map.initList(symbols), &source, true, js_printer.Options{ .to_module_ref = Ref.RuntimeRef, .bundle_export_ref = ast.runtime_imports.@"$$m".?.ref, .source_path = file_path, .externals = ast.externals, .indent = 0, .require_ref = ast.require_ref, .module_hash = module_id, .runtime_imports = ast.runtime_imports, .prepend_part_value = &prepend_part, .prepend_part_key = if (needs_prepend_part) closure.body.stmts.ptr else null, }, Linker, &bundler.linker, &this.write_lock, std.fs.File, this.tmpfile, std.fs.File.getPos, &this.tmpfile_byte_offset, ); code_offset = write_result.off; }, } } if (comptime Environment.isDebug) { Output.prettyln("{s}@{s}/{s} - {d}:{d} \n", .{ package.name, package.version, package_relative_path, package.hash, module_id }); Output.flush(); std.debug.assert(package_relative_path.len > 0); } try this.appendToModuleList( package, module_id, code_offset, package_relative_path, ); } else { // If it's app code, scan but do not fully parse. switch (loader) { .jsx, .tsx, .js, .ts, => { const entry = bundler.resolver.caches.fs.readFileShared( bundler.fs, file_path.textZ(), resolve.dirname_fd, if (resolve.file_fd != 0) resolve.file_fd else null, shared_buffer, ) catch return; if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) return; const source = logger.Source.initRecycledFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null; var jsx = bundler.options.jsx; jsx.parse = loader.isJSX(); var opts = js_parser.Parser.Options.init(jsx, loader); opts.macro_context = &worker.data.macro_context; opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript(); try bundler.resolver.caches.js.scan( bundler.allocator, scan_pass_result, opts, bundler.options.define, log, &source, ); worker.data.estimated_input_lines_of_code += scan_pass_result.approximate_newline_count; { for (scan_pass_result.import_records.items) |*import_record| { if (import_record.is_internal or import_record.is_unused) { continue; } if (bundler.options.platform.isBun()) { if (JSC.DisabledModule.has(import_record.path.text)) { import_record.path.is_disabled = true; import_record.is_bundled = true; continue; } if (JSC.HardcodedModule.Aliases.get(import_record.path.text)) |remapped| { import_record.path.text = remapped; import_record.tag = if (strings.eqlComptime(remapped, "bun")) ImportRecord.Tag.bun else ImportRecord.Tag.hardcoded; import_record.is_bundled = false; continue; } } var tmp = bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind); if (tmp) |*_resolved_import| { if (_resolved_import.is_external) { continue; } var path = _resolved_import.path() orelse continue; const loader_ = this.bundler.options.loader(path.name.ext); if (!loader_.isJavaScriptLikeOrJSON()) continue; path.* = try path.dupeAlloc(this.allocator); if (BundledModuleData.get(this, _resolved_import)) |mod| { if (comptime !FeatureFlags.bundle_dynamic_import) { if (import_record.kind == .dynamic) continue; } else { // When app code dynamically imports a large file // Don't bundle it. Leave it as a separate file. // The main value from bundling in development is to minimize tiny, waterfall http requests // If you're importing > 100 KB file dynamically, developer is probably explicitly trying to do that. // There's a tradeoff between "I want to minimize page load time" if (import_record.kind == .dynamic) { this.dynamic_import_file_size_store_lock.lock(); defer this.dynamic_import_file_size_store_lock.unlock(); var dynamic_import_file_size = this.dynamic_import_file_size_store.getOrPut(mod.module_id) catch unreachable; if (!dynamic_import_file_size.found_existing) { var fd = _resolved_import.file_fd; var can_close = false; if (fd == 0) { dynamic_import_file_size.value_ptr.* = 0; fd = (std.fs.openFileAbsolute(path.textZ(), .{}) catch |err| { this.log.addRangeWarningFmt( &source, import_record.range, worker.allocator, "{s} opening file: \"{s}\"", .{ @errorName(err), path.text }, ) catch unreachable; continue; }).handle; can_close = true; Fs.FileSystem.setMaxFd(fd); } defer { if (can_close and bundler.fs.fs.needToCloseFiles()) { var _file = std.fs.File{ .handle = fd }; _file.close(); _resolved_import.file_fd = 0; } else if (FeatureFlags.store_file_descriptors) { _resolved_import.file_fd = fd; } } var file = std.fs.File{ .handle = fd }; var stat = file.stat() catch |err| { this.log.addRangeWarningFmt( &source, import_record.range, worker.allocator, "{s} stat'ing file: \"{s}\"", .{ @errorName(err), path.text }, ) catch unreachable; dynamic_import_file_size.value_ptr.* = 0; continue; }; dynamic_import_file_size.value_ptr.* = @truncate(u32, stat.size); } if (dynamic_import_file_size.value_ptr.* > 1024 * 100) continue; } } std.debug.assert(mod.module_id != 0); _ = queue.getOrPutValue( mod.module_id, .{ .resolution = _resolved_import.*, }, ) catch unreachable; } else { _ = queue.getOrPutValue( _resolved_import.hash( this.bundler.fs.top_level_dir, loader_, ), .{ .resolution = _resolved_import.*, }, ) catch unreachable; } } else |err| { switch (err) { error.ModuleNotFound => { if (!import_record.handles_import_errors) { const addError = logger.Log.addResolveErrorWithTextDupeMaybeWarn; if (isPackagePath(import_record.path.text)) { if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) { try addError( log, &source, import_record.range, this.allocator, "Could not resolve Node.js builtin: \"{s}\".", .{import_record.path.text}, import_record.kind, platform.isBun(), ); } else { try addError( log, &source, import_record.range, this.allocator, "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?", .{import_record.path.text}, import_record.kind, platform.isBun(), ); } } else if (!platform.isBun()) { try addError( log, &source, import_record.range, this.allocator, "Could not resolve: \"{s}\"", .{ import_record.path.text, }, import_record.kind, platform.isBun(), ); } } }, // assume other errors are already in the log else => {}, } } } } }, else => {}, } } if (queue.count() > 0) { try this.queue.writeItem(queue); } }