-rw-r--r-- | src/bundler.zig                   | 4704
-rw-r--r-- | src/cache.zig                     |  497
-rw-r--r-- | src/cli/build_command.zig         |    2
-rw-r--r-- | src/cli/bun_command.zig           |    8
-rw-r--r-- | src/http.zig                      |    2
-rw-r--r-- | src/javascript/jsc/javascript.zig |    2
-rw-r--r-- | src/linker.zig                    | 1158
-rw-r--r-- | src/resolver/resolver.zig         | 3628
8 files changed, 4949 insertions, 5052 deletions
diff --git a/src/bundler.zig b/src/bundler.zig index b34241f1b..77534e602 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -38,6 +38,9 @@ const NewBunQueue = @import("./bun_queue.zig").NewBunQueue; const NodeFallbackModules = @import("./node_fallbacks.zig"); const CacheEntry = @import("./cache.zig").FsCacheEntry; +const Linker = linker.Linker; +const Resolver = _resolver.Resolver; + // How it works end-to-end // 1. Resolve a file path from input using the resolver // 2. Look at the extension of that file path, and determine a loader @@ -94,1748 +97,1409 @@ pub const ParseResult = struct { empty: bool = false, }; -pub fn NewBundler(cache_files: bool) type { - return struct { - pub const Linker = if (cache_files) linker.Linker else linker.ServeLinker; - pub const Resolver = if (cache_files) _resolver.Resolver else _resolver.ResolverUncached; - const ThisBundler = @This(); +const cache_files = false; - options: options.BundleOptions, - log: *logger.Log, - allocator: *std.mem.Allocator, - result: options.TransformResult = undefined, - resolver: Resolver, - fs: *Fs.FileSystem, - // thread_pool: *ThreadPool, - output_files: std.ArrayList(options.OutputFile), - resolve_results: *ResolveResults, - resolve_queue: ResolveQueue, - elapsed: i128 = 0, - needs_runtime: bool = false, - router: ?Router = null, - - linker: Linker, - timer: Timer = Timer{}, - env: *DotEnv.Loader, - - // must be pointer array because we can't we don't want the source to point to invalid memory if the array size is reallocated - virtual_modules: std.ArrayList(*ClientEntryPoint), - - pub const isCacheEnabled = cache_files; - - pub fn clone(this: *ThisBundler, allocator: *std.mem.Allocator, to: *ThisBundler) !void { - to.* = this.*; - to.setAllocator(allocator); - to.log = try allocator.create(logger.Log); - to.log.* = logger.Log.init(allocator); - to.setLog(to.log); - } +pub const Bundler = struct { + const ThisBundler = @This(); - pub fn setLog(this: *ThisBundler, log: *logger.Log) void { - this.log = log; - this.linker.log = log; - this.resolver.log = log; - } + options: options.BundleOptions, + log: *logger.Log, + allocator: *std.mem.Allocator, + result: options.TransformResult = undefined, + resolver: Resolver, + fs: *Fs.FileSystem, + // thread_pool: *ThreadPool, + output_files: std.ArrayList(options.OutputFile), + resolve_results: *ResolveResults, + resolve_queue: ResolveQueue, + elapsed: i128 = 0, + needs_runtime: bool = false, + router: ?Router = null, + + linker: Linker, + timer: Timer = Timer{}, + env: *DotEnv.Loader, + + // must be pointer array because we can't we don't want the source to point to invalid memory if the array size is reallocated + virtual_modules: std.ArrayList(*ClientEntryPoint), + + pub const isCacheEnabled = cache_files; + + pub fn clone(this: *ThisBundler, allocator: *std.mem.Allocator, to: *ThisBundler) !void { + to.* = this.*; + to.setAllocator(allocator); + to.log = try allocator.create(logger.Log); + to.log.* = logger.Log.init(allocator); + to.setLog(to.log); + } - pub fn setAllocator(this: *ThisBundler, allocator: *std.mem.Allocator) void { - this.allocator = allocator; - this.linker.allocator = allocator; - this.resolver.allocator = allocator; - } + pub fn setLog(this: *ThisBundler, log: *logger.Log) void { + this.log = log; + this.linker.log = log; + this.resolver.log = log; + } - // to_bundle: + pub fn setAllocator(this: *ThisBundler, allocator: *std.mem.Allocator) void { + this.allocator = allocator; + this.linker.allocator = allocator; + this.resolver.allocator = allocator; + } - 
// thread_pool: *ThreadPool, + // to_bundle: - pub fn init( - allocator: *std.mem.Allocator, - log: *logger.Log, - opts: Api.TransformOptions, - existing_bundle: ?*NodeModuleBundle, - env_loader_: ?*DotEnv.Loader, - ) !ThisBundler { - js_ast.Expr.Data.Store.create(allocator); - js_ast.Stmt.Data.Store.create(allocator); - var fs = try Fs.FileSystem.init1( - allocator, - opts.absolute_working_dir, - ); - const bundle_options = try options.BundleOptions.fromApi( - allocator, - fs, - log, - opts, - existing_bundle, - ); + // thread_pool: *ThreadPool, - var env_loader = env_loader_ orelse brk: { - var map = try allocator.create(DotEnv.Map); - map.* = DotEnv.Map.init(allocator); + pub fn init( + allocator: *std.mem.Allocator, + log: *logger.Log, + opts: Api.TransformOptions, + existing_bundle: ?*NodeModuleBundle, + env_loader_: ?*DotEnv.Loader, + ) !ThisBundler { + js_ast.Expr.Data.Store.create(allocator); + js_ast.Stmt.Data.Store.create(allocator); + var fs = try Fs.FileSystem.init1( + allocator, + opts.absolute_working_dir, + ); + const bundle_options = try options.BundleOptions.fromApi( + allocator, + fs, + log, + opts, + existing_bundle, + ); - var loader = try allocator.create(DotEnv.Loader); - loader.* = DotEnv.Loader.init(map, allocator); - break :brk loader; - }; - // var pool = try allocator.create(ThreadPool); - // try pool.init(ThreadPool.InitConfig{ - // .allocator = allocator, - // }); - var resolve_results = try allocator.create(ResolveResults); - resolve_results.* = ResolveResults.init(allocator); - return ThisBundler{ - .options = bundle_options, - .fs = fs, - .allocator = allocator, - .resolver = Resolver.init1(allocator, log, fs, bundle_options), - .log = log, - // .thread_pool = pool, - .linker = undefined, - .result = options.TransformResult{ .outbase = bundle_options.output_dir }, - .resolve_results = resolve_results, - .resolve_queue = ResolveQueue.init(allocator), - .output_files = std.ArrayList(options.OutputFile).init(allocator), - .virtual_modules = std.ArrayList(*ClientEntryPoint).init(allocator), - .env = env_loader, - }; - } + var env_loader = env_loader_ orelse brk: { + var map = try allocator.create(DotEnv.Map); + map.* = DotEnv.Map.init(allocator); - pub fn configureLinker(bundler: *ThisBundler) void { - bundler.linker = Linker.init( - bundler.allocator, - bundler.log, - &bundler.resolve_queue, - &bundler.options, - &bundler.resolver, - bundler.resolve_results, - bundler.fs, - ); - } + var loader = try allocator.create(DotEnv.Loader); + loader.* = DotEnv.Loader.init(map, allocator); + break :brk loader; + }; + // var pool = try allocator.create(ThreadPool); + // try pool.init(ThreadPool.InitConfig{ + // .allocator = allocator, + // }); + var resolve_results = try allocator.create(ResolveResults); + resolve_results.* = ResolveResults.init(allocator); + return ThisBundler{ + .options = bundle_options, + .fs = fs, + .allocator = allocator, + .resolver = Resolver.init1(allocator, log, fs, bundle_options), + .log = log, + // .thread_pool = pool, + .linker = undefined, + .result = options.TransformResult{ .outbase = bundle_options.output_dir }, + .resolve_results = resolve_results, + .resolve_queue = ResolveQueue.init(allocator), + .output_files = std.ArrayList(options.OutputFile).init(allocator), + .virtual_modules = std.ArrayList(*ClientEntryPoint).init(allocator), + .env = env_loader, + }; + } - pub fn runEnvLoader(this: *ThisBundler) !void { - switch (this.options.env.behavior) { - .prefix, .load_all => { - // Step 1. Load the project root. 
- var dir: *Fs.FileSystem.DirEntry = ((this.resolver.readDirInfo(this.fs.top_level_dir) catch return) orelse return).getEntries() orelse return; + pub fn configureLinker(bundler: *ThisBundler) void { + bundler.linker = Linker.init( + bundler.allocator, + bundler.log, + &bundler.resolve_queue, + &bundler.options, + &bundler.resolver, + bundler.resolve_results, + bundler.fs, + ); + } - // Process always has highest priority. - this.env.loadProcess(); - if (this.options.production) { - try this.env.load(&this.fs.fs, dir, false); - } else { - try this.env.load(&this.fs.fs, dir, true); - } - }, - else => {}, - } + pub fn runEnvLoader(this: *ThisBundler) !void { + switch (this.options.env.behavior) { + .prefix, .load_all => { + // Step 1. Load the project root. + var dir: *Fs.FileSystem.DirEntry = ((this.resolver.readDirInfo(this.fs.top_level_dir) catch return) orelse return).getEntries() orelse return; + + // Process always has highest priority. + this.env.loadProcess(); + if (this.options.production) { + try this.env.load(&this.fs.fs, dir, false); + } else { + try this.env.load(&this.fs.fs, dir, true); + } + }, + else => {}, } + } - // This must be run after a framework is configured, if a framework is enabled - pub fn configureDefines(this: *ThisBundler) !void { - if (this.options.defines_loaded) { - return; - } + // This must be run after a framework is configured, if a framework is enabled + pub fn configureDefines(this: *ThisBundler) !void { + if (this.options.defines_loaded) { + return; + } - try this.runEnvLoader(); + try this.runEnvLoader(); - js_ast.Expr.Data.Store.create(this.allocator); - js_ast.Stmt.Data.Store.create(this.allocator); - defer js_ast.Expr.Data.Store.reset(); - defer js_ast.Stmt.Data.Store.reset(); + js_ast.Expr.Data.Store.create(this.allocator); + js_ast.Stmt.Data.Store.create(this.allocator); + defer js_ast.Expr.Data.Store.reset(); + defer js_ast.Stmt.Data.Store.reset(); - if (this.options.framework) |framework| { - if (this.options.platform.isClient()) { - try this.options.loadDefines(this.allocator, this.env, &framework.client.env); - } else { - try this.options.loadDefines(this.allocator, this.env, &framework.server.env); - } + if (this.options.framework) |framework| { + if (this.options.platform.isClient()) { + try this.options.loadDefines(this.allocator, this.env, &framework.client.env); } else { - try this.options.loadDefines(this.allocator, this.env, &this.options.env); + try this.options.loadDefines(this.allocator, this.env, &framework.server.env); } + } else { + try this.options.loadDefines(this.allocator, this.env, &this.options.env); } + } - pub fn configureFramework( - this: *ThisBundler, - comptime load_defines: bool, - ) !void { - if (this.options.framework) |*framework| { - if (framework.needsResolveFromPackage()) { - var route_config = this.options.routes; - var pair = PackageJSON.FrameworkRouterPair{ .framework = framework, .router = &route_config }; + pub fn configureFramework( + this: *ThisBundler, + comptime load_defines: bool, + ) !void { + if (this.options.framework) |*framework| { + if (framework.needsResolveFromPackage()) { + var route_config = this.options.routes; + var pair = PackageJSON.FrameworkRouterPair{ .framework = framework, .router = &route_config }; - if (framework.development) { - try this.resolver.resolveFramework(framework.package, &pair, .development, load_defines); - } else { - try this.resolver.resolveFramework(framework.package, &pair, .production, load_defines); - } + if (framework.development) { + try 
this.resolver.resolveFramework(framework.package, &pair, .development, load_defines); + } else { + try this.resolver.resolveFramework(framework.package, &pair, .production, load_defines); + } - if (this.options.areDefinesUnset()) { - if (this.options.platform.isClient()) { - this.options.env = framework.client.env; - } else { - this.options.env = framework.server.env; - } + if (this.options.areDefinesUnset()) { + if (this.options.platform.isClient()) { + this.options.env = framework.client.env; + } else { + this.options.env = framework.server.env; } + } - if (pair.loaded_routes) { - this.options.routes = route_config; - } - framework.resolved = true; - this.options.framework = framework.*; - } else if (!framework.resolved) { - Global.panic("directly passing framework path is not implemented yet!", .{}); + if (pair.loaded_routes) { + this.options.routes = route_config; } + framework.resolved = true; + this.options.framework = framework.*; + } else if (!framework.resolved) { + Global.panic("directly passing framework path is not implemented yet!", .{}); } } + } - pub fn configureFrameworkWithResolveResult(this: *ThisBundler, comptime client: bool) !?_resolver.Result { - if (this.options.framework != null) { - try this.configureFramework(true); - if (comptime client) { - if (this.options.framework.?.client.isEnabled()) { - return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.client.path, .stmt); - } + pub fn configureFrameworkWithResolveResult(this: *ThisBundler, comptime client: bool) !?_resolver.Result { + if (this.options.framework != null) { + try this.configureFramework(true); + if (comptime client) { + if (this.options.framework.?.client.isEnabled()) { + return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.client.path, .stmt); + } - if (this.options.framework.?.fallback.isEnabled()) { - return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.fallback.path, .stmt); - } - } else { - if (this.options.framework.?.server.isEnabled()) { - return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.server, .stmt); - } + if (this.options.framework.?.fallback.isEnabled()) { + return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.fallback.path, .stmt); + } + } else { + if (this.options.framework.?.server.isEnabled()) { + return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.server, .stmt); } } - - return null; } - pub fn configureRouter(this: *ThisBundler, comptime load_defines: bool) !void { - try this.configureFramework(load_defines); - defer { - if (load_defines) { - this.configureDefines() catch {}; - } + return null; + } + + pub fn configureRouter(this: *ThisBundler, comptime load_defines: bool) !void { + try this.configureFramework(load_defines); + defer { + if (load_defines) { + this.configureDefines() catch {}; } + } - // if you pass just a directory, activate the router configured for the pages directory - // for now: - // - "." is not supported - // - multiple pages directories is not supported - if (!this.options.routes.routes_enabled and this.options.entry_points.len == 1 and !this.options.serve) { - - // When inferring: - // - pages directory with a file extension is not supported. e.g. "pages.app/" won't work. 
- // This is a premature optimization to avoid this magical auto-detection we do here from meaningfully increasing startup time if you're just passing a file - // readDirInfo is a recursive lookup, top-down instead of bottom-up. It opens each folder handle and potentially reads the package.jsons - // So it is not fast! Unless it's already cached. - var paths = [_]string{std.mem.trimLeft(u8, this.options.entry_points[0], "./")}; - if (std.mem.indexOfScalar(u8, paths[0], '.') == null) { - var pages_dir_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; - var entry = this.fs.absBuf(&paths, &pages_dir_buf); - - if (std.fs.path.extension(entry).len == 0) { - allocators.constStrToU8(entry).ptr[entry.len] = '/'; - - // Only throw if they actually passed in a route config and the directory failed to load - var dir_info_ = this.resolver.readDirInfo(entry) catch return; - var dir_info = dir_info_ orelse return; - - this.options.routes.dir = dir_info.abs_path; - this.options.routes.extensions = std.mem.span(&options.RouteConfig.DefaultExtensions); - this.options.routes.routes_enabled = true; - this.router = try Router.init(this.fs, this.allocator, this.options.routes); - try this.router.?.loadRoutes( - dir_info, - Resolver, - &this.resolver, - std.math.maxInt(u16), - true, - ); - this.router.?.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled(); - return; - } + // if you pass just a directory, activate the router configured for the pages directory + // for now: + // - "." is not supported + // - multiple pages directories is not supported + if (!this.options.routes.routes_enabled and this.options.entry_points.len == 1 and !this.options.serve) { + + // When inferring: + // - pages directory with a file extension is not supported. e.g. "pages.app/" won't work. + // This is a premature optimization to avoid this magical auto-detection we do here from meaningfully increasing startup time if you're just passing a file + // readDirInfo is a recursive lookup, top-down instead of bottom-up. It opens each folder handle and potentially reads the package.jsons + // So it is not fast! Unless it's already cached. 
+ var paths = [_]string{std.mem.trimLeft(u8, this.options.entry_points[0], "./")}; + if (std.mem.indexOfScalar(u8, paths[0], '.') == null) { + var pages_dir_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + var entry = this.fs.absBuf(&paths, &pages_dir_buf); + + if (std.fs.path.extension(entry).len == 0) { + allocators.constStrToU8(entry).ptr[entry.len] = '/'; + + // Only throw if they actually passed in a route config and the directory failed to load + var dir_info_ = this.resolver.readDirInfo(entry) catch return; + var dir_info = dir_info_ orelse return; + + this.options.routes.dir = dir_info.abs_path; + this.options.routes.extensions = std.mem.span(&options.RouteConfig.DefaultExtensions); + this.options.routes.routes_enabled = true; + this.router = try Router.init(this.fs, this.allocator, this.options.routes); + try this.router.?.loadRoutes( + dir_info, + Resolver, + &this.resolver, + std.math.maxInt(u16), + true, + ); + this.router.?.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled(); + return; } - } else if (this.options.routes.routes_enabled) { - var dir_info_ = try this.resolver.readDirInfo(this.options.routes.dir); - var dir_info = dir_info_ orelse return error.MissingRoutesDir; - - this.options.routes.dir = dir_info.abs_path; - - this.router = try Router.init(this.fs, this.allocator, this.options.routes); - try this.router.?.loadRoutes(dir_info, Resolver, &this.resolver, std.math.maxInt(u16), true); - this.router.?.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled(); - return; } + } else if (this.options.routes.routes_enabled) { + var dir_info_ = try this.resolver.readDirInfo(this.options.routes.dir); + var dir_info = dir_info_ orelse return error.MissingRoutesDir; - // If we get this far, it means they're trying to run the bundler without a preconfigured router - if (this.options.entry_points.len > 0) { - this.options.routes.routes_enabled = false; - } + this.options.routes.dir = dir_info.abs_path; - if (this.router) |*router| { - router.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled(); - } + this.router = try Router.init(this.fs, this.allocator, this.options.routes); + try this.router.?.loadRoutes(dir_info, Resolver, &this.resolver, std.math.maxInt(u16), true); + this.router.?.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled(); + return; } - pub fn resetStore(bundler: *ThisBundler) void { - js_ast.Expr.Data.Store.reset(); - js_ast.Stmt.Data.Store.reset(); + // If we get this far, it means they're trying to run the bundler without a preconfigured router + if (this.options.entry_points.len > 0) { + this.options.routes.routes_enabled = false; } - pub const GenerateNodeModuleBundle = struct { - const BunQueue = NewBunQueue(_resolver.Result); - - pub const ThreadPool = struct { - // Hardcode 512 as max number of threads for now. 
- workers: [512]Worker = undefined, - workers_used: u32 = 0, - cpu_count: u32 = 0, - started_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), - stopped_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), - completed_count: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), - pub fn start(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void { - generator.bundler.env.loadProcess(); - - this.cpu_count = @truncate(u32, @divFloor((try std.Thread.getCpuCount()) + 1, 2)); - - if (generator.bundler.env.map.get("GOMAXPROCS")) |max_procs| { - if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count| { - this.cpu_count = std.math.min(this.cpu_count, cpu_count); - } else |err| {} - } + if (this.router) |*router| { + router.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled(); + } + } - if (this.cpu_count <= 1) return; + pub fn resetStore(bundler: *ThisBundler) void { + js_ast.Expr.Data.Store.reset(); + js_ast.Stmt.Data.Store.reset(); + } - while (this.workers_used < this.cpu_count) : (this.workers_used += 1) { - try this.workers[this.workers_used].init(generator); - } + pub const GenerateNodeModuleBundle = struct { + const BunQueue = NewBunQueue(_resolver.Result); + + pub const ThreadPool = struct { + // Hardcode 512 as max number of threads for now. + workers: [512]Worker = undefined, + workers_used: u32 = 0, + cpu_count: u32 = 0, + started_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), + stopped_workers: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), + completed_count: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), + pub fn start(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void { + generator.bundler.env.loadProcess(); + + this.cpu_count = @truncate(u32, @divFloor((try std.Thread.getCpuCount()) + 1, 2)); + + if (generator.bundler.env.map.get("GOMAXPROCS")) |max_procs| { + if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count| { + this.cpu_count = std.math.min(this.cpu_count, cpu_count); + } else |err| {} } - pub fn wait(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void { - if (this.cpu_count <= 1) { - var worker = generator.allocator.create(Worker) catch unreachable; - worker.* = Worker{ - .generator = generator, - .allocator = generator.allocator, - .data = generator.allocator.create(Worker.WorkerData) catch unreachable, - .thread_id = undefined, - .thread = undefined, - }; - worker.data.shared_buffer = try MutableString.init(generator.allocator, 0); - worker.data.scan_pass_result = js_parser.ScanPassResult.init(generator.allocator); - worker.data.log = generator.log; + if (this.cpu_count <= 1) return; - defer { - worker.data.deinit(generator.allocator); - } + while (this.workers_used < this.cpu_count) : (this.workers_used += 1) { + try this.workers[this.workers_used].init(generator); + } + } - while (generator.queue.next()) |item| { - try generator.processFile(worker, item); - } + pub fn wait(this: *ThreadPool, generator: *GenerateNodeModuleBundle) !void { + if (this.cpu_count <= 1) { + var worker = generator.allocator.create(Worker) catch unreachable; + worker.* = Worker{ + .generator = generator, + .allocator = generator.allocator, + .data = generator.allocator.create(Worker.WorkerData) catch unreachable, + .thread_id = undefined, + .thread = undefined, + }; + worker.data.shared_buffer = try MutableString.init(generator.allocator, 0); + worker.data.scan_pass_result = js_parser.ScanPassResult.init(generator.allocator); + worker.data.log = generator.log; - 
generator.estimated_input_lines_of_code = worker.data.estimated_input_lines_of_code; - return; + defer { + worker.data.deinit(generator.allocator); } - while (generator.queue.count.load(.SeqCst) != generator.pool.completed_count.load(.SeqCst)) { - var j: usize = 0; - while (j < 100) : (j += 1) {} - std.atomic.spinLoopHint(); + while (generator.queue.next()) |item| { + try generator.processFile(worker, item); } - for (this.workers[0..this.workers_used]) |*worker| { - @atomicStore(bool, &worker.quit, true, .Release); - } + generator.estimated_input_lines_of_code = worker.data.estimated_input_lines_of_code; + return; + } - while (this.stopped_workers.load(.Acquire) != this.workers_used) { - var j: usize = 0; - while (j < 100) : (j += 1) {} - std.atomic.spinLoopHint(); - } + while (generator.queue.count.load(.SeqCst) != generator.pool.completed_count.load(.SeqCst)) { + var j: usize = 0; + while (j < 100) : (j += 1) {} + std.atomic.spinLoopHint(); + } - for (this.workers[0..this.workers_used]) |*worker| { - worker.thread.join(); - } + for (this.workers[0..this.workers_used]) |*worker| { + @atomicStore(bool, &worker.quit, true, .Release); } - pub const Task = struct { - result: _resolver.Result, - generator: *GenerateNodeModuleBundle, - }; + while (this.stopped_workers.load(.Acquire) != this.workers_used) { + var j: usize = 0; + while (j < 100) : (j += 1) {} + std.atomic.spinLoopHint(); + } - pub const Worker = struct { - thread_id: std.Thread.Id, - thread: std.Thread, + for (this.workers[0..this.workers_used]) |*worker| { + worker.thread.join(); + } + } - allocator: *std.mem.Allocator, - generator: *GenerateNodeModuleBundle, - data: *WorkerData = undefined, - quit: bool = false, + pub const Task = struct { + result: _resolver.Result, + generator: *GenerateNodeModuleBundle, + }; - has_notify_started: bool = false, + pub const Worker = struct { + thread_id: std.Thread.Id, + thread: std.Thread, - pub const WorkerData = struct { - shared_buffer: MutableString = undefined, - scan_pass_result: js_parser.ScanPassResult = undefined, - log: *logger.Log, - estimated_input_lines_of_code: usize = 0, + allocator: *std.mem.Allocator, + generator: *GenerateNodeModuleBundle, + data: *WorkerData = undefined, + quit: bool = false, + + has_notify_started: bool = false, + + pub const WorkerData = struct { + shared_buffer: MutableString = undefined, + scan_pass_result: js_parser.ScanPassResult = undefined, + log: *logger.Log, + estimated_input_lines_of_code: usize = 0, + + pub fn deinit(this: *WorkerData, allocator: *std.mem.Allocator) void { + this.shared_buffer.deinit(); + this.scan_pass_result.named_imports.deinit(); + this.scan_pass_result.import_records.deinit(); + allocator.destroy(this); + } + }; - pub fn deinit(this: *WorkerData, allocator: *std.mem.Allocator) void { - this.shared_buffer.deinit(); - this.scan_pass_result.named_imports.deinit(); - this.scan_pass_result.import_records.deinit(); - allocator.destroy(this); - } - }; + pub fn init(worker: *Worker, generator: *GenerateNodeModuleBundle) !void { + worker.generator = generator; + worker.allocator = generator.allocator; + worker.thread = try std.Thread.spawn(.{}, Worker.run, .{worker}); + } - pub fn init(worker: *Worker, generator: *GenerateNodeModuleBundle) !void { - worker.generator = generator; - worker.allocator = generator.allocator; - worker.thread = try std.Thread.spawn(.{}, Worker.run, .{worker}); + pub fn notifyStarted(this: *Worker) void { + if (!this.has_notify_started) { + this.has_notify_started = true; + _ = 
this.generator.pool.started_workers.fetchAdd(1, .Release); + std.Thread.Futex.wake(&this.generator.pool.started_workers, std.math.maxInt(u32)); } + } - pub fn notifyStarted(this: *Worker) void { - if (!this.has_notify_started) { - this.has_notify_started = true; - _ = this.generator.pool.started_workers.fetchAdd(1, .Release); - std.Thread.Futex.wake(&this.generator.pool.started_workers, std.math.maxInt(u32)); - } + pub fn run(this: *Worker) void { + Output.Source.configureThread(); + this.thread_id = std.Thread.getCurrentId(); + if (isDebug) { + Output.prettyln("Thread started.\n", .{}); } - - pub fn run(this: *Worker) void { - Output.Source.configureThread(); - this.thread_id = std.Thread.getCurrentId(); + defer { if (isDebug) { - Output.prettyln("Thread started.\n", .{}); + Output.prettyln("Thread stopped.\n", .{}); } - defer { - if (isDebug) { - Output.prettyln("Thread stopped.\n", .{}); - } - Output.flush(); - } - - this.loop() catch |err| { - Output.prettyErrorln("<r><red>Error: {s}<r>", .{@errorName(err)}); - }; + Output.flush(); } - pub fn loop(this: *Worker) anyerror!void { - defer { - _ = this.generator.pool.stopped_workers.fetchAdd(1, .Release); - this.notifyStarted(); + this.loop() catch |err| { + Output.prettyErrorln("<r><red>Error: {s}<r>", .{@errorName(err)}); + }; + } - std.Thread.Futex.wake(&this.generator.pool.stopped_workers, 1); - // std.Thread.Futex.wake(&this.generator.queue.len, std.math.maxInt(u32)); - } + pub fn loop(this: *Worker) anyerror!void { + defer { + _ = this.generator.pool.stopped_workers.fetchAdd(1, .Release); + this.notifyStarted(); - js_ast.Expr.Data.Store.create(this.generator.allocator); - js_ast.Stmt.Data.Store.create(this.generator.allocator); - this.data = this.generator.allocator.create(WorkerData) catch unreachable; - this.data.* = WorkerData{ - .log = this.generator.allocator.create(logger.Log) catch unreachable, - .estimated_input_lines_of_code = 0, - }; - this.data.log.* = logger.Log.init(this.generator.allocator); - this.data.shared_buffer = try MutableString.init(this.generator.allocator, 0); - this.data.scan_pass_result = js_parser.ScanPassResult.init(this.generator.allocator); - - defer { - { - this.generator.log_lock.lock(); - this.data.log.appendTo(this.generator.log) catch {}; - this.generator.estimated_input_lines_of_code += this.data.estimated_input_lines_of_code; - this.generator.log_lock.unlock(); - } + std.Thread.Futex.wake(&this.generator.pool.stopped_workers, 1); + // std.Thread.Futex.wake(&this.generator.queue.len, std.math.maxInt(u32)); + } - this.data.deinit(this.generator.allocator); + js_ast.Expr.Data.Store.create(this.generator.allocator); + js_ast.Stmt.Data.Store.create(this.generator.allocator); + this.data = this.generator.allocator.create(WorkerData) catch unreachable; + this.data.* = WorkerData{ + .log = this.generator.allocator.create(logger.Log) catch unreachable, + .estimated_input_lines_of_code = 0, + }; + this.data.log.* = logger.Log.init(this.generator.allocator); + this.data.shared_buffer = try MutableString.init(this.generator.allocator, 0); + this.data.scan_pass_result = js_parser.ScanPassResult.init(this.generator.allocator); + + defer { + { + this.generator.log_lock.lock(); + this.data.log.appendTo(this.generator.log) catch {}; + this.generator.estimated_input_lines_of_code += this.data.estimated_input_lines_of_code; + this.generator.log_lock.unlock(); } - this.notifyStarted(); + this.data.deinit(this.generator.allocator); + } - while (!@atomicLoad(bool, &this.quit, .Acquire)) { - while 
(this.generator.queue.next()) |item| { - defer { - _ = this.generator.pool.completed_count.fetchAdd(1, .Release); - } + this.notifyStarted(); - try this.generator.processFile(this, item); + while (!@atomicLoad(bool, &this.quit, .Acquire)) { + while (this.generator.queue.next()) |item| { + defer { + _ = this.generator.pool.completed_count.fetchAdd(1, .Release); } + + try this.generator.processFile(this, item); } } - }; - }; - write_lock: Lock, - log_lock: Lock = Lock.init(), - module_list: std.ArrayList(Api.JavascriptBundledModule), - package_list: std.ArrayList(Api.JavascriptBundledPackage), - header_string_buffer: MutableString, - - // Just need to know if we've already enqueued this one - package_list_map: std.AutoHashMap(u64, u32), - queue: *BunQueue, - bundler: *ThisBundler, - allocator: *std.mem.Allocator, - tmpfile: std.fs.File, - log: *logger.Log, - pool: *ThreadPool, - tmpfile_byte_offset: u32 = 0, - code_end_byte_offset: u32 = 0, - has_jsx: bool = false, - estimated_input_lines_of_code: usize = 0, - - work_waiter: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), - list_lock: Lock = Lock.init(), - - dynamic_import_file_size_store: U32Map, - dynamic_import_file_size_store_lock: Lock, - - always_bundled_package_hashes: []u32 = &[_]u32{}, - always_bundled_package_jsons: []*const PackageJSON = &.{}, - - const U32Map = std.AutoHashMap(u32, u32); - pub const current_version: u32 = 1; - const dist_index_js_string_pointer = Api.StringPointer{ .length = "dist/index.js".len }; - const index_js_string_pointer = Api.StringPointer{ .length = "index.js".len, .offset = "dist/".len }; - - pub fn enqueueItem(this: *GenerateNodeModuleBundle, resolve: _resolver.Result) !void { - var result = resolve; - var path = result.path() orelse return; - - const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file; - if (!loader.isJavaScriptLikeOrJSON()) return; - path.* = try path.dupeAlloc(this.allocator); - - if (BundledModuleData.get(this, &result)) |mod| { - try this.queue.upsert(mod.module_id, result); - } else { - try this.queue.upsert(result.hash(this.bundler.fs.top_level_dir, loader), result); } - } - - // The Bun Bundle Format - // All the node_modules your app uses in a single compact file with metadata - // A binary JavaScript bundle format prioritizing generation time and deserialization time - pub const magic_bytes = "#!/usr/bin/env bun\n\n"; - // This makes it possible to do ./path-to-bundle on posix systems so you can see the raw JS contents - // https://en.wikipedia.org/wiki/Magic_number_(programming)#In_files - // Immediately after the magic bytes, the next character is a uint32 followed by a newline - // 0x00000000\n - // That uint32 denotes the byte offset in the file where the code for the bundle ends - // - If the value is 0, that means the file did not finish writing or there are no modules - // - This imposes a maximum bundle size of around 4,294,967,295 bytes. If your JS is more than 4 GB, it won't work. - // The raw JavaScript is encoded as a UTF-8 string starting from the current position + 1 until the above byte offset. - // This uint32 is useful for HTTP servers to separate: - // - Which part of the bundle is the JS code? - // - Which part is the metadata? - // Without needing to do a full pass through the file, or necessarily care about the metadata. - // The metadata is at the bottom of the file instead of the top because the metadata is written after all JS code in the bundle is written. - // The rationale there is: - // 1. 
We cannot prepend to a file without rewriting the entire file - // 2. The metadata is variable-length and that format will change often. - // 3. We won't have all the metadata until after all JS is finished writing - // If you have 32 MB of JavaScript dependencies, you really want to avoid reading the code in memory. - // - This lets you seek to the specific position in the file. - // - HTTP servers should use sendfile() instead of copying the file to userspace memory. - // So instead, we append metadata to the file after printing each node_module - // When there are no more modules to process, we generate the metadata - // To find the metadata, you look at the byte offset: initial_header[magic_bytes.len..initial_header.len - 1] - // Then, you add that number to initial_header.len - const initial_header = brk: { - var buf = std.mem.zeroes([magic_bytes.len + 5]u8); - std.mem.copy(u8, &buf, magic_bytes); - var remainder = buf[magic_bytes.len..]; - // Write an invalid byte offset to be updated after we finish generating the code - std.mem.writeIntNative(u32, remainder[0 .. remainder.len - 1], 0); - buf[buf.len - 1] = '\n'; - break :brk buf; }; - const code_start_byte_offset: u32 = initial_header.len; - // The specifics of the metadata is not documented here. You can find it in src/api/schema.peechy. - - pub fn appendHeaderString(generator: *GenerateNodeModuleBundle, str: string) !Api.StringPointer { - // This is so common we might as well just reuse it - // Plus this is one machine word so it's a quick comparison - if (strings.eqlComptime(str, "index.js")) { - return index_js_string_pointer; - } else if (strings.eqlComptime(str, "dist/index.js")) { - return dist_index_js_string_pointer; - } + }; + write_lock: Lock, + log_lock: Lock = Lock.init(), + module_list: std.ArrayList(Api.JavascriptBundledModule), + package_list: std.ArrayList(Api.JavascriptBundledPackage), + header_string_buffer: MutableString, + + // Just need to know if we've already enqueued this one + package_list_map: std.AutoHashMap(u64, u32), + queue: *BunQueue, + bundler: *ThisBundler, + allocator: *std.mem.Allocator, + tmpfile: std.fs.File, + log: *logger.Log, + pool: *ThreadPool, + tmpfile_byte_offset: u32 = 0, + code_end_byte_offset: u32 = 0, + has_jsx: bool = false, + estimated_input_lines_of_code: usize = 0, - var offset = generator.header_string_buffer.list.items.len; - try generator.header_string_buffer.append(str); - return Api.StringPointer{ - .offset = @truncate(u32, offset), - .length = @truncate(u32, str.len), - }; + work_waiter: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), + list_lock: Lock = Lock.init(), + + dynamic_import_file_size_store: U32Map, + dynamic_import_file_size_store_lock: Lock, + + always_bundled_package_hashes: []u32 = &[_]u32{}, + always_bundled_package_jsons: []*const PackageJSON = &.{}, + + const U32Map = std.AutoHashMap(u32, u32); + pub const current_version: u32 = 1; + const dist_index_js_string_pointer = Api.StringPointer{ .length = "dist/index.js".len }; + const index_js_string_pointer = Api.StringPointer{ .length = "index.js".len, .offset = "dist/".len }; + + pub fn enqueueItem(this: *GenerateNodeModuleBundle, resolve: _resolver.Result) !void { + var result = resolve; + var path = result.path() orelse return; + + const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file; + if (!loader.isJavaScriptLikeOrJSON()) return; + path.* = try path.dupeAlloc(this.allocator); + + if (BundledModuleData.get(this, &result)) |mod| { + try this.queue.upsert(mod.module_id, 
result); + } else { + try this.queue.upsert(result.hash(this.bundler.fs.top_level_dir, loader), result); } + } - pub fn generate( - bundler: *ThisBundler, - allocator: *std.mem.Allocator, - framework_config: ?Api.LoadedFramework, - route_config: ?Api.LoadedRouteConfig, - destination: [*:0]const u8, - estimated_input_lines_of_code: *usize, - ) !?Api.JavascriptBundleContainer { - var tmpdir: std.fs.Dir = try bundler.fs.fs.openTmpDir(); - var tmpname_buf: [64]u8 = undefined; - bundler.resetStore(); - try bundler.configureDefines(); - - const tmpname = try bundler.fs.tmpname( - ".bun", - std.mem.span(&tmpname_buf), - std.hash.Wyhash.hash(0, std.mem.span(destination)), - ); + // The Bun Bundle Format + // All the node_modules your app uses in a single compact file with metadata + // A binary JavaScript bundle format prioritizing generation time and deserialization time + pub const magic_bytes = "#!/usr/bin/env bun\n\n"; + // This makes it possible to do ./path-to-bundle on posix systems so you can see the raw JS contents + // https://en.wikipedia.org/wiki/Magic_number_(programming)#In_files + // Immediately after the magic bytes, the next character is a uint32 followed by a newline + // 0x00000000\n + // That uint32 denotes the byte offset in the file where the code for the bundle ends + // - If the value is 0, that means the file did not finish writing or there are no modules + // - This imposes a maximum bundle size of around 4,294,967,295 bytes. If your JS is more than 4 GB, it won't work. + // The raw JavaScript is encoded as a UTF-8 string starting from the current position + 1 until the above byte offset. + // This uint32 is useful for HTTP servers to separate: + // - Which part of the bundle is the JS code? + // - Which part is the metadata? + // Without needing to do a full pass through the file, or necessarily care about the metadata. + // The metadata is at the bottom of the file instead of the top because the metadata is written after all JS code in the bundle is written. + // The rationale there is: + // 1. We cannot prepend to a file without rewriting the entire file + // 2. The metadata is variable-length and that format will change often. + // 3. We won't have all the metadata until after all JS is finished writing + // If you have 32 MB of JavaScript dependencies, you really want to avoid reading the code in memory. + // - This lets you seek to the specific position in the file. + // - HTTP servers should use sendfile() instead of copying the file to userspace memory. + // So instead, we append metadata to the file after printing each node_module + // When there are no more modules to process, we generate the metadata + // To find the metadata, you look at the byte offset: initial_header[magic_bytes.len..initial_header.len - 1] + // Then, you add that number to initial_header.len + const initial_header = brk: { + var buf = std.mem.zeroes([magic_bytes.len + 5]u8); + std.mem.copy(u8, &buf, magic_bytes); + var remainder = buf[magic_bytes.len..]; + // Write an invalid byte offset to be updated after we finish generating the code + std.mem.writeIntNative(u32, remainder[0 .. remainder.len - 1], 0); + buf[buf.len - 1] = '\n'; + break :brk buf; + }; + const code_start_byte_offset: u32 = initial_header.len; + // The specifics of the metadata is not documented here. You can find it in src/api/schema.peechy. 
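The header layout documented in the comment block above is small enough to sanity-check with a short reader. The sketch below is not part of this diff: readCodeEndOffset is a hypothetical helper name, and it assumes the layout exactly as described (the magic bytes, a native-endian u32 giving the byte offset where the bundled JS ends, then a trailing newline).

const std = @import("std");

// Hypothetical reader for the .bun header described above (not in this diff).
pub fn readCodeEndOffset(file: std.fs.File) !u32 {
    const magic = "#!/usr/bin/env bun\n\n";
    var header: [magic.len + 5]u8 = undefined;
    if ((try file.readAll(&header)) != header.len) return error.InvalidBundle;
    if (!std.mem.eql(u8, header[0..magic.len], magic)) return error.InvalidBundle;

    // The u32 sits between the magic bytes and the final '\n'.
    const code_end = std.mem.readIntNative(u32, header[magic.len..][0..4]);
    // A zero offset means the writer never finished patching the header.
    if (code_end == 0) return error.IncompleteBundle;
    return code_end;
}

With that offset in hand, a server can sendfile() just the code range, or seek straight to the metadata, without reading the JavaScript into memory, which is the stated point of keeping the offset at a fixed position at the top of the file.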
+ + pub fn appendHeaderString(generator: *GenerateNodeModuleBundle, str: string) !Api.StringPointer { + // This is so common we might as well just reuse it + // Plus this is one machine word so it's a quick comparison + if (strings.eqlComptime(str, "index.js")) { + return index_js_string_pointer; + } else if (strings.eqlComptime(str, "dist/index.js")) { + return dist_index_js_string_pointer; + } - var tmpfile = try tmpdir.createFileZ(tmpname, .{ .read = isDebug, .exclusive = true }); + var offset = generator.header_string_buffer.list.items.len; + try generator.header_string_buffer.append(str); + return Api.StringPointer{ + .offset = @truncate(u32, offset), + .length = @truncate(u32, str.len), + }; + } - errdefer { - tmpfile.close(); - tmpdir.deleteFile(std.mem.span(tmpname)) catch {}; - } + pub fn generate( + bundler: *ThisBundler, + allocator: *std.mem.Allocator, + framework_config: ?Api.LoadedFramework, + route_config: ?Api.LoadedRouteConfig, + destination: [*:0]const u8, + estimated_input_lines_of_code: *usize, + ) !?Api.JavascriptBundleContainer { + var tmpdir: std.fs.Dir = try bundler.fs.fs.openTmpDir(); + var tmpname_buf: [64]u8 = undefined; + bundler.resetStore(); + try bundler.configureDefines(); - var generator = try allocator.create(GenerateNodeModuleBundle); - var queue = try BunQueue.init(allocator); - defer allocator.destroy(generator); - generator.* = GenerateNodeModuleBundle{ - .module_list = std.ArrayList(Api.JavascriptBundledModule).init(allocator), - .package_list = std.ArrayList(Api.JavascriptBundledPackage).init(allocator), - .header_string_buffer = try MutableString.init(allocator, "dist/index.js".len), - .allocator = allocator, - .queue = queue, - .estimated_input_lines_of_code = 0, - // .resolve_queue = queue, - .bundler = bundler, - .tmpfile = tmpfile, - - .dynamic_import_file_size_store = U32Map.init(allocator), - .dynamic_import_file_size_store_lock = Lock.init(), - .log = bundler.log, - .package_list_map = std.AutoHashMap(u64, u32).init(allocator), - .pool = undefined, - .write_lock = Lock.init(), - }; - // dist/index.js appears more common than /index.js - // but this means we can store both "dist/index.js" and "index.js" in one. 
- try generator.header_string_buffer.append("dist/index.js"); - try generator.package_list_map.ensureTotalCapacity(128); - var pool = try allocator.create(ThreadPool); - pool.* = ThreadPool{}; - generator.pool = pool; - - var this = generator; - // Always inline the runtime into the bundle - try generator.appendBytes(&initial_header); - // If we try to be smart and rely on .written, it turns out incorrect - const code_start_pos = try this.tmpfile.getPos(); - if (isDebug) { - try generator.appendBytes(runtime.Runtime.sourceContent()); - try generator.appendBytes("\n\n"); - } else { - try generator.appendBytes(comptime runtime.Runtime.sourceContent() ++ "\n\n"); - } + const tmpname = try bundler.fs.tmpname( + ".bun", + std.mem.span(&tmpname_buf), + std.hash.Wyhash.hash(0, std.mem.span(destination)), + ); - if (bundler.log.level == .verbose) { - bundler.resolver.debug_logs = try DebugLogs.init(allocator); - } + var tmpfile = try tmpdir.createFileZ(tmpname, .{ .read = isDebug, .exclusive = true }); - always_bundled: { - const root_package_json_resolved: _resolver.Result = bundler.resolver.resolve(bundler.fs.top_level_dir, "./package.json", .stmt) catch |err| { - generator.log.addWarning(null, logger.Loc.Empty, "Please run `bun bun` from a directory containing a package.json.") catch unreachable; - break :always_bundled; - }; - const root_package_json = root_package_json_resolved.package_json orelse brk: { - const read_dir = (bundler.resolver.readDirInfo(bundler.fs.top_level_dir) catch unreachable).?; - break :brk read_dir.package_json.?; - }; - if (root_package_json.always_bundle.len > 0) { - var always_bundled_package_jsons = bundler.allocator.alloc(*PackageJSON, root_package_json.always_bundle.len) catch unreachable; - var always_bundled_package_hashes = bundler.allocator.alloc(u32, root_package_json.always_bundle.len) catch unreachable; - var i: u16 = 0; - - inner: for (root_package_json.always_bundle) |name| { - std.mem.copy(u8, &tmp_buildfile_buf, name); - std.mem.copy(u8, tmp_buildfile_buf[name.len..], "/package.json"); - const package_json_import = tmp_buildfile_buf[0 .. name.len + "/package.json".len]; - const result = bundler.resolver.resolve(bundler.fs.top_level_dir, package_json_import, .stmt) catch |err| { - generator.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving always bundled module \"{s}\"", .{ @errorName(err), name }) catch unreachable; - continue :inner; - }; + errdefer { + tmpfile.close(); + tmpdir.deleteFile(std.mem.span(tmpname)) catch {}; + } - var package_json: *PackageJSON = result.package_json orelse brk: { - const read_dir = (bundler.resolver.readDirInfo(package_json_import) catch unreachable).?; - if (read_dir.package_json == null) { - generator.log.addWarningFmt(null, logger.Loc.Empty, bundler.allocator, "{s} missing package.json. 
It will not be bundled", .{name}) catch unreachable; - continue :inner; - } - break :brk read_dir.package_json.?; - }; + var generator = try allocator.create(GenerateNodeModuleBundle); + var queue = try BunQueue.init(allocator); + defer allocator.destroy(generator); + generator.* = GenerateNodeModuleBundle{ + .module_list = std.ArrayList(Api.JavascriptBundledModule).init(allocator), + .package_list = std.ArrayList(Api.JavascriptBundledPackage).init(allocator), + .header_string_buffer = try MutableString.init(allocator, "dist/index.js".len), + .allocator = allocator, + .queue = queue, + .estimated_input_lines_of_code = 0, + // .resolve_queue = queue, + .bundler = bundler, + .tmpfile = tmpfile, + + .dynamic_import_file_size_store = U32Map.init(allocator), + .dynamic_import_file_size_store_lock = Lock.init(), + .log = bundler.log, + .package_list_map = std.AutoHashMap(u64, u32).init(allocator), + .pool = undefined, + .write_lock = Lock.init(), + }; + // dist/index.js appears more common than /index.js + // but this means we can store both "dist/index.js" and "index.js" in one. + try generator.header_string_buffer.append("dist/index.js"); + try generator.package_list_map.ensureTotalCapacity(128); + var pool = try allocator.create(ThreadPool); + pool.* = ThreadPool{}; + generator.pool = pool; + + var this = generator; + // Always inline the runtime into the bundle + try generator.appendBytes(&initial_header); + // If we try to be smart and rely on .written, it turns out incorrect + const code_start_pos = try this.tmpfile.getPos(); + if (isDebug) { + try generator.appendBytes(runtime.Runtime.sourceContent()); + try generator.appendBytes("\n\n"); + } else { + try generator.appendBytes(comptime runtime.Runtime.sourceContent() ++ "\n\n"); + } - package_json.source.key_path = result.path_pair.primary; - - // if (!strings.contains(result.path_pair.primary.text, package_json.name)) { - // generator.log.addErrorFmt( - // null, - // logger.Loc.Empty, - // bundler.allocator, - // "Bundling \"{s}\" is not supported because the package isn.\n To fix this, move the package's code to a directory containing the name.\n Location: \"{s}\"", - // .{ - // name, - // name, - // result.path_pair.primary.text, - // }, - // ) catch unreachable; - // continue :inner; - // } + if (bundler.log.level == .verbose) { + bundler.resolver.debug_logs = try DebugLogs.init(allocator); + } - always_bundled_package_jsons[i] = package_json; - always_bundled_package_hashes[i] = package_json.hash; - i += 1; - } - generator.always_bundled_package_hashes = always_bundled_package_hashes[0..i]; - generator.always_bundled_package_jsons = always_bundled_package_jsons[0..i]; + always_bundled: { + const root_package_json_resolved: _resolver.Result = bundler.resolver.resolve(bundler.fs.top_level_dir, "./package.json", .stmt) catch |err| { + generator.log.addWarning(null, logger.Loc.Empty, "Please run `bun bun` from a directory containing a package.json.") catch unreachable; + break :always_bundled; + }; + const root_package_json = root_package_json_resolved.package_json orelse brk: { + const read_dir = (bundler.resolver.readDirInfo(bundler.fs.top_level_dir) catch unreachable).?; + break :brk read_dir.package_json.?; + }; + if (root_package_json.always_bundle.len > 0) { + var always_bundled_package_jsons = bundler.allocator.alloc(*PackageJSON, root_package_json.always_bundle.len) catch unreachable; + var always_bundled_package_hashes = bundler.allocator.alloc(u32, root_package_json.always_bundle.len) catch unreachable; + var i: u16 = 0; + + 
inner: for (root_package_json.always_bundle) |name| { + std.mem.copy(u8, &tmp_buildfile_buf, name); + std.mem.copy(u8, tmp_buildfile_buf[name.len..], "/package.json"); + const package_json_import = tmp_buildfile_buf[0 .. name.len + "/package.json".len]; + const result = bundler.resolver.resolve(bundler.fs.top_level_dir, package_json_import, .stmt) catch |err| { + generator.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving always bundled module \"{s}\"", .{ @errorName(err), name }) catch unreachable; + continue :inner; + }; + + var package_json: *PackageJSON = result.package_json orelse brk: { + const read_dir = (bundler.resolver.readDirInfo(package_json_import) catch unreachable).?; + if (read_dir.package_json == null) { + generator.log.addWarningFmt(null, logger.Loc.Empty, bundler.allocator, "{s} missing package.json. It will not be bundled", .{name}) catch unreachable; + continue :inner; + } + break :brk read_dir.package_json.?; + }; + + package_json.source.key_path = result.path_pair.primary; + + // if (!strings.contains(result.path_pair.primary.text, package_json.name)) { + // generator.log.addErrorFmt( + // null, + // logger.Loc.Empty, + // bundler.allocator, + // "Bundling \"{s}\" is not supported because the package isn.\n To fix this, move the package's code to a directory containing the name.\n Location: \"{s}\"", + // .{ + // name, + // name, + // result.path_pair.primary.text, + // }, + // ) catch unreachable; + // continue :inner; + // } + + always_bundled_package_jsons[i] = package_json; + always_bundled_package_hashes[i] = package_json.hash; + i += 1; } + generator.always_bundled_package_hashes = always_bundled_package_hashes[0..i]; + generator.always_bundled_package_jsons = always_bundled_package_jsons[0..i]; } - if (generator.log.errors > 0) return error.BundleFailed; - - const include_refresh_runtime = - !this.bundler.options.production and - this.bundler.options.jsx.supports_fast_refresh and - bundler.options.platform.isWebLike(); - - const resolve_queue_estimate = bundler.options.entry_points.len + - @intCast(usize, @boolToInt(framework_config != null)) + - @intCast(usize, @boolToInt(include_refresh_runtime)) + - @intCast(usize, @boolToInt(bundler.options.jsx.parse)); - - if (bundler.router) |router| { - defer this.bundler.resetStore(); - - const entry_points = try router.getEntryPoints(allocator); - for (entry_points) |entry_point| { - const source_dir = bundler.fs.top_level_dir; - const resolved = try bundler.linker.resolver.resolve(source_dir, entry_point, .entry_point); - try this.enqueueItem(resolved); - } - this.bundler.resetStore(); - } else {} + } + if (generator.log.errors > 0) return error.BundleFailed; + + const include_refresh_runtime = + !this.bundler.options.production and + this.bundler.options.jsx.supports_fast_refresh and + bundler.options.platform.isWebLike(); - for (bundler.options.entry_points) |entry_point| { - if (bundler.options.platform == .bun) continue; - defer this.bundler.resetStore(); + const resolve_queue_estimate = bundler.options.entry_points.len + + @intCast(usize, @boolToInt(framework_config != null)) + + @intCast(usize, @boolToInt(include_refresh_runtime)) + + @intCast(usize, @boolToInt(bundler.options.jsx.parse)); - const entry_point_path = bundler.normalizeEntryPointPath(entry_point); + if (bundler.router) |router| { + defer this.bundler.resetStore(); + + const entry_points = try router.getEntryPoints(allocator); + for (entry_points) |entry_point| { const source_dir = bundler.fs.top_level_dir; const resolved = 
try bundler.linker.resolver.resolve(source_dir, entry_point, .entry_point); try this.enqueueItem(resolved); } + this.bundler.resetStore(); + } else {} - if (framework_config) |conf| { - defer this.bundler.resetStore(); + for (bundler.options.entry_points) |entry_point| { + if (bundler.options.platform == .bun) continue; + defer this.bundler.resetStore(); - try this.bundler.configureFramework(true); - if (bundler.options.framework) |framework| { - if (framework.override_modules.keys.len > 0) { - bundler.options.framework.?.override_modules_hashes = allocator.alloc(u64, framework.override_modules.keys.len) catch unreachable; - for (framework.override_modules.keys) |key, i| { - bundler.options.framework.?.override_modules_hashes[i] = std.hash.Wyhash.hash(0, key); - } - } - if (bundler.options.platform == .bun) { - if (framework.server.isEnabled()) { - const resolved = try bundler.linker.resolver.resolve( - bundler.fs.top_level_dir, - framework.server.path, - .entry_point, - ); - try this.enqueueItem(resolved); - } - } else { - if (framework.client.isEnabled()) { - const resolved = try bundler.linker.resolver.resolve( - bundler.fs.top_level_dir, - framework.client.path, - .entry_point, - ); - try this.enqueueItem(resolved); - } + const entry_point_path = bundler.normalizeEntryPointPath(entry_point); + const source_dir = bundler.fs.top_level_dir; + const resolved = try bundler.linker.resolver.resolve(source_dir, entry_point, .entry_point); + try this.enqueueItem(resolved); + } - if (framework.fallback.isEnabled()) { - const resolved = try bundler.linker.resolver.resolve( - bundler.fs.top_level_dir, - framework.fallback.path, - .entry_point, - ); - try this.enqueueItem(resolved); - } + if (framework_config) |conf| { + defer this.bundler.resetStore(); + + try this.bundler.configureFramework(true); + if (bundler.options.framework) |framework| { + if (framework.override_modules.keys.len > 0) { + bundler.options.framework.?.override_modules_hashes = allocator.alloc(u64, framework.override_modules.keys.len) catch unreachable; + for (framework.override_modules.keys) |key, i| { + bundler.options.framework.?.override_modules_hashes[i] = std.hash.Wyhash.hash(0, key); } } - } else {} - - // Normally, this is automatic - // However, since we only do the parsing pass, it may not get imported automatically. 
- if (bundler.options.jsx.parse) { - defer this.bundler.resetStore(); - if (this.bundler.resolver.resolve( - this.bundler.fs.top_level_dir, - this.bundler.options.jsx.import_source, - .require, - )) |new_jsx_runtime| { - try this.enqueueItem(new_jsx_runtime); - } else |err| {} - } + if (bundler.options.platform == .bun) { + if (framework.server.isEnabled()) { + const resolved = try bundler.linker.resolver.resolve( + bundler.fs.top_level_dir, + framework.server.path, + .entry_point, + ); + try this.enqueueItem(resolved); + } + } else { + if (framework.client.isEnabled()) { + const resolved = try bundler.linker.resolver.resolve( + bundler.fs.top_level_dir, + framework.client.path, + .entry_point, + ); + try this.enqueueItem(resolved); + } - var refresh_runtime_module_id: u32 = 0; - if (include_refresh_runtime) { - defer this.bundler.resetStore(); - - if (this.bundler.resolver.resolve( - this.bundler.fs.top_level_dir, - this.bundler.options.jsx.refresh_runtime, - .require, - )) |refresh_runtime| { - try this.enqueueItem(refresh_runtime); - if (BundledModuleData.get(this, &refresh_runtime)) |mod| { - refresh_runtime_module_id = mod.module_id; + if (framework.fallback.isEnabled()) { + const resolved = try bundler.linker.resolver.resolve( + bundler.fs.top_level_dir, + framework.fallback.path, + .entry_point, + ); + try this.enqueueItem(resolved); } - } else |err| {} + } } + } else {} - this.bundler.resetStore(); + // Normally, this is automatic + // However, since we only do the parsing pass, it may not get imported automatically. + if (bundler.options.jsx.parse) { + defer this.bundler.resetStore(); + if (this.bundler.resolver.resolve( + this.bundler.fs.top_level_dir, + this.bundler.options.jsx.import_source, + .require, + )) |new_jsx_runtime| { + try this.enqueueItem(new_jsx_runtime); + } else |err| {} + } - try this.pool.start(this); - try this.pool.wait(this); - estimated_input_lines_of_code.* = generator.estimated_input_lines_of_code; + var refresh_runtime_module_id: u32 = 0; + if (include_refresh_runtime) { + defer this.bundler.resetStore(); - // if (comptime !isRelease) { - // this.queue.checkDuplicatesSlow(); - // } + if (this.bundler.resolver.resolve( + this.bundler.fs.top_level_dir, + this.bundler.options.jsx.refresh_runtime, + .require, + )) |refresh_runtime| { + try this.enqueueItem(refresh_runtime); + if (BundledModuleData.get(this, &refresh_runtime)) |mod| { + refresh_runtime_module_id = mod.module_id; + } + } else |err| {} + } - if (this.log.errors > 0) { - tmpfile.close(); - tmpdir.deleteFile(std.mem.span(tmpname)) catch {}; - // We stop here because if there are errors we don't know if the bundle is valid - // This manifests as a crash when sorting through the module list because we may have added files to the bundle which were never actually finished being added. 
- return null; - } + this.bundler.resetStore(); - // Delay by one tick so that the rest of the file loads first - if (include_refresh_runtime and refresh_runtime_module_id > 0) { - var refresh_runtime_injector_buf: [1024]u8 = undefined; - var fixed_buffer = std.io.fixedBufferStream(&refresh_runtime_injector_buf); - var fixed_buffer_writer = fixed_buffer.writer(); - - fixed_buffer_writer.print( - \\if ('window' in globalThis) {{ - \\ (async function() {{ - \\ BUN_RUNTIME.__injectFastRefresh(${x}()); - \\ }})(); - \\}} - , - .{refresh_runtime_module_id}, - ) catch unreachable; - try this.tmpfile.writeAll(fixed_buffer.buffer[0..fixed_buffer.pos]); - } + try this.pool.start(this); + try this.pool.wait(this); + estimated_input_lines_of_code.* = generator.estimated_input_lines_of_code; - // Ensure we never overflow - this.code_end_byte_offset = @truncate( - u32, - // Doing this math ourself seems to not necessarily produce correct results - (try this.tmpfile.getPos()), - ); - - var javascript_bundle_container = std.mem.zeroes(Api.JavascriptBundleContainer); + // if (comptime !isRelease) { + // this.queue.checkDuplicatesSlow(); + // } - std.sort.sort( - Api.JavascriptBundledModule, - this.module_list.items, - this, - GenerateNodeModuleBundle.sortJavascriptModuleByPath, - ); + if (this.log.errors > 0) { + tmpfile.close(); + tmpdir.deleteFile(std.mem.span(tmpname)) catch {}; + // We stop here because if there are errors we don't know if the bundle is valid + // This manifests as a crash when sorting through the module list because we may have added files to the bundle which were never actually finished being added. + return null; + } - if (comptime isDebug) { - const SeenHash = std.AutoHashMap(u64, void); - var map = SeenHash.init(this.allocator); - var ids = SeenHash.init(this.allocator); - try map.ensureTotalCapacity(@truncate(u32, this.module_list.items.len)); - try ids.ensureTotalCapacity(@truncate(u32, this.module_list.items.len)); - - for (this.module_list.items) |a| { - const a_pkg: Api.JavascriptBundledPackage = this.package_list.items[a.package_id]; - const a_name = this.metadataStringPointer(a_pkg.name); - const a_version = this.metadataStringPointer(a_pkg.version); - const a_path = this.metadataStringPointer(a.path); - - std.debug.assert(a_name.len > 0); - std.debug.assert(a_version.len > 0); - std.debug.assert(a_path.len > 0); - var hash_print = std.mem.zeroes([4096]u8); - const hash = std.hash.Wyhash.hash(0, std.fmt.bufPrint(&hash_print, "{s}@{s}/{s}", .{ a_name, a_version, a_path }) catch unreachable); - var result1 = map.getOrPutAssumeCapacity(hash); - std.debug.assert(!result1.found_existing); - - var result2 = ids.getOrPutAssumeCapacity(a.id); - std.debug.assert(!result2.found_existing); - } - } + // Delay by one tick so that the rest of the file loads first + if (include_refresh_runtime and refresh_runtime_module_id > 0) { + var refresh_runtime_injector_buf: [1024]u8 = undefined; + var fixed_buffer = std.io.fixedBufferStream(&refresh_runtime_injector_buf); + var fixed_buffer_writer = fixed_buffer.writer(); + + fixed_buffer_writer.print( + \\if ('window' in globalThis) {{ + \\ (async function() {{ + \\ BUN_RUNTIME.__injectFastRefresh(${x}()); + \\ }})(); + \\}} + , + .{refresh_runtime_module_id}, + ) catch unreachable; + try this.tmpfile.writeAll(fixed_buffer.buffer[0..fixed_buffer.pos]); + } - var hasher = std.hash.Wyhash.init(0); - - // We want to sort the packages as well as the files - // The modules sort the packages already - // So can just copy it in the below loop. 
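(Annotation: the refresh-runtime injector above formats into a fixed stack buffer and then writes only buffer[0..pos], avoiding a heap allocation per injection. A minimal sketch of that std.io.fixedBufferStream pattern:)

    const std = @import("std");

    pub fn main() !void {
        // Format into a stack buffer, then write only the bytes actually produced.
        var buf: [1024]u8 = undefined;
        var stream = std.io.fixedBufferStream(&buf);
        const module_id: u32 = 0xabcd;

        try stream.writer().print(
            \\if ('window' in globalThis) {{
            \\  BUN_RUNTIME.__injectFastRefresh(${x}());
            \\}}
        , .{module_id});

        try std.io.getStdOut().writeAll(stream.buffer[0..stream.pos]);
    }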
- var sorted_package_list = try allocator.alloc(Api.JavascriptBundledPackage, this.package_list.items.len); - - // At this point, the module_list is sorted. - if (this.module_list.items.len > 0) { - var package_id_i: u32 = 0; - var i: usize = 0; - // Assumption: node_modules are immutable - // Assumption: module files are immutable - // (They're not. But, for our purposes that's okay) - // The etag is: - // - The hash of each module's path in sorted order - // - The hash of each module's code size in sorted order - // - hash(hash(package_name, package_version)) - // If this doesn't prove strong enough, we will do a proper content hash - // But I want to avoid that overhead unless proven necessary. - // There's a good chance we don't even strictly need an etag here. - var bytes: [4]u8 = undefined; - while (i < this.module_list.items.len) { - var current_package_id = this.module_list.items[i].package_id; - this.module_list.items[i].package_id = package_id_i; - var offset = @truncate(u32, i); + // Ensure we never overflow + this.code_end_byte_offset = @truncate( + u32, + // Doing this math ourself seems to not necessarily produce correct results + (try this.tmpfile.getPos()), + ); - i += 1; + var javascript_bundle_container = std.mem.zeroes(Api.JavascriptBundleContainer); - while (i < this.module_list.items.len and this.module_list.items[i].package_id == current_package_id) : (i += 1) { - this.module_list.items[i].package_id = package_id_i; - // Hash the file path - hasher.update(this.metadataStringPointer(this.module_list.items[i].path)); - // Then the length of the code - std.mem.writeIntNative(u32, &bytes, this.module_list.items[i].code.length); - hasher.update(&bytes); - } + std.sort.sort( + Api.JavascriptBundledModule, + this.module_list.items, + this, + GenerateNodeModuleBundle.sortJavascriptModuleByPath, + ); - this.package_list.items[current_package_id].modules_offset = offset; - this.package_list.items[current_package_id].modules_length = @truncate(u32, i) - offset; + if (comptime isDebug) { + const SeenHash = std.AutoHashMap(u64, void); + var map = SeenHash.init(this.allocator); + var ids = SeenHash.init(this.allocator); + try map.ensureTotalCapacity(@truncate(u32, this.module_list.items.len)); + try ids.ensureTotalCapacity(@truncate(u32, this.module_list.items.len)); + + for (this.module_list.items) |a| { + const a_pkg: Api.JavascriptBundledPackage = this.package_list.items[a.package_id]; + const a_name = this.metadataStringPointer(a_pkg.name); + const a_version = this.metadataStringPointer(a_pkg.version); + const a_path = this.metadataStringPointer(a.path); + + std.debug.assert(a_name.len > 0); + std.debug.assert(a_version.len > 0); + std.debug.assert(a_path.len > 0); + var hash_print = std.mem.zeroes([4096]u8); + const hash = std.hash.Wyhash.hash(0, std.fmt.bufPrint(&hash_print, "{s}@{s}/{s}", .{ a_name, a_version, a_path }) catch unreachable); + var result1 = map.getOrPutAssumeCapacity(hash); + std.debug.assert(!result1.found_existing); + + var result2 = ids.getOrPutAssumeCapacity(a.id); + std.debug.assert(!result2.found_existing); + } + } - // Hash the hash of the package name - // it's hash(hash(package_name, package_version)) - std.mem.writeIntNative(u32, &bytes, this.package_list.items[current_package_id].hash); + var hasher = std.hash.Wyhash.init(0); + + // We want to sort the packages as well as the files + // The modules sort the packages already + // So can just copy it in the below loop. 
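(Annotation: a condensed sketch of the etag recipe spelled out in the comments above — hash each sorted module's path and code length plus each package's hash, then hex-encode the final u64. Module here is a stand-in for Api.JavascriptBundledModule:)

    const std = @import("std");

    const Module = struct { path: []const u8, code_len: u32, package_hash: u32 };

    pub fn main() !void {
        const modules = [_]Module{
            .{ .path = "react/index.js", .code_len = 1024, .package_hash = 0xaa },
            .{ .path = "react/jsx.js", .code_len = 512, .package_hash = 0xaa },
        };

        var hasher = std.hash.Wyhash.init(0);
        var bytes: [4]u8 = undefined;
        for (modules) |mod| {
            hasher.update(mod.path);
            std.mem.writeIntNative(u32, &bytes, mod.code_len);
            hasher.update(&bytes);
        }
        // One package in this toy input; the real loop hashes each package's hash.
        std.mem.writeIntNative(u32, &bytes, modules[0].package_hash);
        hasher.update(&bytes);

        var etag_buf: [16]u8 = undefined;
        const etag = try std.fmt.bufPrint(&etag_buf, "{x}", .{hasher.final()});
        std.debug.print("etag: {s}\n", .{etag});
    }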
+ var sorted_package_list = try allocator.alloc(Api.JavascriptBundledPackage, this.package_list.items.len); + + // At this point, the module_list is sorted. + if (this.module_list.items.len > 0) { + var package_id_i: u32 = 0; + var i: usize = 0; + // Assumption: node_modules are immutable + // Assumption: module files are immutable + // (They're not. But, for our purposes that's okay) + // The etag is: + // - The hash of each module's path in sorted order + // - The hash of each module's code size in sorted order + // - hash(hash(package_name, package_version)) + // If this doesn't prove strong enough, we will do a proper content hash + // But I want to avoid that overhead unless proven necessary. + // There's a good chance we don't even strictly need an etag here. + var bytes: [4]u8 = undefined; + while (i < this.module_list.items.len) { + var current_package_id = this.module_list.items[i].package_id; + this.module_list.items[i].package_id = package_id_i; + var offset = @truncate(u32, i); + + i += 1; + + while (i < this.module_list.items.len and this.module_list.items[i].package_id == current_package_id) : (i += 1) { + this.module_list.items[i].package_id = package_id_i; + // Hash the file path + hasher.update(this.metadataStringPointer(this.module_list.items[i].path)); + // Then the length of the code + std.mem.writeIntNative(u32, &bytes, this.module_list.items[i].code.length); hasher.update(&bytes); - - sorted_package_list[package_id_i] = this.package_list.items[current_package_id]; - package_id_i += 1; } - } - var javascript_bundle = std.mem.zeroes(Api.JavascriptBundle); - javascript_bundle.modules = this.module_list.items; - javascript_bundle.packages = sorted_package_list; - javascript_bundle.manifest_string = this.header_string_buffer.list.items; - const etag_u64 = hasher.final(); - // We store the etag as a ascii hex encoded u64 - // This is so we can send the bytes directly in the HTTP server instead of formatting it as hex each time. - javascript_bundle.etag = try std.fmt.allocPrint(allocator, "{x}", .{etag_u64}); - javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp())); - - const basename = std.fs.path.basename(std.mem.span(destination)); - const extname = std.fs.path.extension(basename); - javascript_bundle.import_from_name = if (bundler.options.platform == .bun) - "/node_modules.server.bun" - else - try std.fmt.allocPrint( - this.allocator, - "/{s}.{x}.bun", - .{ - basename[0 .. 
basename.len - extname.len], - etag_u64, - }, - ); + this.package_list.items[current_package_id].modules_offset = offset; + this.package_list.items[current_package_id].modules_length = @truncate(u32, i) - offset; - javascript_bundle_container.bundle_format_version = current_version; - javascript_bundle_container.bundle = javascript_bundle; - javascript_bundle_container.code_length = this.code_end_byte_offset; - javascript_bundle_container.framework = framework_config; - javascript_bundle_container.routes = route_config; - - var start_pos = try this.tmpfile.getPos(); - var tmpwriter = std.io.bufferedWriter(this.tmpfile.writer()); - const SchemaWriter = schema.Writer(@TypeOf(tmpwriter.writer())); - var schema_file_writer = SchemaWriter.init(tmpwriter.writer()); - try javascript_bundle_container.encode(&schema_file_writer); - try tmpwriter.flush(); - - // sanity check - if (isDebug) { - try this.tmpfile.seekTo(start_pos); - var contents = try allocator.alloc(u8, (try this.tmpfile.getEndPos()) - start_pos); - var read_bytes = try this.tmpfile.read(contents); - var buf = contents[0..read_bytes]; - var reader = schema.Reader.init(buf, allocator); - - var decoder = try Api.JavascriptBundleContainer.decode( - &reader, - ); - std.debug.assert(decoder.code_length.? == javascript_bundle_container.code_length.?); + // Hash the hash of the package name + // it's hash(hash(package_name, package_version)) + std.mem.writeIntNative(u32, &bytes, this.package_list.items[current_package_id].hash); + hasher.update(&bytes); + + sorted_package_list[package_id_i] = this.package_list.items[current_package_id]; + package_id_i += 1; } + } - var code_length_bytes: [4]u8 = undefined; - std.mem.writeIntNative(u32, &code_length_bytes, this.code_end_byte_offset); - _ = try std.os.pwrite(this.tmpfile.handle, &code_length_bytes, magic_bytes.len); - - // Without his mutex, we get a crash at this location: - // try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, destination); - // ^ - const top_dir = try std.fs.openDirAbsolute(Fs.FileSystem.instance.top_level_dir, .{}); - _ = C.fchmod( - this.tmpfile.handle, - // chmod 777 - 0000010 | 0000100 | 0000001 | 0001000 | 0000040 | 0000004 | 0000002 | 0000400 | 0000200 | 0000020, + var javascript_bundle = std.mem.zeroes(Api.JavascriptBundle); + javascript_bundle.modules = this.module_list.items; + javascript_bundle.packages = sorted_package_list; + javascript_bundle.manifest_string = this.header_string_buffer.list.items; + const etag_u64 = hasher.final(); + // We store the etag as a ascii hex encoded u64 + // This is so we can send the bytes directly in the HTTP server instead of formatting it as hex each time. + javascript_bundle.etag = try std.fmt.allocPrint(allocator, "{x}", .{etag_u64}); + javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp())); + + const basename = std.fs.path.basename(std.mem.span(destination)); + const extname = std.fs.path.extension(basename); + javascript_bundle.import_from_name = if (bundler.options.platform == .bun) + "/node_modules.server.bun" + else + try std.fmt.allocPrint( + this.allocator, + "/{s}.{x}.bun", + .{ + basename[0 .. basename.len - extname.len], + etag_u64, + }, ); - try std.os.renameatZ(tmpdir.fd, tmpname, top_dir.fd, destination); - // Print any errors at the end - // try this.log.print(Output.errorWriter()); - return javascript_bundle_container; - } - pub fn metadataStringPointer(this: *GenerateNodeModuleBundle, ptr: Api.StringPointer) string { - return this.header_string_buffer.list.items[ptr.offset .. 
ptr.offset + ptr.length]; + javascript_bundle_container.bundle_format_version = current_version; + javascript_bundle_container.bundle = javascript_bundle; + javascript_bundle_container.code_length = this.code_end_byte_offset; + javascript_bundle_container.framework = framework_config; + javascript_bundle_container.routes = route_config; + + var start_pos = try this.tmpfile.getPos(); + var tmpwriter = std.io.bufferedWriter(this.tmpfile.writer()); + const SchemaWriter = schema.Writer(@TypeOf(tmpwriter.writer())); + var schema_file_writer = SchemaWriter.init(tmpwriter.writer()); + try javascript_bundle_container.encode(&schema_file_writer); + try tmpwriter.flush(); + + // sanity check + if (isDebug) { + try this.tmpfile.seekTo(start_pos); + var contents = try allocator.alloc(u8, (try this.tmpfile.getEndPos()) - start_pos); + var read_bytes = try this.tmpfile.read(contents); + var buf = contents[0..read_bytes]; + var reader = schema.Reader.init(buf, allocator); + + var decoder = try Api.JavascriptBundleContainer.decode( + &reader, + ); + std.debug.assert(decoder.code_length.? == javascript_bundle_container.code_length.?); } - // Since we trim the prefixes, we must also compare the package name and version - pub fn sortJavascriptModuleByPath(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledModule, b: Api.JavascriptBundledModule) bool { - return switch (std.mem.order( + var code_length_bytes: [4]u8 = undefined; + std.mem.writeIntNative(u32, &code_length_bytes, this.code_end_byte_offset); + _ = try std.os.pwrite(this.tmpfile.handle, &code_length_bytes, magic_bytes.len); + + // Without his mutex, we get a crash at this location: + // try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, destination); + // ^ + const top_dir = try std.fs.openDirAbsolute(Fs.FileSystem.instance.top_level_dir, .{}); + _ = C.fchmod( + this.tmpfile.handle, + // chmod 777 + 0000010 | 0000100 | 0000001 | 0001000 | 0000040 | 0000004 | 0000002 | 0000400 | 0000200 | 0000020, + ); + try std.os.renameatZ(tmpdir.fd, tmpname, top_dir.fd, destination); + // Print any errors at the end + // try this.log.print(Output.errorWriter()); + return javascript_bundle_container; + } + + pub fn metadataStringPointer(this: *GenerateNodeModuleBundle, ptr: Api.StringPointer) string { + return this.header_string_buffer.list.items[ptr.offset .. 
ptr.offset + ptr.length]; + } + + // Since we trim the prefixes, we must also compare the package name and version + pub fn sortJavascriptModuleByPath(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledModule, b: Api.JavascriptBundledModule) bool { + return switch (std.mem.order( + u8, + ctx.metadataStringPointer( + ctx.package_list.items[a.package_id].name, + ), + ctx.metadataStringPointer( + ctx.package_list.items[b.package_id].name, + ), + )) { + .eq => switch (std.mem.order( u8, ctx.metadataStringPointer( - ctx.package_list.items[a.package_id].name, + ctx.package_list.items[a.package_id].version, ), ctx.metadataStringPointer( - ctx.package_list.items[b.package_id].name, + ctx.package_list.items[b.package_id].version, ), )) { - .eq => switch (std.mem.order( + .eq => std.mem.order( u8, - ctx.metadataStringPointer( - ctx.package_list.items[a.package_id].version, - ), - ctx.metadataStringPointer( - ctx.package_list.items[b.package_id].version, - ), - )) { - .eq => std.mem.order( - u8, - ctx.metadataStringPointer(a.path), - ctx.metadataStringPointer(b.path), - ) == .lt, - .lt => true, - else => false, - }, + ctx.metadataStringPointer(a.path), + ctx.metadataStringPointer(b.path), + ) == .lt, .lt => true, else => false, - }; - } + }, + .lt => true, + else => false, + }; + } - // pub fn sortJavascriptPackageByName(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledPackage, b: Api.JavascriptBundledPackage) bool { - // return std.mem.order(u8, ctx.metadataStringPointer(a.name), ctx.metadataStringPointer(b.name)) == .lt; - // } + // pub fn sortJavascriptPackageByName(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledPackage, b: Api.JavascriptBundledPackage) bool { + // return std.mem.order(u8, ctx.metadataStringPointer(a.name), ctx.metadataStringPointer(b.name)) == .lt; + // } + + pub fn appendBytes(generator: *GenerateNodeModuleBundle, bytes: anytype) !void { + try generator.tmpfile.writeAll(bytes); + generator.tmpfile_byte_offset += @truncate(u32, bytes.len); + } - pub fn appendBytes(generator: *GenerateNodeModuleBundle, bytes: anytype) !void { - try generator.tmpfile.writeAll(bytes); - generator.tmpfile_byte_offset += @truncate(u32, bytes.len); + const BundledModuleData = struct { + import_path: string, + package_path: string, + package: *const PackageJSON, + module_id: u32, + + pub fn getForceBundle(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { + return _get(this, resolve_result, true, false); } - const BundledModuleData = struct { - import_path: string, - package_path: string, - package: *const PackageJSON, - module_id: u32, + pub fn getForceBundleForMain(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { + return _get(this, resolve_result, true, true); + } - pub fn getForceBundle(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { - return _get(this, resolve_result, true, false); + threadlocal var normalized_package_path: [512]u8 = undefined; + threadlocal var normalized_package_path2: [512]u8 = undefined; + inline fn _get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result, comptime force: bool, comptime is_main: bool) ?BundledModuleData { + const path = resolve_result.pathConst() orelse return null; + if (strings.eqlComptime(path.namespace, "node")) { + const _import_path = path.text["/bun-vfs/node_modules/".len..][resolve_result.package_json.?.name.len + 1 ..]; + return BundledModuleData{ + .import_path = 
_import_path, + .package_path = path.text["/bun-vfs/node_modules/".len..], + .package = resolve_result.package_json.?, + .module_id = resolve_result.package_json.?.hashModule(_import_path), + }; } - pub fn getForceBundleForMain(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { - return _get(this, resolve_result, true, true); - } + var import_path = path.text; + var package_path = path.text; + var file_path = path.text; + + if (resolve_result.package_json) |pkg| { + if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash) != null) { + const key_path_source_dir = pkg.source.key_path.sourceDir(); + const default_source_dir = pkg.source.path.sourceDir(); + if (strings.startsWith(path.text, key_path_source_dir)) { + import_path = path.text[key_path_source_dir.len..]; + } else if (strings.startsWith(path.text, default_source_dir)) { + import_path = path.text[default_source_dir.len..]; + } else if (strings.startsWith(path.pretty, pkg.name)) { + import_path = path.pretty[pkg.name.len + 1 ..]; + } - threadlocal var normalized_package_path: [512]u8 = undefined; - threadlocal var normalized_package_path2: [512]u8 = undefined; - inline fn _get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result, comptime force: bool, comptime is_main: bool) ?BundledModuleData { - const path = resolve_result.pathConst() orelse return null; - if (strings.eqlComptime(path.namespace, "node")) { - const _import_path = path.text["/bun-vfs/node_modules/".len..][resolve_result.package_json.?.name.len + 1 ..]; + var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path; + + std.mem.copy(u8, buf_to_use, pkg.name); + buf_to_use[pkg.name.len] = '/'; + std.mem.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); + package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1]; return BundledModuleData{ - .import_path = _import_path, - .package_path = path.text["/bun-vfs/node_modules/".len..], - .package = resolve_result.package_json.?, - .module_id = resolve_result.package_json.?.hashModule(_import_path), + .import_path = import_path, + .package_path = package_path, + .package = pkg, + .module_id = pkg.hashModule(package_path), }; } + } - var import_path = path.text; - var package_path = path.text; - var file_path = path.text; - - if (resolve_result.package_json) |pkg| { - if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash) != null) { - const key_path_source_dir = pkg.source.key_path.sourceDir(); - const default_source_dir = pkg.source.path.sourceDir(); - if (strings.startsWith(path.text, key_path_source_dir)) { - import_path = path.text[key_path_source_dir.len..]; - } else if (strings.startsWith(path.text, default_source_dir)) { - import_path = path.text[default_source_dir.len..]; - } else if (strings.startsWith(path.pretty, pkg.name)) { - import_path = path.pretty[pkg.name.len + 1 ..]; - } + const root: _resolver.RootPathPair = this.bundler.resolver.rootNodeModulePackageJSON( + resolve_result, + ) orelse return null; + + var base_path = root.base_path; + const package_json = root.package_json; + + // Easymode: the file path doesn't need to be remapped. 
+ if (strings.startsWith(file_path, base_path)) { + import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/"); + package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/"); + std.debug.assert(import_path.len > 0); + return BundledModuleData{ + .import_path = import_path, + .package_path = package_path, + .package = package_json, + .module_id = package_json.hashModule(package_path), + }; + } + + if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| { + package_path = file_path[i..]; + import_path = package_path[package_json.name.len + 1 ..]; + std.debug.assert(import_path.len > 0); + return BundledModuleData{ + .import_path = import_path, + .package_path = package_path, + .package = package_json, + .module_id = package_json.hashModule(package_path), + }; + } - var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path; + if (comptime force) { + if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, root.package_json.hash)) |pkg_json_i| { + const pkg_json = this.always_bundled_package_jsons[pkg_json_i]; + base_path = pkg_json.source.key_path.sourceDir(); - std.mem.copy(u8, buf_to_use, pkg.name); - buf_to_use[pkg.name.len] = '/'; - std.mem.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); - package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1]; + if (strings.startsWith(file_path, base_path)) { + import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/"); + package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/"); + std.debug.assert(import_path.len > 0); return BundledModuleData{ .import_path = import_path, .package_path = package_path, - .package = pkg, - .module_id = pkg.hashModule(package_path), + .package = package_json, + .module_id = package_json.hashModule(package_path), }; } - } - - const root: _resolver.RootPathPair = this.bundler.resolver.rootNodeModulePackageJSON( - resolve_result, - ) orelse return null; - - var base_path = root.base_path; - const package_json = root.package_json; - - // Easymode: the file path doesn't need to be remapped. 
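(Annotation: on a concrete, made-up layout, the "easymode" remap above reduces to two slices off the resolved absolute path — import_path is the file relative to the package root, package_path re-prefixes it with the package name:)

    const std = @import("std");

    pub fn main() void {
        // Hypothetical resolved paths; the real values come from the resolver.
        const package_name = "react";
        const base_path = "/project/node_modules/react";
        const file_path = "/project/node_modules/react/cjs/react.development.js";

        const import_path = std.mem.trimLeft(u8, file_path[base_path.len..], "/");
        const package_path = std.mem.trim(u8, file_path[base_path.len - package_name.len - 1 ..], "/");

        std.debug.print("import_path:  {s}\n", .{import_path}); // cjs/react.development.js
        std.debug.print("package_path: {s}\n", .{package_path}); // react/cjs/react.development.js
    }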
- if (strings.startsWith(file_path, base_path)) { - import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/"); - package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/"); - std.debug.assert(import_path.len > 0); - return BundledModuleData{ - .import_path = import_path, - .package_path = package_path, - .package = package_json, - .module_id = package_json.hashModule(package_path), - }; - } - - if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| { - package_path = file_path[i..]; - import_path = package_path[package_json.name.len + 1 ..]; - std.debug.assert(import_path.len > 0); - return BundledModuleData{ - .import_path = import_path, - .package_path = package_path, - .package = package_json, - .module_id = package_json.hashModule(package_path), - }; - } - - if (comptime force) { - if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, root.package_json.hash)) |pkg_json_i| { - const pkg_json = this.always_bundled_package_jsons[pkg_json_i]; - base_path = pkg_json.source.key_path.sourceDir(); - - if (strings.startsWith(file_path, base_path)) { - import_path = std.mem.trimLeft(u8, path.text[base_path.len..], "/"); - package_path = std.mem.trim(u8, path.text[base_path.len - package_json.name.len - 1 ..], "/"); - std.debug.assert(import_path.len > 0); - return BundledModuleData{ - .import_path = import_path, - .package_path = package_path, - .package = package_json, - .module_id = package_json.hashModule(package_path), - }; - } - if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| { - package_path = file_path[i..]; - import_path = package_path[package_json.name.len + 1 ..]; - std.debug.assert(import_path.len > 0); - return BundledModuleData{ - .import_path = import_path, - .package_path = package_path, - .package = package_json, - .module_id = package_json.hashModule(package_path), - }; - } + if (std.mem.lastIndexOf(u8, file_path, package_json.name)) |i| { + package_path = file_path[i..]; + import_path = package_path[package_json.name.len + 1 ..]; + std.debug.assert(import_path.len > 0); + return BundledModuleData{ + .import_path = import_path, + .package_path = package_path, + .package = package_json, + .module_id = package_json.hashModule(package_path), + }; } - unreachable; } - - return null; + unreachable; } - pub fn get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { - return _get(this, resolve_result, false, false); - } - }; + return null; + } - fn writeEmptyModule(this: *GenerateNodeModuleBundle, package_relative_path: string, module_id: u32) !u32 { - this.write_lock.lock(); - defer this.write_lock.unlock(); - var code_offset = @truncate(u32, try this.tmpfile.getPos()); - var writer = this.tmpfile.writer(); - var buffered = std.io.bufferedWriter(writer); - - var bufwriter = buffered.writer(); - try bufwriter.writeAll("// "); - try bufwriter.writeAll(package_relative_path); - try bufwriter.writeAll(" (disabled/empty)\nexport var $"); - std.fmt.formatInt(module_id, 16, .lower, .{}, bufwriter) catch unreachable; - try bufwriter.writeAll(" = () => { var obj = {}; Object.defineProperty(obj, 'default', { value: obj, enumerable: false, configurable: true }, obj); return obj; }; \n"); - try buffered.flush(); - this.tmpfile_byte_offset = @truncate(u32, try this.tmpfile.getPos()); - return code_offset; + pub fn get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData { + return _get(this, resolve_result, false, false); } + }; - fn 
processImportRecord(this: *GenerateNodeModuleBundle, import_record: ImportRecord) !void {} - var json_ast_symbols = [_]js_ast.Symbol{ - js_ast.Symbol{ .original_name = "$$m" }, - js_ast.Symbol{ .original_name = "exports" }, - js_ast.Symbol{ .original_name = "module" }, - js_ast.Symbol{ .original_name = "CONGRATS_YOU_FOUND_A_BUG" }, - js_ast.Symbol{ .original_name = "$$bun_runtime_json_parse" }, - }; - const json_parse_string = "parse"; - var json_ast_symbols_list = std.mem.span(&json_ast_symbols); - threadlocal var override_file_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; - - pub fn appendToModuleList( - this: *GenerateNodeModuleBundle, - package: *const PackageJSON, - module_id: u32, - code_offset: u32, - package_relative_path: string, - ) !void { - this.list_lock.lock(); - defer this.list_lock.unlock(); - - const code_length = @atomicLoad(u32, &this.tmpfile_byte_offset, .SeqCst) - code_offset; + fn writeEmptyModule(this: *GenerateNodeModuleBundle, package_relative_path: string, module_id: u32) !u32 { + this.write_lock.lock(); + defer this.write_lock.unlock(); + var code_offset = @truncate(u32, try this.tmpfile.getPos()); + var writer = this.tmpfile.writer(); + var buffered = std.io.bufferedWriter(writer); + + var bufwriter = buffered.writer(); + try bufwriter.writeAll("// "); + try bufwriter.writeAll(package_relative_path); + try bufwriter.writeAll(" (disabled/empty)\nexport var $"); + std.fmt.formatInt(module_id, 16, .lower, .{}, bufwriter) catch unreachable; + try bufwriter.writeAll(" = () => { var obj = {}; Object.defineProperty(obj, 'default', { value: obj, enumerable: false, configurable: true }, obj); return obj; }; \n"); + try buffered.flush(); + this.tmpfile_byte_offset = @truncate(u32, try this.tmpfile.getPos()); + return code_offset; + } - if (comptime isDebug) { - std.debug.assert(code_length > 0); - std.debug.assert(package.hash != 0); - std.debug.assert(package.version.len > 0); - std.debug.assert(package.name.len > 0); - std.debug.assert(module_id > 0); - } + fn processImportRecord(this: *GenerateNodeModuleBundle, import_record: ImportRecord) !void {} + var json_ast_symbols = [_]js_ast.Symbol{ + js_ast.Symbol{ .original_name = "$$m" }, + js_ast.Symbol{ .original_name = "exports" }, + js_ast.Symbol{ .original_name = "module" }, + js_ast.Symbol{ .original_name = "CONGRATS_YOU_FOUND_A_BUG" }, + js_ast.Symbol{ .original_name = "$$bun_runtime_json_parse" }, + }; + const json_parse_string = "parse"; + var json_ast_symbols_list = std.mem.span(&json_ast_symbols); + threadlocal var override_file_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + + pub fn appendToModuleList( + this: *GenerateNodeModuleBundle, + package: *const PackageJSON, + module_id: u32, + code_offset: u32, + package_relative_path: string, + ) !void { + this.list_lock.lock(); + defer this.list_lock.unlock(); - var package_get_or_put_entry = try this.package_list_map.getOrPut(package.hash); + const code_length = @atomicLoad(u32, &this.tmpfile_byte_offset, .SeqCst) - code_offset; - if (!package_get_or_put_entry.found_existing) { - package_get_or_put_entry.value_ptr.* = @truncate(u32, this.package_list.items.len); - try this.package_list.append( - Api.JavascriptBundledPackage{ - .name = try this.appendHeaderString(package.name), - .version = try this.appendHeaderString(package.version), - .hash = package.hash, - }, - ); - this.has_jsx = this.has_jsx or strings.eql(package.name, this.bundler.options.jsx.package_name); - } + if (comptime isDebug) { + std.debug.assert(code_length > 0); + 
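(Annotation: the module list never stores strings directly — appendHeaderString interns each name, version, and path into one shared buffer and hands back an (offset, length) pair, so the manifest serializes as a single string plus indices. A self-contained sketch, with StringPointer and HeaderBuffer as stand-ins for the Api types:)

    const std = @import("std");

    const StringPointer = struct { offset: u32, length: u32 };

    const HeaderBuffer = struct {
        list: std.ArrayList(u8),

        fn append(self: *HeaderBuffer, str: []const u8) !StringPointer {
            const offset = @truncate(u32, self.list.items.len);
            try self.list.appendSlice(str);
            return StringPointer{ .offset = offset, .length = @truncate(u32, str.len) };
        }

        fn get(self: *HeaderBuffer, ptr: StringPointer) []const u8 {
            return self.list.items[ptr.offset .. ptr.offset + ptr.length];
        }
    };

    test "string pointers round-trip through the shared buffer" {
        var buffer = HeaderBuffer{ .list = std.ArrayList(u8).init(std.testing.allocator) };
        defer buffer.list.deinit();
        const name = try buffer.append("react");
        const version = try buffer.append("17.0.2");
        try std.testing.expectEqualStrings("react", buffer.get(name));
        try std.testing.expectEqualStrings("17.0.2", buffer.get(version));
    }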
std.debug.assert(package.hash != 0); + std.debug.assert(package.version.len > 0); + std.debug.assert(package.name.len > 0); + std.debug.assert(module_id > 0); + } - var path_extname_length = @truncate(u8, std.fs.path.extension(package_relative_path).len); - try this.module_list.append( - Api.JavascriptBundledModule{ - .path = try this.appendHeaderString( - package_relative_path, - ), - .path_extname_length = path_extname_length, - .package_id = package_get_or_put_entry.value_ptr.*, - .id = module_id, - .code = Api.StringPointer{ - .length = @truncate(u32, code_length), - .offset = @truncate(u32, code_offset), - }, + var package_get_or_put_entry = try this.package_list_map.getOrPut(package.hash); + + if (!package_get_or_put_entry.found_existing) { + package_get_or_put_entry.value_ptr.* = @truncate(u32, this.package_list.items.len); + try this.package_list.append( + Api.JavascriptBundledPackage{ + .name = try this.appendHeaderString(package.name), + .version = try this.appendHeaderString(package.version), + .hash = package.hash, }, ); + this.has_jsx = this.has_jsx or strings.eql(package.name, this.bundler.options.jsx.package_name); } - threadlocal var json_e_string: js_ast.E.String = undefined; - threadlocal var json_e_call: js_ast.E.Call = undefined; - threadlocal var json_e_identifier: js_ast.E.Identifier = undefined; - threadlocal var json_call_args: [1]js_ast.Expr = undefined; - pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, _resolve: _resolver.Result) !void { - const resolve = _resolve; - if (resolve.is_external) return; - - var shared_buffer = &worker.data.shared_buffer; - var scan_pass_result = &worker.data.scan_pass_result; - - const is_from_node_modules = resolve.isLikelyNodeModule() or brk: { - if (resolve.package_json) |pkg| { - break :brk std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash) != null; - } - break :brk false; + + var path_extname_length = @truncate(u8, std.fs.path.extension(package_relative_path).len); + try this.module_list.append( + Api.JavascriptBundledModule{ + .path = try this.appendHeaderString( + package_relative_path, + ), + .path_extname_length = path_extname_length, + .package_id = package_get_or_put_entry.value_ptr.*, + .id = module_id, + .code = Api.StringPointer{ + .length = @truncate(u32, code_length), + .offset = @truncate(u32, code_offset), + }, + }, + ); + } + threadlocal var json_e_string: js_ast.E.String = undefined; + threadlocal var json_e_call: js_ast.E.Call = undefined; + threadlocal var json_e_identifier: js_ast.E.Identifier = undefined; + threadlocal var json_call_args: [1]js_ast.Expr = undefined; + pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, _resolve: _resolver.Result) !void { + const resolve = _resolve; + if (resolve.is_external) return; + + var shared_buffer = &worker.data.shared_buffer; + var scan_pass_result = &worker.data.scan_pass_result; + + const is_from_node_modules = resolve.isLikelyNodeModule() or brk: { + if (resolve.package_json) |pkg| { + break :brk std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash) != null; + } + break :brk false; + }; + var file_path = (resolve.pathConst() orelse unreachable).*; + const source_dir = file_path.sourceDir(); + const loader = this.bundler.options.loader(file_path.name.ext); + var bundler = this.bundler; + defer scan_pass_result.reset(); + defer shared_buffer.reset(); + defer this.bundler.resetStore(); + var log = worker.data.log; + + // If we're in a node_module, build that almost 
normally + if (is_from_node_modules) { + var written: usize = undefined; + var code_offset: u32 = 0; + + const module_data = BundledModuleData.getForceBundleForMain(this, &resolve) orelse { + const fake_path = logger.Source.initPathString(file_path.text, ""); + log.addResolveError( + &fake_path, + logger.Range.None, + this.allocator, + "Bug while resolving: \"{s}\"", + .{file_path.text}, + resolve.import_kind, + ) catch {}; + return error.ResolveError; }; - var file_path = (resolve.pathConst() orelse unreachable).*; - const source_dir = file_path.sourceDir(); - const loader = this.bundler.options.loader(file_path.name.ext); - var bundler = this.bundler; - defer scan_pass_result.reset(); - defer shared_buffer.reset(); - defer this.bundler.resetStore(); - var log = worker.data.log; - - // If we're in a node_module, build that almost normally - if (is_from_node_modules) { - var written: usize = undefined; - var code_offset: u32 = 0; - - const module_data = BundledModuleData.getForceBundleForMain(this, &resolve) orelse { - const fake_path = logger.Source.initPathString(file_path.text, ""); - log.addResolveError( - &fake_path, - logger.Range.None, - this.allocator, - "Bug while resolving: \"{s}\"", - .{file_path.text}, - resolve.import_kind, - ) catch {}; - return error.ResolveError; - }; - const module_id = module_data.module_id; - const package = module_data.package; - const package_relative_path = module_data.import_path; - - file_path.pretty = module_data.package_path; - - const entry: CacheEntry = brk: { - if (this.bundler.options.framework) |framework| { - if (framework.override_modules_hashes.len > 0) { - const package_relative_path_hash = std.hash.Wyhash.hash(0, module_data.package_path); - if (std.mem.indexOfScalar( - u64, - framework.override_modules_hashes, - package_relative_path_hash, - )) |index| { - const relative_path = [_]string{ - framework.resolved_dir, - framework.override_modules.values[index], - }; - var override_path = this.bundler.fs.absBuf( - &relative_path, - &override_file_path_buf, - ); - override_file_path_buf[override_path.len] = 0; - var override_pathZ = override_file_path_buf[0..override_path.len :0]; - break :brk try bundler.resolver.caches.fs.readFileShared( - bundler.fs, - override_pathZ, - 0, - null, - shared_buffer, - ); - } + const module_id = module_data.module_id; + const package = module_data.package; + const package_relative_path = module_data.import_path; + + file_path.pretty = module_data.package_path; + + const entry: CacheEntry = brk: { + if (this.bundler.options.framework) |framework| { + if (framework.override_modules_hashes.len > 0) { + const package_relative_path_hash = std.hash.Wyhash.hash(0, module_data.package_path); + if (std.mem.indexOfScalar( + u64, + framework.override_modules_hashes, + package_relative_path_hash, + )) |index| { + const relative_path = [_]string{ + framework.resolved_dir, + framework.override_modules.values[index], + }; + var override_path = this.bundler.fs.absBuf( + &relative_path, + &override_file_path_buf, + ); + override_file_path_buf[override_path.len] = 0; + var override_pathZ = override_file_path_buf[0..override_path.len :0]; + break :brk try bundler.resolver.caches.fs.readFileShared( + bundler.fs, + override_pathZ, + 0, + null, + shared_buffer, + ); } } + } - if (!strings.eqlComptime(file_path.namespace, "node")) - break :brk try bundler.resolver.caches.fs.readFileShared( - bundler.fs, - file_path.textZ(), - resolve.dirname_fd, - if (resolve.file_fd != 0) resolve.file_fd else null, - shared_buffer, - ); + if 
(!strings.eqlComptime(file_path.namespace, "node")) + break :brk try bundler.resolver.caches.fs.readFileShared( + bundler.fs, + file_path.textZ(), + resolve.dirname_fd, + if (resolve.file_fd != 0) resolve.file_fd else null, + shared_buffer, + ); - break :brk CacheEntry{ - .contents = NodeFallbackModules.contentsFromPath(file_path.text) orelse "", - }; + break :brk CacheEntry{ + .contents = NodeFallbackModules.contentsFromPath(file_path.text) orelse "", }; + }; - var approximate_newline_count: usize = 0; - defer worker.data.estimated_input_lines_of_code += approximate_newline_count; - - // Handle empty files - // We can't just ignore them. Sometimes code will try to import it. Often because of TypeScript types. - // So we just say it's an empty object. Empty object mimicks what "browser": false does as well. - // TODO: optimize this so that all the exports for these are done in one line instead of writing repeatedly - if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) { - code_offset = try this.writeEmptyModule(module_data.package_path, module_id); - } else { - var ast: js_ast.Ast = undefined; - - const source = logger.Source.initRecycledFile( - Fs.File{ - .path = file_path, - .contents = entry.contents, - }, - bundler.allocator, - ) catch return null; - - switch (loader) { - .jsx, - .tsx, - .js, - .ts, - => { - var jsx = bundler.options.jsx; - jsx.parse = loader.isJSX(); - - var opts = js_parser.Parser.Options.init(jsx, loader); - opts.transform_require_to_import = false; - opts.enable_bundling = true; - opts.warn_about_unbundled_modules = false; - - ast = (try bundler.resolver.caches.js.parse( - bundler.allocator, - opts, - bundler.options.define, - log, - &source, - )) orelse return; - approximate_newline_count = ast.approximate_newline_count; - if (ast.import_records.len > 0) { - for (ast.import_records) |*import_record, record_id| { - - // Don't resolve the runtime - if (import_record.is_internal or import_record.is_unused) { - continue; - } - - if (bundler.linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*_resolved_import| { - if (_resolved_import.is_external) { - continue; - } - var path = _resolved_import.path() orelse { - import_record.path.is_disabled = true; - import_record.is_bundled = true; - continue; - }; - - const loader_ = bundler.options.loader(path.name.ext); - - if (!loader_.isJavaScriptLikeOrJSON()) { - import_record.path.is_disabled = true; - import_record.is_bundled = true; - continue; - } - - // if (_resolved_import.package_json == null) |pkg_json| { - // _resolved_import.package_json = if (pkg_json.hash == resolve.package_json.?.hash) - // resolve.package_json - // else - // _resolved_import.package_json; - // } - - const resolved_import: *const _resolver.Result = _resolved_import; - - const _module_data = BundledModuleData.getForceBundle(this, resolved_import) orelse unreachable; - import_record.module_id = _module_data.module_id; - std.debug.assert(import_record.module_id != 0); - import_record.is_bundled = true; - - path.* = try path.dupeAlloc(this.allocator); - - import_record.path = path.*; - - try this.queue.upsert( - _module_data.module_id, - _resolved_import.*, - ); - } else |err| { - if (comptime isDebug) { - Output.prettyErrorln("\n<r><red>{s}<r> on resolving \"{s}\" from \"{s}\"", .{ - @errorName(err), - import_record.path.text, - file_path.text, - }); - } - - switch (err) { - error.ModuleNotFound => { - if (isPackagePath(import_record.path.text)) { - if 
(this.bundler.options.platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) { - try log.addResolveErrorWithTextDupe( - &source, - import_record.range, - this.allocator, - "Could not resolve Node.js builtin: \"{s}\".", - .{import_record.path.text}, - import_record.kind, - ); - } else { - try log.addResolveErrorWithTextDupe( - &source, - import_record.range, - this.allocator, - "Could not resolve: \"{s}\". Maybe you need to \"npm install\" (or yarn/pnpm)?", - .{import_record.path.text}, - import_record.kind, - ); - } - } else { - try log.addResolveErrorWithTextDupe( - &source, - import_record.range, - this.allocator, - "Could not resolve: \"{s}\"", - .{ - import_record.path.text, - }, - import_record.kind, - ); - } - }, - // assume other errors are already in the log - else => {}, - } - } - } - } - }, - .json => { - // parse the JSON _only_ to catch errors at build time. - const json_parse_result = json_parser.ParseJSONForBundling(&source, worker.data.log, worker.allocator) catch return; - - if (json_parse_result.tag != .empty) { - const expr = brk: { - // If it's an ascii string, we just print it out with a big old JSON.parse() - if (json_parse_result.tag == .ascii) { - json_e_string = js_ast.E.String{ .utf8 = source.contents, .prefer_template = true }; - var json_string_expr = js_ast.Expr{ .data = .{ .e_string = &json_e_string }, .loc = logger.Loc{ .start = 0 } }; - json_call_args[0] = json_string_expr; - json_e_identifier = js_ast.E.Identifier{ .ref = Ref{ .source_index = 0, .inner_index = @intCast(Ref.Int, json_ast_symbols_list.len - 1) } }; - - json_e_call = js_ast.E.Call{ - .target = js_ast.Expr{ .data = .{ .e_identifier = &json_e_identifier }, .loc = logger.Loc{ .start = 0 } }, - .args = std.mem.span(&json_call_args), - }; - break :brk js_ast.Expr{ .data = .{ .e_call = &json_e_call }, .loc = logger.Loc{ .start = 0 } }; - // If we're going to have to convert it to a UTF16, just make it an object actually - } else { - break :brk json_parse_result.expr; - } - }; - - var stmt = js_ast.Stmt.alloc(worker.allocator, js_ast.S.ExportDefault, js_ast.S.ExportDefault{ - .value = js_ast.StmtOrExpr{ .expr = expr }, - .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} }, - }, logger.Loc{ .start = 0 }); - var stmts = worker.allocator.alloc(js_ast.Stmt, 1) catch unreachable; - stmts[0] = stmt; - var parts = worker.allocator.alloc(js_ast.Part, 1) catch unreachable; - parts[0] = js_ast.Part{ .stmts = stmts }; - ast = js_ast.Ast.initTest(parts); - - ast.runtime_imports = runtime.Runtime.Imports{}; - ast.runtime_imports.@"$$m" = .{ .ref = Ref{ .source_index = 0, .inner_index = 0 }, .primary = Ref.None, .backup = Ref.None }; - ast.runtime_imports.__export = .{ .ref = Ref{ .source_index = 0, .inner_index = 1 }, .primary = Ref.None, .backup = Ref.None }; - ast.symbols = json_ast_symbols_list; - ast.module_ref = Ref{ .source_index = 0, .inner_index = 2 }; - ast.exports_ref = ast.runtime_imports.__export.?.ref; - ast.bundle_export_ref = Ref{ .source_index = 0, .inner_index = 3 }; - } else { - var parts = &[_]js_ast.Part{}; - ast = js_ast.Ast.initTest(parts); - } - }, - else => { - return; - }, - } - - switch (ast.parts.len) { - // It can be empty after parsing too - // A file like this is an example: - // "//# sourceMappingURL=validator.js.map" - 0 => { - code_offset = try this.writeEmptyModule(module_data.package_path, module_id); - }, - else => { - const register_ref = ast.runtime_imports.@"$$m".?.ref; - const E = js_ast.E; - const Expr = js_ast.Expr; - const 
Stmt = js_ast.Stmt; - - var prepend_part: js_ast.Part = undefined; - var needs_prepend_part = false; - if (ast.parts.len > 1) { - for (ast.parts) |part| { - if (part.tag != .none and part.stmts.len > 0) { - prepend_part = part; - needs_prepend_part = true; - break; - } - } - } - - var package_path = js_ast.E.String{ .utf8 = module_data.package_path }; - - var target_identifier = E.Identifier{ .ref = register_ref }; - var cjs_args: [2]js_ast.G.Arg = undefined; - var module_binding = js_ast.B.Identifier{ .ref = ast.module_ref.? }; - var exports_binding = js_ast.B.Identifier{ .ref = ast.exports_ref.? }; - - var part = &ast.parts[ast.parts.len - 1]; - - var new_stmts: [1]Stmt = undefined; - var register_args: [1]Expr = undefined; - var closure = E.Arrow{ - .args = &cjs_args, - .body = .{ - .loc = logger.Loc.Empty, - .stmts = part.stmts, - }, - }; - - cjs_args[0] = js_ast.G.Arg{ - .binding = js_ast.Binding{ - .loc = logger.Loc.Empty, - .data = .{ .b_identifier = &module_binding }, - }, - }; - cjs_args[1] = js_ast.G.Arg{ - .binding = js_ast.Binding{ - .loc = logger.Loc.Empty, - .data = .{ .b_identifier = &exports_binding }, - }, - }; - - var properties: [1]js_ast.G.Property = undefined; - var e_object = E.Object{ - .properties = &properties, - }; - const module_path_str = js_ast.Expr{ .data = .{ .e_string = &package_path }, .loc = logger.Loc.Empty }; - properties[0] = js_ast.G.Property{ - .key = module_path_str, - .value = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_arrow = &closure } }, - }; - - // if (!ast.uses_module_ref) { - // var symbol = &ast.symbols[ast.module_ref.?.inner_index]; - // symbol.original_name = "_$$"; - // } - - // $$m(12345, "react", "index.js", function(module, exports) { - - // }) - var accessor = js_ast.E.Index{ .index = module_path_str, .target = js_ast.Expr{ - .data = .{ .e_object = &e_object }, - .loc = logger.Loc.Empty, - } }; - register_args[0] = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_index = &accessor } }; - - var call_register = E.Call{ - .target = Expr{ - .data = .{ .e_identifier = &target_identifier }, - .loc = logger.Loc{ .start = 0 }, - }, - .args = ®ister_args, - }; - var register_expr = Expr{ .loc = call_register.target.loc, .data = .{ .e_call = &call_register } }; - var decls: [1]js_ast.G.Decl = undefined; - var bundle_export_binding = js_ast.B.Identifier{ .ref = ast.runtime_imports.@"$$m".?.ref }; - var binding = js_ast.Binding{ - .loc = register_expr.loc, - .data = .{ .b_identifier = &bundle_export_binding }, - }; - decls[0] = js_ast.G.Decl{ - .value = register_expr, - .binding = binding, - }; - var export_var = js_ast.S.Local{ - .decls = &decls, - .is_export = true, - }; - new_stmts[0] = Stmt{ .loc = register_expr.loc, .data = .{ .s_local = &export_var } }; - part.stmts = &new_stmts; - - var writer = js_printer.NewFileWriter(this.tmpfile); - var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols}); - - // It should only have one part. 
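(Annotation: for reference, roughly the JavaScript shape the AST surgery above produces per module, shown here as a Zig string literal. The real bytes come from js_printer, and the identifier and hash names are illustrative only:)

    const std = @import("std");

    pub fn main() void {
        // The part's statements become the body of a (module, exports) closure
        // keyed by the package-relative path, registered through the runtime's $$m.
        const emitted =
            \\export var $a1b2c3 = $$m({
            \\  "react/index.js": (module, exports) => {
            \\    /* original statements of the part */
            \\  },
            \\}["react/index.js"]);
        ;
        std.debug.print("{s}\n", .{emitted});
    }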
- ast.parts = ast.parts[ast.parts.len - 1 ..]; - const write_result = - try js_printer.printCommonJSThreaded( - @TypeOf(writer), - writer, - ast, - js_ast.Symbol.Map.initList(symbols), - &source, - false, - js_printer.Options{ - .to_module_ref = Ref.RuntimeRef, - .bundle_export_ref = ast.runtime_imports.@"$$m".?.ref, - .source_path = file_path, - .externals = ast.externals, - .indent = 0, - .require_ref = ast.require_ref, - .module_hash = module_id, - .runtime_imports = ast.runtime_imports, - .prepend_part_value = &prepend_part, - .prepend_part_key = if (needs_prepend_part) closure.body.stmts.ptr else null, - }, - Linker, - &bundler.linker, - &this.write_lock, - std.fs.File, - this.tmpfile, - std.fs.File.getPos, - &this.tmpfile_byte_offset, - ); + var approximate_newline_count: usize = 0; + defer worker.data.estimated_input_lines_of_code += approximate_newline_count; - code_offset = write_result.off; - }, - } - } + // Handle empty files + // We can't just ignore them. Sometimes code will try to import it. Often because of TypeScript types. + // So we just say it's an empty object. Empty object mimicks what "browser": false does as well. + // TODO: optimize this so that all the exports for these are done in one line instead of writing repeatedly + if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) { + code_offset = try this.writeEmptyModule(module_data.package_path, module_id); + } else { + var ast: js_ast.Ast = undefined; - if (comptime isDebug) { - Output.prettyln("{s}@{s}/{s} - {d}:{d} \n", .{ package.name, package.version, package_relative_path, package.hash, module_id }); - Output.flush(); - std.debug.assert(package_relative_path.len > 0); - } + const source = logger.Source.initRecycledFile( + Fs.File{ + .path = file_path, + .contents = entry.contents, + }, + bundler.allocator, + ) catch return null; - try this.appendToModuleList( - package, - module_id, - code_offset, - package_relative_path, - ); - } else { - // If it's app code, scan but do not fully parse. 
switch (loader) { .jsx, .tsx, .js, .ts, => { - const entry = bundler.resolver.caches.fs.readFileShared( - bundler.fs, - file_path.textZ(), - resolve.dirname_fd, - if (resolve.file_fd != 0) resolve.file_fd else null, - shared_buffer, - ) catch return; - if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) return; - - const source = logger.Source.initRecycledFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null; - var jsx = bundler.options.jsx; jsx.parse = loader.isJSX(); + var opts = js_parser.Parser.Options.init(jsx, loader); + opts.transform_require_to_import = false; + opts.enable_bundling = true; + opts.warn_about_unbundled_modules = false; - try bundler.resolver.caches.js.scan( + ast = (try bundler.resolver.caches.js.parse( bundler.allocator, - scan_pass_result, opts, bundler.options.define, log, &source, - ); - worker.data.estimated_input_lines_of_code += scan_pass_result.approximate_newline_count; + )) orelse return; + approximate_newline_count = ast.approximate_newline_count; + if (ast.import_records.len > 0) { + for (ast.import_records) |*import_record, record_id| { - { - for (scan_pass_result.import_records.items) |*import_record, i| { + // Don't resolve the runtime if (import_record.is_internal or import_record.is_unused) { continue; } @@ -1844,93 +1508,51 @@ pub fn NewBundler(cache_files: bool) type { if (_resolved_import.is_external) { continue; } + var path = _resolved_import.path() orelse { + import_record.path.is_disabled = true; + import_record.is_bundled = true; + continue; + }; - var path = _resolved_import.path() orelse continue; + const loader_ = bundler.options.loader(path.name.ext); - const loader_ = this.bundler.options.loader(path.name.ext); - if (!loader_.isJavaScriptLikeOrJSON()) continue; + if (!loader_.isJavaScriptLikeOrJSON()) { + import_record.path.is_disabled = true; + import_record.is_bundled = true; + continue; + } - path.* = try path.dupeAlloc(this.allocator); + // if (_resolved_import.package_json == null) |pkg_json| { + // _resolved_import.package_json = if (pkg_json.hash == resolve.package_json.?.hash) + // resolve.package_json + // else + // _resolved_import.package_json; + // } - if (BundledModuleData.get(this, _resolved_import)) |mod| { - if (comptime !FeatureFlags.bundle_dynamic_import) { - if (import_record.kind == .dynamic) - continue; - } else { - // When app code dynamically imports a large file - // Don't bundle it. Leave it as a separate file. - // The main value from bundling in development is to minimize tiny, waterfall http requests - // If you're importing > 100 KB file dynamically, developer is probably explicitly trying to do that. 
- // There's a tradeoff between "I want to minimize page load time" - if (import_record.kind == .dynamic) { - this.dynamic_import_file_size_store_lock.lock(); - defer this.dynamic_import_file_size_store_lock.unlock(); - var dynamic_import_file_size = this.dynamic_import_file_size_store.getOrPut(mod.module_id) catch unreachable; - if (!dynamic_import_file_size.found_existing) { - var fd = _resolved_import.file_fd; - var can_close = false; - if (fd == 0) { - dynamic_import_file_size.value_ptr.* = 0; - fd = (std.fs.openFileAbsolute(path.textZ(), .{}) catch |err| { - this.log.addRangeWarningFmt( - &source, - import_record.range, - worker.allocator, - "{s} opening file: \"{s}\"", - .{ @errorName(err), path.text }, - ) catch unreachable; - continue; - }).handle; - can_close = true; - Fs.FileSystem.setMaxFd(fd); - } - - defer { - if (can_close and bundler.fs.fs.needToCloseFiles()) { - var _file = std.fs.File{ .handle = fd }; - _file.close(); - _resolved_import.file_fd = 0; - } else if (FeatureFlags.store_file_descriptors) { - _resolved_import.file_fd = fd; - } - } + const resolved_import: *const _resolver.Result = _resolved_import; - var file = std.fs.File{ .handle = fd }; - var stat = file.stat() catch |err| { - this.log.addRangeWarningFmt( - &source, - import_record.range, - worker.allocator, - "{s} stat'ing file: \"{s}\"", - .{ @errorName(err), path.text }, - ) catch unreachable; - dynamic_import_file_size.value_ptr.* = 0; - continue; - }; + const _module_data = BundledModuleData.getForceBundle(this, resolved_import) orelse unreachable; + import_record.module_id = _module_data.module_id; + std.debug.assert(import_record.module_id != 0); + import_record.is_bundled = true; - dynamic_import_file_size.value_ptr.* = @truncate(u32, stat.size); - } + path.* = try path.dupeAlloc(this.allocator); - if (dynamic_import_file_size.value_ptr.* > 1024 * 100) - continue; - } - } + import_record.path = path.*; - std.debug.assert(mod.module_id != 0); - try this.queue.upsert( - mod.module_id, - _resolved_import.*, - ); - } else { - try this.queue.upsert( - _resolved_import.hash( - this.bundler.fs.top_level_dir, - loader_, - ), - _resolved_import.*, - ); - } + try this.queue.upsert( + _module_data.module_id, + _resolved_import.*, + ); } else |err| { + if (comptime isDebug) { + Output.prettyErrorln("\n<r><red>{s}<r> on resolving \"{s}\" from \"{s}\"", .{ + @errorName(err), + import_record.path.text, + file_path.text, + }); + } + switch (err) { error.ModuleNotFound => { if (isPackagePath(import_record.path.text)) { @@ -1973,862 +1595,1238 @@ pub fn NewBundler(cache_files: bool) type { } } }, - else => {}, + .json => { + // parse the JSON _only_ to catch errors at build time. 
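(Annotation: the ASCII fast path below re-emits JSON through the runtime's $$bun_runtime_json_parse helper instead of as an object literal, on the theory that engines parse a JSON string faster than equivalent object syntax. A toy wrapper showing the decision — string concatenation here is purely illustrative; the real code builds AST nodes (E.Call around E.String):)

    const std = @import("std");

    fn wrapJson(allocator: *std.mem.Allocator, contents: []const u8, is_ascii: bool) ![]u8 {
        if (is_ascii) {
            // ASCII contents go through the runtime JSON parser at load time.
            return std.fmt.allocPrint(allocator, "export default $$bun_runtime_json_parse(`{s}`);", .{contents});
        }
        // Non-ASCII contents would need a UTF-16 conversion, so emit the object directly.
        return std.fmt.allocPrint(allocator, "export default {s};", .{contents});
    }

    test "ascii json goes through the runtime parser" {
        const out = try wrapJson(std.testing.allocator, "{\"name\":\"react\"}", true);
        defer std.testing.allocator.free(out);
        try std.testing.expect(std.mem.indexOf(u8, out, "$$bun_runtime_json_parse") != null);
    }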
+ const json_parse_result = json_parser.ParseJSONForBundling(&source, worker.data.log, worker.allocator) catch return; + + if (json_parse_result.tag != .empty) { + const expr = brk: { + // If it's an ascii string, we just print it out with a big old JSON.parse() + if (json_parse_result.tag == .ascii) { + json_e_string = js_ast.E.String{ .utf8 = source.contents, .prefer_template = true }; + var json_string_expr = js_ast.Expr{ .data = .{ .e_string = &json_e_string }, .loc = logger.Loc{ .start = 0 } }; + json_call_args[0] = json_string_expr; + json_e_identifier = js_ast.E.Identifier{ .ref = Ref{ .source_index = 0, .inner_index = @intCast(Ref.Int, json_ast_symbols_list.len - 1) } }; + + json_e_call = js_ast.E.Call{ + .target = js_ast.Expr{ .data = .{ .e_identifier = &json_e_identifier }, .loc = logger.Loc{ .start = 0 } }, + .args = std.mem.span(&json_call_args), + }; + break :brk js_ast.Expr{ .data = .{ .e_call = &json_e_call }, .loc = logger.Loc{ .start = 0 } }; + // If we're going to have to convert it to a UTF16, just make it an object actually + } else { + break :brk json_parse_result.expr; + } + }; + + var stmt = js_ast.Stmt.alloc(worker.allocator, js_ast.S.ExportDefault, js_ast.S.ExportDefault{ + .value = js_ast.StmtOrExpr{ .expr = expr }, + .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} }, + }, logger.Loc{ .start = 0 }); + var stmts = worker.allocator.alloc(js_ast.Stmt, 1) catch unreachable; + stmts[0] = stmt; + var parts = worker.allocator.alloc(js_ast.Part, 1) catch unreachable; + parts[0] = js_ast.Part{ .stmts = stmts }; + ast = js_ast.Ast.initTest(parts); + + ast.runtime_imports = runtime.Runtime.Imports{}; + ast.runtime_imports.@"$$m" = .{ .ref = Ref{ .source_index = 0, .inner_index = 0 }, .primary = Ref.None, .backup = Ref.None }; + ast.runtime_imports.__export = .{ .ref = Ref{ .source_index = 0, .inner_index = 1 }, .primary = Ref.None, .backup = Ref.None }; + ast.symbols = json_ast_symbols_list; + ast.module_ref = Ref{ .source_index = 0, .inner_index = 2 }; + ast.exports_ref = ast.runtime_imports.__export.?.ref; + ast.bundle_export_ref = Ref{ .source_index = 0, .inner_index = 3 }; + } else { + var parts = &[_]js_ast.Part{}; + ast = js_ast.Ast.initTest(parts); + } + }, + else => { + return; + }, } - } - } - }; - pub const BuildResolveResultPair = struct { - written: usize, - input_fd: ?StoredFileDescriptorType, - empty: bool = false, - }; - pub fn buildWithResolveResult( - bundler: *ThisBundler, - resolve_result: _resolver.Result, - allocator: *std.mem.Allocator, - loader: options.Loader, - comptime Writer: type, - writer: Writer, - comptime import_path_format: options.BundleOptions.ImportPathFormat, - file_descriptor: ?StoredFileDescriptorType, - filepath_hash: u32, - comptime WatcherType: type, - watcher: *WatcherType, - client_entry_point: ?*ClientEntryPoint, - ) !BuildResolveResultPair { - if (resolve_result.is_external) { - return BuildResolveResultPair{ - .written = 0, - .input_fd = null, - }; - } + switch (ast.parts.len) { + // It can be empty after parsing too + // A file like this is an example: + // "//# sourceMappingURL=validator.js.map" + 0 => { + code_offset = try this.writeEmptyModule(module_data.package_path, module_id); + }, + else => { + const register_ref = ast.runtime_imports.@"$$m".?.ref; + const E = js_ast.E; + const Expr = js_ast.Expr; + const Stmt = js_ast.Stmt; + + var prepend_part: js_ast.Part = undefined; + var needs_prepend_part = false; + if (ast.parts.len > 1) { + for (ast.parts) |part| { + if (part.tag != .none and part.stmts.len > 
0) { + prepend_part = part; + needs_prepend_part = true; + break; + } + } + } - errdefer bundler.resetStore(); + var package_path = js_ast.E.String{ .utf8 = module_data.package_path }; - var file_path = (resolve_result.pathConst() orelse { - return BuildResolveResultPair{ - .written = 0, - .input_fd = null, - }; - }).*; + var target_identifier = E.Identifier{ .ref = register_ref }; + var cjs_args: [2]js_ast.G.Arg = undefined; + var module_binding = js_ast.B.Identifier{ .ref = ast.module_ref.? }; + var exports_binding = js_ast.B.Identifier{ .ref = ast.exports_ref.? }; - if (strings.indexOf(file_path.text, bundler.fs.top_level_dir)) |i| { - file_path.pretty = file_path.text[i + bundler.fs.top_level_dir.len ..]; - } else if (!file_path.is_symlink) { - file_path.pretty = allocator.dupe(u8, bundler.fs.relativeTo(file_path.text)) catch unreachable; - } + var part = &ast.parts[ast.parts.len - 1]; - var old_bundler_allocator = bundler.allocator; - bundler.allocator = allocator; - defer bundler.allocator = old_bundler_allocator; - var old_linker_allocator = bundler.linker.allocator; - defer bundler.linker.allocator = old_linker_allocator; - bundler.linker.allocator = allocator; - - switch (loader) { - .css => { - const CSSBundlerHMR = Css.NewBundler( - Writer, - @TypeOf(&bundler.linker), - @TypeOf(&bundler.resolver.caches.fs), - WatcherType, - @TypeOf(bundler.fs), - true, - ); + var new_stmts: [1]Stmt = undefined; + var register_args: [1]Expr = undefined; + var closure = E.Arrow{ + .args = &cjs_args, + .body = .{ + .loc = logger.Loc.Empty, + .stmts = part.stmts, + }, + }; - const CSSBundler = Css.NewBundler( - Writer, - @TypeOf(&bundler.linker), - @TypeOf(&bundler.resolver.caches.fs), - WatcherType, - @TypeOf(bundler.fs), - false, - ); + cjs_args[0] = js_ast.G.Arg{ + .binding = js_ast.Binding{ + .loc = logger.Loc.Empty, + .data = .{ .b_identifier = &module_binding }, + }, + }; + cjs_args[1] = js_ast.G.Arg{ + .binding = js_ast.Binding{ + .loc = logger.Loc.Empty, + .data = .{ .b_identifier = &exports_binding }, + }, + }; - return BuildResolveResultPair{ - .written = brk: { - if (bundler.options.hot_module_reloading) { - break :brk (try CSSBundlerHMR.bundle( - file_path.text, - bundler.fs, - writer, - watcher, - &bundler.resolver.caches.fs, - filepath_hash, - file_descriptor, - allocator, - bundler.log, - &bundler.linker, - )).written; - } else { - break :brk (try CSSBundler.bundle( - file_path.text, - bundler.fs, - writer, - watcher, - &bundler.resolver.caches.fs, - filepath_hash, - file_descriptor, - allocator, - bundler.log, - &bundler.linker, - )).written; - } - }, - .input_fd = file_descriptor, - }; - }, - else => { - var result = bundler.parse( - allocator, - file_path, - loader, - resolve_result.dirname_fd, - file_descriptor, - filepath_hash, - client_entry_point, - ) orelse { - bundler.resetStore(); - return BuildResolveResultPair{ - .written = 0, - .input_fd = null, - }; - }; + var properties: [1]js_ast.G.Property = undefined; + var e_object = E.Object{ + .properties = &properties, + }; + const module_path_str = js_ast.Expr{ .data = .{ .e_string = &package_path }, .loc = logger.Loc.Empty }; + properties[0] = js_ast.G.Property{ + .key = module_path_str, + .value = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_arrow = &closure } }, + }; - if (result.empty) { - return BuildResolveResultPair{ .written = 0, .input_fd = result.input_fd, .empty = true }; - } + // if (!ast.uses_module_ref) { + // var symbol = &ast.symbols[ast.module_ref.?.inner_index]; + // symbol.original_name = "_$$"; + // } - try 
bundler.linker.link(file_path, &result, import_path_format, false); + // $$m(12345, "react", "index.js", function(module, exports) { + + // }) + var accessor = js_ast.E.Index{ .index = module_path_str, .target = js_ast.Expr{ + .data = .{ .e_object = &e_object }, + .loc = logger.Loc.Empty, + } }; + register_args[0] = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_index = &accessor } }; + + var call_register = E.Call{ + .target = Expr{ + .data = .{ .e_identifier = &target_identifier }, + .loc = logger.Loc{ .start = 0 }, + }, + .args = ®ister_args, + }; + var register_expr = Expr{ .loc = call_register.target.loc, .data = .{ .e_call = &call_register } }; + var decls: [1]js_ast.G.Decl = undefined; + var bundle_export_binding = js_ast.B.Identifier{ .ref = ast.runtime_imports.@"$$m".?.ref }; + var binding = js_ast.Binding{ + .loc = register_expr.loc, + .data = .{ .b_identifier = &bundle_export_binding }, + }; + decls[0] = js_ast.G.Decl{ + .value = register_expr, + .binding = binding, + }; + var export_var = js_ast.S.Local{ + .decls = &decls, + .is_export = true, + }; + new_stmts[0] = Stmt{ .loc = register_expr.loc, .data = .{ .s_local = &export_var } }; + part.stmts = &new_stmts; - return BuildResolveResultPair{ - .written = switch (result.ast.exports_kind) { - .esm => try bundler.print( - result, - Writer, - writer, - .esm, - ), - .cjs => try bundler.print( - result, - Writer, + var writer = js_printer.NewFileWriter(this.tmpfile); + var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols}); + + // It should only have one part. + ast.parts = ast.parts[ast.parts.len - 1 ..]; + const write_result = + try js_printer.printCommonJSThreaded( + @TypeOf(writer), writer, - .cjs, - ), - else => unreachable, + ast, + js_ast.Symbol.Map.initList(symbols), + &source, + false, + js_printer.Options{ + .to_module_ref = Ref.RuntimeRef, + .bundle_export_ref = ast.runtime_imports.@"$$m".?.ref, + .source_path = file_path, + .externals = ast.externals, + .indent = 0, + .require_ref = ast.require_ref, + .module_hash = module_id, + .runtime_imports = ast.runtime_imports, + .prepend_part_value = &prepend_part, + .prepend_part_key = if (needs_prepend_part) closure.body.stmts.ptr else null, + }, + Linker, + &bundler.linker, + &this.write_lock, + std.fs.File, + this.tmpfile, + std.fs.File.getPos, + &this.tmpfile_byte_offset, + ); + + code_offset = write_result.off; }, - .input_fd = result.input_fd, - }; - }, - } - } + } + } - pub fn buildWithResolveResultEager( - bundler: *ThisBundler, - resolve_result: _resolver.Result, - comptime import_path_format: options.BundleOptions.ImportPathFormat, - comptime Outstream: type, - outstream: Outstream, - client_entry_point_: ?*ClientEntryPoint, - ) !?options.OutputFile { - if (resolve_result.is_external) { - return null; - } + if (comptime isDebug) { + Output.prettyln("{s}@{s}/{s} - {d}:{d} \n", .{ package.name, package.version, package_relative_path, package.hash, module_id }); + Output.flush(); + std.debug.assert(package_relative_path.len > 0); + } - var file_path = (resolve_result.pathConst() orelse return null).*; + try this.appendToModuleList( + package, + module_id, + code_offset, + package_relative_path, + ); + } else { + // If it's app code, scan but do not fully parse. 
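+ // Scanning collects import records (scan_pass_result below) so dependencies can be
+ // queued without building a full AST. For example, a source containing only
+ // `import App from "./App"` would yield a single record for "./App" (names illustrative).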
+ switch (loader) {
+ .jsx,
+ .tsx,
+ .js,
+ .ts,
+ => {
+ const entry = bundler.resolver.caches.fs.readFileShared(
+ bundler.fs,
+ file_path.textZ(),
+ resolve.dirname_fd,
+ if (resolve.file_fd != 0) resolve.file_fd else null,
+ shared_buffer,
+ ) catch return;
+ if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) return;
+
+ const source = logger.Source.initRecycledFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null;
+
+ var jsx = bundler.options.jsx;
+ jsx.parse = loader.isJSX();
+ var opts = js_parser.Parser.Options.init(jsx, loader);
+
+ try bundler.resolver.caches.js.scan(
+ bundler.allocator,
+ scan_pass_result,
+ opts,
+ bundler.options.define,
+ log,
+ &source,
+ );
+ worker.data.estimated_input_lines_of_code += scan_pass_result.approximate_newline_count;

+ {
+ for (scan_pass_result.import_records.items) |*import_record, i| {
+ if (import_record.is_internal or import_record.is_unused) {
+ continue;
+ }

+ if (bundler.linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*_resolved_import| {
+ if (_resolved_import.is_external) {
+ continue;
+ }

+ var path = _resolved_import.path() orelse continue;

+ const loader_ = this.bundler.options.loader(path.name.ext);
+ if (!loader_.isJavaScriptLikeOrJSON()) continue;

+ path.* = try path.dupeAlloc(this.allocator);

+ if (BundledModuleData.get(this, _resolved_import)) |mod| {
+ if (comptime !FeatureFlags.bundle_dynamic_import) {
+ if (import_record.kind == .dynamic)
+ continue;
+ } else {
+ // When app code dynamically imports a large file,
+ // don't bundle it. Leave it as a separate file.
+ // The main value of bundling in development is minimizing tiny, waterfall HTTP requests.
+ // If you're dynamically importing a file larger than 100 KB, the developer is probably doing that deliberately. 
+ // There's a tradeoff between "I want to minimize page load time" + if (import_record.kind == .dynamic) { + this.dynamic_import_file_size_store_lock.lock(); + defer this.dynamic_import_file_size_store_lock.unlock(); + var dynamic_import_file_size = this.dynamic_import_file_size_store.getOrPut(mod.module_id) catch unreachable; + if (!dynamic_import_file_size.found_existing) { + var fd = _resolved_import.file_fd; + var can_close = false; + if (fd == 0) { + dynamic_import_file_size.value_ptr.* = 0; + fd = (std.fs.openFileAbsolute(path.textZ(), .{}) catch |err| { + this.log.addRangeWarningFmt( + &source, + import_record.range, + worker.allocator, + "{s} opening file: \"{s}\"", + .{ @errorName(err), path.text }, + ) catch unreachable; + continue; + }).handle; + can_close = true; + Fs.FileSystem.setMaxFd(fd); + } + + defer { + if (can_close and bundler.fs.fs.needToCloseFiles()) { + var _file = std.fs.File{ .handle = fd }; + _file.close(); + _resolved_import.file_fd = 0; + } else if (FeatureFlags.store_file_descriptors) { + _resolved_import.file_fd = fd; + } + } - if (std.fs.path.dirname(file_path.pretty)) |dirname| { - try output_dir.makePath(dirname); + var file = std.fs.File{ .handle = fd }; + var stat = file.stat() catch |err| { + this.log.addRangeWarningFmt( + &source, + import_record.range, + worker.allocator, + "{s} stat'ing file: \"{s}\"", + .{ @errorName(err), path.text }, + ) catch unreachable; + dynamic_import_file_size.value_ptr.* = 0; + continue; + }; + + dynamic_import_file_size.value_ptr.* = @truncate(u32, stat.size); + } + + if (dynamic_import_file_size.value_ptr.* > 1024 * 100) + continue; + } + } + + std.debug.assert(mod.module_id != 0); + try this.queue.upsert( + mod.module_id, + _resolved_import.*, + ); + } else { + try this.queue.upsert( + _resolved_import.hash( + this.bundler.fs.top_level_dir, + loader_, + ), + _resolved_import.*, + ); + } + } else |err| { + switch (err) { + error.ModuleNotFound => { + if (isPackagePath(import_record.path.text)) { + if (this.bundler.options.platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) { + try log.addResolveErrorWithTextDupe( + &source, + import_record.range, + this.allocator, + "Could not resolve Node.js builtin: \"{s}\".", + .{import_record.path.text}, + import_record.kind, + ); + } else { + try log.addResolveErrorWithTextDupe( + &source, + import_record.range, + this.allocator, + "Could not resolve: \"{s}\". 
Maybe you need to \"npm install\" (or yarn/pnpm)?", + .{import_record.path.text}, + import_record.kind, + ); + } + } else { + try log.addResolveErrorWithTextDupe( + &source, + import_record.range, + this.allocator, + "Could not resolve: \"{s}\"", + .{ + import_record.path.text, + }, + import_record.kind, + ); + } + }, + // assume other errors are already in the log + else => {}, + } + } + } + } + }, + else => {}, } - file = try output_dir.createFile(file_path.pretty, .{}); - } else { - file = outstream; } + } + }; - switch (loader) { - .jsx, .tsx, .js, .ts, .json => { - var result = bundler.parse( - bundler.allocator, - file_path, - loader, - resolve_result.dirname_fd, - null, - null, - client_entry_point_, - ) orelse { - return null; - }; + pub const BuildResolveResultPair = struct { + written: usize, + input_fd: ?StoredFileDescriptorType, + empty: bool = false, + }; + pub fn buildWithResolveResult( + bundler: *ThisBundler, + resolve_result: _resolver.Result, + allocator: *std.mem.Allocator, + loader: options.Loader, + comptime Writer: type, + writer: Writer, + comptime import_path_format: options.BundleOptions.ImportPathFormat, + file_descriptor: ?StoredFileDescriptorType, + filepath_hash: u32, + comptime WatcherType: type, + watcher: *WatcherType, + client_entry_point: ?*ClientEntryPoint, + ) !BuildResolveResultPair { + if (resolve_result.is_external) { + return BuildResolveResultPair{ + .written = 0, + .input_fd = null, + }; + } - try bundler.linker.link( - file_path, - &result, - import_path_format, - false, - ); + errdefer bundler.resetStore(); - output_file.size = try bundler.print( - result, - js_printer.FileWriter, - js_printer.NewFileWriter(file), - .esm, - ); + var file_path = (resolve_result.pathConst() orelse { + return BuildResolveResultPair{ + .written = 0, + .input_fd = null, + }; + }).*; - var file_op = options.OutputFile.FileOperation.fromFile(file.handle, file_path.pretty); + if (strings.indexOf(file_path.text, bundler.fs.top_level_dir)) |i| { + file_path.pretty = file_path.text[i + bundler.fs.top_level_dir.len ..]; + } else if (!file_path.is_symlink) { + file_path.pretty = allocator.dupe(u8, bundler.fs.relativeTo(file_path.text)) catch unreachable; + } - file_op.fd = file.handle; + var old_bundler_allocator = bundler.allocator; + bundler.allocator = allocator; + defer bundler.allocator = old_bundler_allocator; + var old_linker_allocator = bundler.linker.allocator; + defer bundler.linker.allocator = old_linker_allocator; + bundler.linker.allocator = allocator; - file_op.is_tmpdir = false; + switch (loader) { + .css => { + const CSSBundlerHMR = Css.NewBundler( + Writer, + @TypeOf(&bundler.linker), + @TypeOf(&bundler.resolver.caches.fs), + WatcherType, + @TypeOf(bundler.fs), + true, + ); - if (Outstream == std.fs.Dir) { - file_op.dir = outstream.fd; + const CSSBundler = Css.NewBundler( + Writer, + @TypeOf(&bundler.linker), + @TypeOf(&bundler.resolver.caches.fs), + WatcherType, + @TypeOf(bundler.fs), + false, + ); - if (bundler.fs.fs.needToCloseFiles()) { - file.close(); - file_op.fd = 0; + return BuildResolveResultPair{ + .written = brk: { + if (bundler.options.hot_module_reloading) { + break :brk (try CSSBundlerHMR.bundle( + file_path.text, + bundler.fs, + writer, + watcher, + &bundler.resolver.caches.fs, + filepath_hash, + file_descriptor, + allocator, + bundler.log, + &bundler.linker, + )).written; + } else { + break :brk (try CSSBundler.bundle( + file_path.text, + bundler.fs, + writer, + watcher, + &bundler.resolver.caches.fs, + filepath_hash, + file_descriptor, + 
allocator, + bundler.log, + &bundler.linker, + )).written; } - } + }, + .input_fd = file_descriptor, + }; + }, + else => { + var result = bundler.parse( + allocator, + file_path, + loader, + resolve_result.dirname_fd, + file_descriptor, + filepath_hash, + client_entry_point, + ) orelse { + bundler.resetStore(); + return BuildResolveResultPair{ + .written = 0, + .input_fd = null, + }; + }; - output_file.value = .{ .move = file_op }; - }, - .css => { - const CSSWriter = Css.NewWriter( - std.fs.File, - @TypeOf(&bundler.linker), - import_path_format, - void, - ); - const entry = bundler.resolver.caches.fs.readFile( - bundler.fs, - file_path.text, - resolve_result.dirname_fd, - !cache_files, - null, - ) catch return null; + if (result.empty) { + return BuildResolveResultPair{ .written = 0, .input_fd = result.input_fd, .empty = true }; + } - const _file = Fs.File{ .path = file_path, .contents = entry.contents }; - var source = try logger.Source.initFile(_file, bundler.allocator); - source.contents_is_recycled = !cache_files; - var css_writer = CSSWriter.init( - &source, - file, - &bundler.linker, - bundler.log, - ); - var did_warn = false; - try css_writer.run(bundler.log, bundler.allocator, &did_warn); - output_file.size = css_writer.written; - var file_op = options.OutputFile.FileOperation.fromFile(file.handle, file_path.pretty); + try bundler.linker.link(file_path, &result, import_path_format, false); - file_op.fd = file.handle; + return BuildResolveResultPair{ + .written = switch (result.ast.exports_kind) { + .esm => try bundler.print( + result, + Writer, + writer, + .esm, + ), + .cjs => try bundler.print( + result, + Writer, + writer, + .cjs, + ), + else => unreachable, + }, + .input_fd = result.input_fd, + }; + }, + } + } - file_op.is_tmpdir = false; + pub fn buildWithResolveResultEager( + bundler: *ThisBundler, + resolve_result: _resolver.Result, + comptime import_path_format: options.BundleOptions.ImportPathFormat, + comptime Outstream: type, + outstream: Outstream, + client_entry_point_: ?*ClientEntryPoint, + ) !?options.OutputFile { + if (resolve_result.is_external) { + return null; + } - if (Outstream == std.fs.Dir) { - file_op.dir = outstream.fd; + var file_path = (resolve_result.pathConst() orelse return null).*; - if (bundler.fs.fs.needToCloseFiles()) { - file.close(); - file_op.fd = 0; - } - } + // Step 1. 
Parse & scan + const loader = bundler.options.loader(file_path.name.ext); - output_file.value = .{ .move = file_op }; - }, - .file => { - var hashed_name = try bundler.linker.getHashedFilename(file_path, null); - var pathname = try bundler.allocator.alloc(u8, hashed_name.len + file_path.name.ext.len); - std.mem.copy(u8, pathname, hashed_name); - std.mem.copy(u8, pathname[hashed_name.len..], file_path.name.ext); - const dir = if (bundler.options.output_dir_handle) |output_handle| output_handle.fd else 0; - - output_file.value = .{ - .copy = options.OutputFile.FileOperation{ - .pathname = pathname, - .dir = dir, - .is_outdir = true, - }, - }; - }, + if (client_entry_point_) |client_entry_point| { + file_path = client_entry_point.source.path; + } - // // TODO: - // else => {}, - } + file_path.pretty = Linker.relative_paths_list.append(string, bundler.fs.relativeTo(file_path.text)) catch unreachable; + + var output_file = options.OutputFile{ + .input = file_path, + .loader = loader, + .value = undefined, + }; - return output_file; + var file: std.fs.File = undefined; + + if (Outstream == std.fs.Dir) { + const output_dir = outstream; + + if (std.fs.path.dirname(file_path.pretty)) |dirname| { + try output_dir.makePath(dirname); + } + file = try output_dir.createFile(file_path.pretty, .{}); + } else { + file = outstream; } - pub fn print( - bundler: *ThisBundler, - result: ParseResult, - comptime Writer: type, - writer: Writer, - comptime format: js_printer.Format, - ) !usize { - const ast = result.ast; - var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols}); - - return switch (format) { - .cjs => try js_printer.printCommonJS( - Writer, - writer, - ast, - js_ast.Symbol.Map.initList(symbols), - &result.source, - false, - js_printer.Options{ - .to_module_ref = Ref.RuntimeRef, - .externals = ast.externals, - .runtime_imports = ast.runtime_imports, - .require_ref = ast.require_ref, - .css_import_behavior = bundler.options.cssImportBehavior(), - }, - Linker, - &bundler.linker, - ), - .esm => try js_printer.printAst( - Writer, - writer, - ast, - js_ast.Symbol.Map.initList(symbols), - &result.source, + switch (loader) { + .jsx, .tsx, .js, .ts, .json => { + var result = bundler.parse( + bundler.allocator, + file_path, + loader, + resolve_result.dirname_fd, + null, + null, + client_entry_point_, + ) orelse { + return null; + }; + + try bundler.linker.link( + file_path, + &result, + import_path_format, false, - js_printer.Options{ - .to_module_ref = Ref.RuntimeRef, - .externals = ast.externals, - .runtime_imports = ast.runtime_imports, - .require_ref = ast.require_ref, + ); - .css_import_behavior = bundler.options.cssImportBehavior(), - }, - Linker, - &bundler.linker, - ), - }; - } + output_file.size = try bundler.print( + result, + js_printer.FileWriter, + js_printer.NewFileWriter(file), + .esm, + ); - pub fn parse( - bundler: *ThisBundler, - allocator: *std.mem.Allocator, - path: Fs.Path, - loader: options.Loader, - // only used when file_descriptor is null - dirname_fd: StoredFileDescriptorType, - file_descriptor: ?StoredFileDescriptorType, - file_hash: ?u32, - client_entry_point_: anytype, - ) ?ParseResult { - if (FeatureFlags.tracing) { - bundler.timer.start(); - } - defer { - if (FeatureFlags.tracing) { - bundler.timer.stop(); - bundler.elapsed += bundler.timer.elapsed; - } - } - var result: ParseResult = undefined; - var input_fd: ?StoredFileDescriptorType = null; + var file_op = options.OutputFile.FileOperation.fromFile(file.handle, file_path.pretty); - const source: logger.Source = 
brk: { - if (client_entry_point_) |client_entry_point| { - if (@hasField(std.meta.Child(@TypeOf(client_entry_point)), "source")) { - break :brk client_entry_point.source; - } - } + file_op.fd = file.handle; - if (strings.eqlComptime(path.namespace, "node")) { - if (NodeFallbackModules.contentsFromPath(path.text)) |code| { - break :brk logger.Source.initPathString(path.text, code); - } + file_op.is_tmpdir = false; - break :brk logger.Source.initPathString(path.text, ""); + if (Outstream == std.fs.Dir) { + file_op.dir = outstream.fd; + + if (bundler.fs.fs.needToCloseFiles()) { + file.close(); + file_op.fd = 0; + } } + output_file.value = .{ .move = file_op }; + }, + .css => { + const CSSWriter = Css.NewWriter( + std.fs.File, + @TypeOf(&bundler.linker), + import_path_format, + void, + ); const entry = bundler.resolver.caches.fs.readFile( bundler.fs, - path.text, - dirname_fd, - true, - file_descriptor, - ) catch |err| { - bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} reading \"{s}\"", .{ @errorName(err), path.text }) catch {}; - return null; - }; - input_fd = entry.fd; - break :brk logger.Source.initRecycledFile(Fs.File{ .path = path, .contents = entry.contents }, bundler.allocator) catch return null; - }; + file_path.text, + resolve_result.dirname_fd, + !cache_files, + null, + ) catch return null; + + const _file = Fs.File{ .path = file_path, .contents = entry.contents }; + var source = try logger.Source.initFile(_file, bundler.allocator); + source.contents_is_recycled = !cache_files; + var css_writer = CSSWriter.init( + &source, + file, + &bundler.linker, + bundler.log, + ); + var did_warn = false; + try css_writer.run(bundler.log, bundler.allocator, &did_warn); + output_file.size = css_writer.written; + var file_op = options.OutputFile.FileOperation.fromFile(file.handle, file_path.pretty); - if (source.contents.len == 0 or (source.contents.len < 33 and std.mem.trim(u8, source.contents, "\n\r ").len == 0)) { - return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty }; - } + file_op.fd = file.handle; - switch (loader) { - .js, - .jsx, - .ts, - .tsx, - => { - var jsx = bundler.options.jsx; - jsx.parse = loader.isJSX(); - var opts = js_parser.Parser.Options.init(jsx, loader); - opts.enable_bundling = false; - opts.transform_require_to_import = true; - opts.can_import_from_bundle = bundler.options.node_modules_bundle != null; - - // HMR is enabled when devserver is running - // unless you've explicitly disabled it - // or you're running in SSR - // or the file is a node_module - opts.features.hot_module_reloading = bundler.options.hot_module_reloading and - bundler.options.platform != .bun and - (!opts.can_import_from_bundle or - (opts.can_import_from_bundle and !path.isNodeModule())); - opts.features.react_fast_refresh = opts.features.hot_module_reloading and - jsx.parse and - bundler.options.jsx.supports_fast_refresh; - opts.filepath_hash_for_hmr = file_hash orelse 0; - opts.warn_about_unbundled_modules = bundler.options.platform != .bun; - const value = (bundler.resolver.caches.js.parse( - allocator, - opts, - bundler.options.define, - bundler.log, - &source, - ) catch null) orelse return null; - return ParseResult{ - .ast = value, - .source = source, - .loader = loader, - .input_fd = input_fd, - }; - }, - .json => { - var expr = json_parser.ParseJSON(&source, bundler.log, allocator) catch return null; - var stmt = js_ast.Stmt.alloc(allocator, js_ast.S.ExportDefault, js_ast.S.ExportDefault{ - .value = 
js_ast.StmtOrExpr{ .expr = expr }, - .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} }, - }, logger.Loc{ .start = 0 }); - var stmts = allocator.alloc(js_ast.Stmt, 1) catch unreachable; - stmts[0] = stmt; - var parts = allocator.alloc(js_ast.Part, 1) catch unreachable; - parts[0] = js_ast.Part{ .stmts = stmts }; - - return ParseResult{ - .ast = js_ast.Ast.initTest(parts), - .source = source, - .loader = loader, - .input_fd = input_fd, - }; - }, - .css => {}, - else => Global.panic("Unsupported loader {s} for path: {s}", .{ loader, source.path.text }), - } + file_op.is_tmpdir = false; - return null; - } + if (Outstream == std.fs.Dir) { + file_op.dir = outstream.fd; - // This is public so it can be used by the HTTP handler when matching against public dir. - pub threadlocal var tmp_buildfile_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; - threadlocal var tmp_buildfile_buf2: [std.fs.MAX_PATH_BYTES]u8 = undefined; + if (bundler.fs.fs.needToCloseFiles()) { + file.close(); + file_op.fd = 0; + } + } - // We try to be mostly stateless when serving - // This means we need a slightly different resolver setup - pub fn buildFile( - bundler: *ThisBundler, - log: *logger.Log, - allocator: *std.mem.Allocator, - relative_path: string, - _extension: string, - comptime client_entry_point_enabled: bool, - comptime serve_as_package_path: bool, - ) !ServeResult { - var extension = _extension; - var old_log = bundler.log; - var old_allocator = bundler.allocator; - - bundler.setLog(log); - defer bundler.setLog(old_log); - - if (strings.eqlComptime(relative_path, "__runtime.js")) { - return ServeResult{ - .file = options.OutputFile.initBuf(runtime.Runtime.sourceContent(), "__runtime.js", .js), - .mime_type = MimeType.javascript, + output_file.value = .{ .move = file_op }; + }, + .file => { + var hashed_name = try bundler.linker.getHashedFilename(file_path, null); + var pathname = try bundler.allocator.alloc(u8, hashed_name.len + file_path.name.ext.len); + std.mem.copy(u8, pathname, hashed_name); + std.mem.copy(u8, pathname[hashed_name.len..], file_path.name.ext); + const dir = if (bundler.options.output_dir_handle) |output_handle| output_handle.fd else 0; + + output_file.value = .{ + .copy = options.OutputFile.FileOperation{ + .pathname = pathname, + .dir = dir, + .is_outdir = true, + }, }; - } + }, - var absolute_path = if (comptime serve_as_package_path) - relative_path - else - resolve_path.joinAbsStringBuf( - bundler.fs.top_level_dir, - &tmp_buildfile_buf, - &([_][]const u8{relative_path}), - .auto, - ); + // // TODO: + // else => {}, + } - defer { - js_ast.Expr.Data.Store.reset(); - js_ast.Stmt.Data.Store.reset(); + return output_file; + } + + pub fn print( + bundler: *ThisBundler, + result: ParseResult, + comptime Writer: type, + writer: Writer, + comptime format: js_printer.Format, + ) !usize { + const ast = result.ast; + var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols}); + + return switch (format) { + .cjs => try js_printer.printCommonJS( + Writer, + writer, + ast, + js_ast.Symbol.Map.initList(symbols), + &result.source, + false, + js_printer.Options{ + .to_module_ref = Ref.RuntimeRef, + .externals = ast.externals, + .runtime_imports = ast.runtime_imports, + .require_ref = ast.require_ref, + .css_import_behavior = bundler.options.cssImportBehavior(), + }, + Linker, + &bundler.linker, + ), + .esm => try js_printer.printAst( + Writer, + writer, + ast, + js_ast.Symbol.Map.initList(symbols), + &result.source, + false, + js_printer.Options{ + .to_module_ref = Ref.RuntimeRef, + 
.externals = ast.externals, + .runtime_imports = ast.runtime_imports, + .require_ref = ast.require_ref, + + .css_import_behavior = bundler.options.cssImportBehavior(), + }, + Linker, + &bundler.linker, + ), + }; + } + + pub fn parse( + bundler: *ThisBundler, + allocator: *std.mem.Allocator, + path: Fs.Path, + loader: options.Loader, + // only used when file_descriptor is null + dirname_fd: StoredFileDescriptorType, + file_descriptor: ?StoredFileDescriptorType, + file_hash: ?u32, + client_entry_point_: anytype, + ) ?ParseResult { + if (FeatureFlags.tracing) { + bundler.timer.start(); + } + defer { + if (FeatureFlags.tracing) { + bundler.timer.stop(); + bundler.elapsed += bundler.timer.elapsed; } + } + var result: ParseResult = undefined; + var input_fd: ?StoredFileDescriptorType = null; - // If the extension is .js, omit it. - // if (absolute_path.len > ".js".len and strings.eqlComptime(absolute_path[absolute_path.len - ".js".len ..], ".js")) { - // absolute_path = absolute_path[0 .. absolute_path.len - ".js".len]; - // } + const source: logger.Source = brk: { + if (client_entry_point_) |client_entry_point| { + if (@hasField(std.meta.Child(@TypeOf(client_entry_point)), "source")) { + break :brk client_entry_point.source; + } + } - const resolved = if (comptime !client_entry_point_enabled) (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .stmt)) else brk: { - const absolute_pathname = Fs.PathName.init(absolute_path); - - const loader_for_ext = bundler.options.loader(absolute_pathname.ext); - - // The expected pathname looks like: - // /pages/index.entry.tsx - // /pages/index.entry.js - // /pages/index.entry.ts - // /pages/index.entry.jsx - if (loader_for_ext.supportsClientEntryPoint()) { - const absolute_pathname_pathname = Fs.PathName.init(absolute_pathname.base); - - if (strings.eqlComptime(absolute_pathname_pathname.ext, ".entry")) { - const trail_dir = absolute_pathname.dirWithTrailingSlash(); - var len: usize = trail_dir.len; - std.mem.copy(u8, tmp_buildfile_buf2[0..len], trail_dir); - - std.mem.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname_pathname.base); - len += absolute_pathname_pathname.base.len; - std.mem.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname.ext); - len += absolute_pathname.ext.len; - std.debug.assert(len > 0); - const decoded_entry_point_path = tmp_buildfile_buf2[0..len]; - break :brk (try bundler.resolver.resolve(bundler.fs.top_level_dir, decoded_entry_point_path, .entry_point)); - } + if (strings.eqlComptime(path.namespace, "node")) { + if (NodeFallbackModules.contentsFromPath(path.text)) |code| { + break :brk logger.Source.initPathString(path.text, code); } - break :brk (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .stmt)); + break :brk logger.Source.initPathString(path.text, ""); + } + + const entry = bundler.resolver.caches.fs.readFile( + bundler.fs, + path.text, + dirname_fd, + true, + file_descriptor, + ) catch |err| { + bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} reading \"{s}\"", .{ @errorName(err), path.text }) catch {}; + return null; }; + input_fd = entry.fd; + break :brk logger.Source.initRecycledFile(Fs.File{ .path = path, .contents = entry.contents }, bundler.allocator) catch return null; + }; - const path = (resolved.pathConst() orelse return error.ModuleNotFound); + if (source.contents.len == 0 or (source.contents.len < 33 and std.mem.trim(u8, source.contents, "\n\r ").len == 0)) { + return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, 
.empty = true, .ast = js_ast.Ast.empty }; + } - const loader = bundler.options.loader(path.name.ext); - const mime_type_ext = bundler.options.out_extensions.get(path.name.ext) orelse path.name.ext; + switch (loader) { + .js, + .jsx, + .ts, + .tsx, + => { + var jsx = bundler.options.jsx; + jsx.parse = loader.isJSX(); + var opts = js_parser.Parser.Options.init(jsx, loader); + opts.enable_bundling = false; + opts.transform_require_to_import = true; + opts.can_import_from_bundle = bundler.options.node_modules_bundle != null; + + // HMR is enabled when devserver is running + // unless you've explicitly disabled it + // or you're running in SSR + // or the file is a node_module + opts.features.hot_module_reloading = bundler.options.hot_module_reloading and + bundler.options.platform != .bun and + (!opts.can_import_from_bundle or + (opts.can_import_from_bundle and !path.isNodeModule())); + opts.features.react_fast_refresh = opts.features.hot_module_reloading and + jsx.parse and + bundler.options.jsx.supports_fast_refresh; + opts.filepath_hash_for_hmr = file_hash orelse 0; + opts.warn_about_unbundled_modules = bundler.options.platform != .bun; + const value = (bundler.resolver.caches.js.parse( + allocator, + opts, + bundler.options.define, + bundler.log, + &source, + ) catch null) orelse return null; + return ParseResult{ + .ast = value, + .source = source, + .loader = loader, + .input_fd = input_fd, + }; + }, + .json => { + var expr = json_parser.ParseJSON(&source, bundler.log, allocator) catch return null; + var stmt = js_ast.Stmt.alloc(allocator, js_ast.S.ExportDefault, js_ast.S.ExportDefault{ + .value = js_ast.StmtOrExpr{ .expr = expr }, + .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} }, + }, logger.Loc{ .start = 0 }); + var stmts = allocator.alloc(js_ast.Stmt, 1) catch unreachable; + stmts[0] = stmt; + var parts = allocator.alloc(js_ast.Part, 1) catch unreachable; + parts[0] = js_ast.Part{ .stmts = stmts }; - switch (loader) { - .js, .jsx, .ts, .tsx, .css => { - return ServeResult{ - .file = options.OutputFile.initPending(loader, resolved), - .mime_type = MimeType.byLoader( - loader, - mime_type_ext[1..], - ), - }; - }, - .json => { - return ServeResult{ - .file = options.OutputFile.initPending(loader, resolved), - .mime_type = MimeType.transpiled_json, - }; - }, - else => { - var abs_path = path.text; - const file = try std.fs.openFileAbsolute(abs_path, .{ .read = true }); - var stat = try file.stat(); - return ServeResult{ - .file = options.OutputFile.initFile(file, abs_path, stat.size), - .mime_type = MimeType.byLoader( - loader, - mime_type_ext[1..], - ), - }; - }, - } + return ParseResult{ + .ast = js_ast.Ast.initTest(parts), + .source = source, + .loader = loader, + .input_fd = input_fd, + }; + }, + .css => {}, + else => Global.panic("Unsupported loader {s} for path: {s}", .{ loader, source.path.text }), } - pub fn normalizeEntryPointPath(bundler: *ThisBundler, _entry: string) string { - var paths = [_]string{_entry}; - var entry = bundler.fs.abs(&paths); + return null; + } + + // This is public so it can be used by the HTTP handler when matching against public dir. 
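+ // These are per-thread scratch buffers: buildFile() joins the request path into
+ // tmp_buildfile_buf, and tmp_buildfile_buf2 receives the decoded entry point path
+ // (e.g. "/pages/index.entry.tsx" -> "/pages/index.tsx") further down.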
+ pub threadlocal var tmp_buildfile_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + threadlocal var tmp_buildfile_buf2: [std.fs.MAX_PATH_BYTES]u8 = undefined; - std.fs.accessAbsolute(entry, .{}) catch |err| { - return _entry; + // We try to be mostly stateless when serving + // This means we need a slightly different resolver setup + pub fn buildFile( + bundler: *ThisBundler, + log: *logger.Log, + allocator: *std.mem.Allocator, + relative_path: string, + _extension: string, + comptime client_entry_point_enabled: bool, + comptime serve_as_package_path: bool, + ) !ServeResult { + var extension = _extension; + var old_log = bundler.log; + var old_allocator = bundler.allocator; + + bundler.setLog(log); + defer bundler.setLog(old_log); + + if (strings.eqlComptime(relative_path, "__runtime.js")) { + return ServeResult{ + .file = options.OutputFile.initBuf(runtime.Runtime.sourceContent(), "__runtime.js", .js), + .mime_type = MimeType.javascript, }; + } - entry = bundler.fs.relativeTo(entry); - - if (!strings.startsWith(entry, "./")) { - // Entry point paths without a leading "./" are interpreted as package - // paths. This happens because they go through general path resolution - // like all other import paths so that plugins can run on them. Requiring - // a leading "./" for a relative path simplifies writing plugins because - // entry points aren't a special case. - // - // However, requiring a leading "./" also breaks backward compatibility - // and makes working with the CLI more difficult. So attempt to insert - // "./" automatically when needed. We don't want to unconditionally insert - // a leading "./" because the path may not be a file system path. For - // example, it may be a URL. So only insert a leading "./" when the path - // is an exact match for an existing file. - var __entry = bundler.allocator.alloc(u8, "./".len + entry.len) catch unreachable; - __entry[0] = '.'; - __entry[1] = '/'; - std.mem.copy(u8, __entry[2..__entry.len], entry); - entry = __entry; - } + var absolute_path = if (comptime serve_as_package_path) + relative_path + else + resolve_path.joinAbsStringBuf( + bundler.fs.top_level_dir, + &tmp_buildfile_buf, + &([_][]const u8{relative_path}), + .auto, + ); - return entry; + defer { + js_ast.Expr.Data.Store.reset(); + js_ast.Stmt.Data.Store.reset(); } - fn enqueueEntryPoints(bundler: *ThisBundler, entry_points: []_resolver.Result, comptime normalize_entry_point: bool) usize { - var entry_point_i: usize = 0; + // If the extension is .js, omit it. + // if (absolute_path.len > ".js".len and strings.eqlComptime(absolute_path[absolute_path.len - ".js".len ..], ".js")) { + // absolute_path = absolute_path[0 .. 
absolute_path.len - ".js".len]; + // } + + const resolved = if (comptime !client_entry_point_enabled) (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .stmt)) else brk: { + const absolute_pathname = Fs.PathName.init(absolute_path); + + const loader_for_ext = bundler.options.loader(absolute_pathname.ext); + + // The expected pathname looks like: + // /pages/index.entry.tsx + // /pages/index.entry.js + // /pages/index.entry.ts + // /pages/index.entry.jsx + if (loader_for_ext.supportsClientEntryPoint()) { + const absolute_pathname_pathname = Fs.PathName.init(absolute_pathname.base); + + if (strings.eqlComptime(absolute_pathname_pathname.ext, ".entry")) { + const trail_dir = absolute_pathname.dirWithTrailingSlash(); + var len: usize = trail_dir.len; + std.mem.copy(u8, tmp_buildfile_buf2[0..len], trail_dir); + + std.mem.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname_pathname.base); + len += absolute_pathname_pathname.base.len; + std.mem.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname.ext); + len += absolute_pathname.ext.len; + std.debug.assert(len > 0); + const decoded_entry_point_path = tmp_buildfile_buf2[0..len]; + break :brk (try bundler.resolver.resolve(bundler.fs.top_level_dir, decoded_entry_point_path, .entry_point)); + } + } - for (bundler.options.entry_points) |_entry| { - var entry: string = if (comptime normalize_entry_point) bundler.normalizeEntryPointPath(_entry) else _entry; + break :brk (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .stmt)); + }; - defer { - js_ast.Expr.Data.Store.reset(); - js_ast.Stmt.Data.Store.reset(); - } + const path = (resolved.pathConst() orelse return error.ModuleNotFound); - const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| { - Output.prettyError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) }); - continue; + const loader = bundler.options.loader(path.name.ext); + const mime_type_ext = bundler.options.out_extensions.get(path.name.ext) orelse path.name.ext; + + switch (loader) { + .js, .jsx, .ts, .tsx, .css => { + return ServeResult{ + .file = options.OutputFile.initPending(loader, resolved), + .mime_type = MimeType.byLoader( + loader, + mime_type_ext[1..], + ), + }; + }, + .json => { + return ServeResult{ + .file = options.OutputFile.initPending(loader, resolved), + .mime_type = MimeType.transpiled_json, + }; + }, + else => { + var abs_path = path.text; + const file = try std.fs.openFileAbsolute(abs_path, .{ .read = true }); + var stat = try file.stat(); + return ServeResult{ + .file = options.OutputFile.initFile(file, abs_path, stat.size), + .mime_type = MimeType.byLoader( + loader, + mime_type_ext[1..], + ), }; + }, + } + } - if (result.pathConst() == null) { - Output.prettyError("\"{s}\" is disabled due to \"browser\" field in package.json.\n", .{ - entry, - }); - continue; - } + pub fn normalizeEntryPointPath(bundler: *ThisBundler, _entry: string) string { + var paths = [_]string{_entry}; + var entry = bundler.fs.abs(&paths); - if (bundler.linker.enqueueResolveResult(&result) catch unreachable) { - entry_points[entry_point_i] = result; - entry_point_i += 1; - } - } + std.fs.accessAbsolute(entry, .{}) catch |err| { + return _entry; + }; - return entry_point_i; + entry = bundler.fs.relativeTo(entry); + + if (!strings.startsWith(entry, "./")) { + // Entry point paths without a leading "./" are interpreted as package + // paths. 
This happens because they go through general path resolution + // like all other import paths so that plugins can run on them. Requiring + // a leading "./" for a relative path simplifies writing plugins because + // entry points aren't a special case. + // + // However, requiring a leading "./" also breaks backward compatibility + // and makes working with the CLI more difficult. So attempt to insert + // "./" automatically when needed. We don't want to unconditionally insert + // a leading "./" because the path may not be a file system path. For + // example, it may be a URL. So only insert a leading "./" when the path + // is an exact match for an existing file. + var __entry = bundler.allocator.alloc(u8, "./".len + entry.len) catch unreachable; + __entry[0] = '.'; + __entry[1] = '/'; + std.mem.copy(u8, __entry[2..__entry.len], entry); + entry = __entry; } - pub fn bundle( - allocator: *std.mem.Allocator, - log: *logger.Log, - opts: Api.TransformOptions, - ) !options.TransformResult { - var bundler = try ThisBundler.init(allocator, log, opts, null, null); - bundler.configureLinker(); - try bundler.configureRouter(false); - try bundler.configureDefines(); + return entry; + } - var skip_normalize = false; - var load_from_routes = false; - if (bundler.options.routes.routes_enabled and bundler.options.entry_points.len == 0) { - if (bundler.router) |router| { - bundler.options.entry_points = try router.getEntryPoints(allocator); - skip_normalize = true; - load_from_routes = true; - } + fn enqueueEntryPoints(bundler: *ThisBundler, entry_points: []_resolver.Result, comptime normalize_entry_point: bool) usize { + var entry_point_i: usize = 0; + + for (bundler.options.entry_points) |_entry| { + var entry: string = if (comptime normalize_entry_point) bundler.normalizeEntryPointPath(_entry) else _entry; + + defer { + js_ast.Expr.Data.Store.reset(); + js_ast.Stmt.Data.Store.reset(); } - if (bundler.options.write and bundler.options.output_dir.len > 0) {} + const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| { + Output.prettyError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) }); + continue; + }; - // 100.00 µs std.fifo.LinearFifo(resolver.Result,std.fifo.LinearFifoBufferType { .Dynamic = {}}).writeItemAssumeCapacity - if (bundler.options.resolve_mode != .lazy) { - try bundler.resolve_queue.ensureUnusedCapacity(3); + if (result.pathConst() == null) { + Output.prettyError("\"{s}\" is disabled due to \"browser\" field in package.json.\n", .{ + entry, + }); + continue; } - var entry_points = try allocator.alloc(_resolver.Result, bundler.options.entry_points.len); - if (skip_normalize) { - entry_points = entry_points[0..bundler.enqueueEntryPoints(entry_points, false)]; - } else { - entry_points = entry_points[0..bundler.enqueueEntryPoints(entry_points, true)]; + if (bundler.linker.enqueueResolveResult(&result) catch unreachable) { + entry_points[entry_point_i] = result; + entry_point_i += 1; } + } - if (log.level == .verbose) { - bundler.resolver.debug_logs = try DebugLogs.init(allocator); + return entry_point_i; + } + + pub fn bundle( + allocator: *std.mem.Allocator, + log: *logger.Log, + opts: Api.TransformOptions, + ) !options.TransformResult { + var bundler = try ThisBundler.init(allocator, log, opts, null, null); + bundler.configureLinker(); + try bundler.configureRouter(false); + try bundler.configureDefines(); + + var skip_normalize = false; + var load_from_routes = false; + if (bundler.options.routes.routes_enabled and 
bundler.options.entry_points.len == 0) { + if (bundler.router) |router| { + bundler.options.entry_points = try router.getEntryPoints(allocator); + skip_normalize = true; + load_from_routes = true; } + } - var did_start = false; + if (bundler.options.write and bundler.options.output_dir.len > 0) {} - if (bundler.options.output_dir_handle == null) { - const outstream = std.io.getStdOut(); + // 100.00 µs std.fifo.LinearFifo(resolver.Result,std.fifo.LinearFifoBufferType { .Dynamic = {}}).writeItemAssumeCapacity + if (bundler.options.resolve_mode != .lazy) { + try bundler.resolve_queue.ensureUnusedCapacity(3); + } - if (load_from_routes) { - if (bundler.options.framework) |*framework| { - if (framework.client.isEnabled()) { - did_start = true; - try switch (bundler.options.import_path_format) { - .relative => bundler.processResolveQueue(.relative, true, @TypeOf(outstream), outstream), - .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, @TypeOf(outstream), outstream), - .absolute_url => bundler.processResolveQueue(.absolute_url, true, @TypeOf(outstream), outstream), - .absolute_path => bundler.processResolveQueue(.absolute_path, true, @TypeOf(outstream), outstream), - .package_path => bundler.processResolveQueue(.package_path, true, @TypeOf(outstream), outstream), - }; - } + var entry_points = try allocator.alloc(_resolver.Result, bundler.options.entry_points.len); + if (skip_normalize) { + entry_points = entry_points[0..bundler.enqueueEntryPoints(entry_points, false)]; + } else { + entry_points = entry_points[0..bundler.enqueueEntryPoints(entry_points, true)]; + } + + if (log.level == .verbose) { + bundler.resolver.debug_logs = try DebugLogs.init(allocator); + } + + var did_start = false; + + if (bundler.options.output_dir_handle == null) { + const outstream = std.io.getStdOut(); + + if (load_from_routes) { + if (bundler.options.framework) |*framework| { + if (framework.client.isEnabled()) { + did_start = true; + try switch (bundler.options.import_path_format) { + .relative => bundler.processResolveQueue(.relative, true, @TypeOf(outstream), outstream), + .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, @TypeOf(outstream), outstream), + .absolute_url => bundler.processResolveQueue(.absolute_url, true, @TypeOf(outstream), outstream), + .absolute_path => bundler.processResolveQueue(.absolute_path, true, @TypeOf(outstream), outstream), + .package_path => bundler.processResolveQueue(.package_path, true, @TypeOf(outstream), outstream), + }; } } + } - if (!did_start) { - try switch (bundler.options.import_path_format) { - .relative => bundler.processResolveQueue(.relative, false, @TypeOf(outstream), outstream), - .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, false, @TypeOf(outstream), outstream), - .absolute_url => bundler.processResolveQueue(.absolute_url, false, @TypeOf(outstream), outstream), - .absolute_path => bundler.processResolveQueue(.absolute_path, false, @TypeOf(outstream), outstream), - .package_path => bundler.processResolveQueue(.package_path, false, @TypeOf(outstream), outstream), - }; - } - } else { - const output_dir = bundler.options.output_dir_handle orelse { - Output.printError("Invalid or missing output directory.", .{}); - Output.flush(); - Global.crash(); + if (!did_start) { + try switch (bundler.options.import_path_format) { + .relative => bundler.processResolveQueue(.relative, false, @TypeOf(outstream), outstream), + .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, false, @TypeOf(outstream), 
outstream), + .absolute_url => bundler.processResolveQueue(.absolute_url, false, @TypeOf(outstream), outstream), + .absolute_path => bundler.processResolveQueue(.absolute_path, false, @TypeOf(outstream), outstream), + .package_path => bundler.processResolveQueue(.package_path, false, @TypeOf(outstream), outstream), }; + } + } else { + const output_dir = bundler.options.output_dir_handle orelse { + Output.printError("Invalid or missing output directory.", .{}); + Output.flush(); + Global.crash(); + }; - if (load_from_routes) { - if (bundler.options.framework) |*framework| { - if (framework.client.isEnabled()) { - did_start = true; - try switch (bundler.options.import_path_format) { - .relative => bundler.processResolveQueue(.relative, true, std.fs.Dir, output_dir), - .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, std.fs.Dir, output_dir), - .absolute_url => bundler.processResolveQueue(.absolute_url, true, std.fs.Dir, output_dir), - .absolute_path => bundler.processResolveQueue(.absolute_path, true, std.fs.Dir, output_dir), - .package_path => bundler.processResolveQueue(.package_path, true, std.fs.Dir, output_dir), - }; - } + if (load_from_routes) { + if (bundler.options.framework) |*framework| { + if (framework.client.isEnabled()) { + did_start = true; + try switch (bundler.options.import_path_format) { + .relative => bundler.processResolveQueue(.relative, true, std.fs.Dir, output_dir), + .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, std.fs.Dir, output_dir), + .absolute_url => bundler.processResolveQueue(.absolute_url, true, std.fs.Dir, output_dir), + .absolute_path => bundler.processResolveQueue(.absolute_path, true, std.fs.Dir, output_dir), + .package_path => bundler.processResolveQueue(.package_path, true, std.fs.Dir, output_dir), + }; } } - - if (!did_start) { - try switch (bundler.options.import_path_format) { - .relative => bundler.processResolveQueue(.relative, false, std.fs.Dir, output_dir), - .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, false, std.fs.Dir, output_dir), - .absolute_url => bundler.processResolveQueue(.absolute_url, false, std.fs.Dir, output_dir), - .absolute_path => bundler.processResolveQueue(.absolute_path, false, std.fs.Dir, output_dir), - .package_path => bundler.processResolveQueue(.package_path, false, std.fs.Dir, output_dir), - }; - } } - // if (log.level == .verbose) { - // for (log.msgs.items) |msg| { - // try msg.writeFormat(std.io.getStdOut().writer()); - // } - // } - - if (bundler.linker.any_needs_runtime) { - try bundler.output_files.append( - options.OutputFile.initBuf(runtime.Runtime.sourceContent(), bundler.linker.runtime_source_path, .js), - ); + if (!did_start) { + try switch (bundler.options.import_path_format) { + .relative => bundler.processResolveQueue(.relative, false, std.fs.Dir, output_dir), + .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, false, std.fs.Dir, output_dir), + .absolute_url => bundler.processResolveQueue(.absolute_url, false, std.fs.Dir, output_dir), + .absolute_path => bundler.processResolveQueue(.absolute_path, false, std.fs.Dir, output_dir), + .package_path => bundler.processResolveQueue(.package_path, false, std.fs.Dir, output_dir), + }; } + } - if (FeatureFlags.tracing) { - Output.prettyErrorln( - "<r><d>\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n<r>", - .{ - bundler.resolver.elapsed, - bundler.elapsed, - }, - ); - } + // if (log.level == .verbose) { + // for (log.msgs.items) |msg| { + // try 
msg.writeFormat(std.io.getStdOut().writer()); + // } + // } - var final_result = try options.TransformResult.init(try allocator.dupe(u8, bundler.result.outbase), bundler.output_files.toOwnedSlice(), log, allocator); - final_result.root_dir = bundler.options.output_dir_handle; - return final_result; + if (bundler.linker.any_needs_runtime) { + try bundler.output_files.append( + options.OutputFile.initBuf(runtime.Runtime.sourceContent(), bundler.linker.runtime_source_path, .js), + ); } - // pub fn processResolveQueueWithThreadPool(bundler) + if (FeatureFlags.tracing) { + Output.prettyErrorln( + "<r><d>\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n<r>", + .{ + bundler.resolver.elapsed, + bundler.elapsed, + }, + ); + } - pub fn processResolveQueue( - bundler: *ThisBundler, - comptime import_path_format: options.BundleOptions.ImportPathFormat, - comptime wrap_entry_point: bool, - comptime Outstream: type, - outstream: Outstream, - ) !void { - // var count: u8 = 0; - while (bundler.resolve_queue.readItem()) |item| { - js_ast.Expr.Data.Store.reset(); - js_ast.Stmt.Data.Store.reset(); + var final_result = try options.TransformResult.init(try allocator.dupe(u8, bundler.result.outbase), bundler.output_files.toOwnedSlice(), log, allocator); + final_result.root_dir = bundler.options.output_dir_handle; + return final_result; + } - // defer count += 1; - - if (comptime wrap_entry_point) { - var path = item.pathConst() orelse unreachable; - const loader = bundler.options.loader(path.name.ext); - - if (item.import_kind == .entry_point and loader.supportsClientEntryPoint()) { - var client_entry_point = try bundler.allocator.create(ClientEntryPoint); - client_entry_point.* = ClientEntryPoint{}; - try client_entry_point.generate(ThisBundler, bundler, path.name, bundler.options.framework.?.client.path); - try bundler.virtual_modules.append(client_entry_point); - - const entry_point_output_file = bundler.buildWithResolveResultEager( - item, - import_path_format, - Outstream, - outstream, - client_entry_point, - ) catch continue orelse continue; - bundler.output_files.append(entry_point_output_file) catch unreachable; - - js_ast.Expr.Data.Store.reset(); - js_ast.Stmt.Data.Store.reset(); - - // At this point, the entry point will be de-duped. - // So we just immediately build it. 
- var item_not_entrypointed = item; - item_not_entrypointed.import_kind = .stmt; - const original_output_file = bundler.buildWithResolveResultEager( - item_not_entrypointed, - import_path_format, - Outstream, - outstream, - null, - ) catch continue orelse continue; - bundler.output_files.append(original_output_file) catch unreachable; - - continue; - } - } + // pub fn processResolveQueueWithThreadPool(bundler) - const output_file = bundler.buildWithResolveResultEager( - item, - import_path_format, - Outstream, - outstream, - null, - ) catch continue orelse continue; - bundler.output_files.append(output_file) catch unreachable; + pub fn processResolveQueue( + bundler: *ThisBundler, + comptime import_path_format: options.BundleOptions.ImportPathFormat, + comptime wrap_entry_point: bool, + comptime Outstream: type, + outstream: Outstream, + ) !void { + // var count: u8 = 0; + while (bundler.resolve_queue.readItem()) |item| { + js_ast.Expr.Data.Store.reset(); + js_ast.Stmt.Data.Store.reset(); + + // defer count += 1; + + if (comptime wrap_entry_point) { + var path = item.pathConst() orelse unreachable; + const loader = bundler.options.loader(path.name.ext); + + if (item.import_kind == .entry_point and loader.supportsClientEntryPoint()) { + var client_entry_point = try bundler.allocator.create(ClientEntryPoint); + client_entry_point.* = ClientEntryPoint{}; + try client_entry_point.generate(ThisBundler, bundler, path.name, bundler.options.framework.?.client.path); + try bundler.virtual_modules.append(client_entry_point); + + const entry_point_output_file = bundler.buildWithResolveResultEager( + item, + import_path_format, + Outstream, + outstream, + client_entry_point, + ) catch continue orelse continue; + bundler.output_files.append(entry_point_output_file) catch unreachable; - // if (count >= 3) return try bundler.processResolveQueueWithThreadPool(import_path_format, wrap_entry_point, Outstream, outstream); + js_ast.Expr.Data.Store.reset(); + js_ast.Stmt.Data.Store.reset(); + + // At this point, the entry point will be de-duped. + // So we just immediately build it. 
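+ // The entry point is emitted twice: once wrapped by the generated client entry
+ // point above, and once as a plain module; switching import_kind to .stmt keeps
+ // this second build from being wrapped again.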
+ var item_not_entrypointed = item; + item_not_entrypointed.import_kind = .stmt; + const original_output_file = bundler.buildWithResolveResultEager( + item_not_entrypointed, + import_path_format, + Outstream, + outstream, + null, + ) catch continue orelse continue; + bundler.output_files.append(original_output_file) catch unreachable; + + continue; + } } - } - }; -} -pub const Bundler = NewBundler(true); -pub const ServeBundler = NewBundler(false); + const output_file = bundler.buildWithResolveResultEager( + item, + import_path_format, + Outstream, + outstream, + null, + ) catch continue orelse continue; + bundler.output_files.append(output_file) catch unreachable; + + // if (count >= 3) return try bundler.processResolveQueueWithThreadPool(import_path_format, wrap_entry_point, Outstream, outstream); + } + } +}; pub const Transformer = struct { opts: Api.TransformOptions, diff --git a/src/cache.zig b/src/cache.zig index 73c093d57..8bd1221f7 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -28,328 +28,251 @@ pub const FsCacheEntry = struct { } }; -pub fn NewCache(comptime cache_files: bool) type { - return struct { - pub const Set = struct { - js: JavaScript, - fs: Fs, - json: Json, - - pub fn init(allocator: *std.mem.Allocator) Set { - return Set{ - .js = JavaScript.init(allocator), - .fs = Fs{ - .mutex = Mutex.init(), - .entries = std.StringHashMap(Fs.Entry).init(allocator), - .shared_buffer = MutableString.init(allocator, 0) catch unreachable, - }, - .json = Json{ - .mutex = Mutex.init(), - .entries = std.StringHashMap(*Json.Entry).init(allocator), - }, - }; - } +pub const Set = struct { + js: JavaScript, + fs: Fs, + json: Json, + + pub fn init(allocator: *std.mem.Allocator) Set { + return Set{ + .js = JavaScript.init(allocator), + .fs = Fs{ + .shared_buffer = MutableString.init(allocator, 0) catch unreachable, + }, + .json = Json{}, }; - pub const Fs = struct { - const Entry = FsCacheEntry; - - mutex: Mutex, - entries: std.StringHashMap(Entry), - shared_buffer: MutableString, - - pub fn deinit(c: *Fs) void { - var iter = c.entries.iterator(); - while (iter.next()) |entry| { - entry.value.deinit(c.entries.allocator); - } - c.entries.deinit(); - } - - pub fn readFileShared( - c: *Fs, - _fs: *fs.FileSystem, - path: [:0]const u8, - dirname_fd: StoredFileDescriptorType, - _file_handle: ?StoredFileDescriptorType, - shared: *MutableString, - ) !Entry { - var rfs = _fs.fs; - - if (comptime cache_files) { - { - c.mutex.lock(); - defer c.mutex.unlock(); - if (c.entries.get(path)) |entry| { - return entry; - } - } - } + } +}; +pub const Fs = struct { + const Entry = FsCacheEntry; - var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined; + shared_buffer: MutableString, - if (_file_handle == null) { - file_handle = try std.fs.openFileAbsoluteZ(path, .{ .read = true }); - } + pub fn deinit(c: *Fs) void { + var iter = c.entries.iterator(); + while (iter.next()) |entry| { + entry.value.deinit(c.entries.allocator); + } + c.entries.deinit(); + } - defer { - if (rfs.needToCloseFiles() and _file_handle == null) { - file_handle.close(); - } - } + pub fn readFileShared( + c: *Fs, + _fs: *fs.FileSystem, + path: [:0]const u8, + dirname_fd: StoredFileDescriptorType, + _file_handle: ?StoredFileDescriptorType, + shared: *MutableString, + ) !Entry { + var rfs = _fs.fs; - // If the file's modification key hasn't changed since it was cached, assume - // the contents of the file are also the same and skip reading the file. 
- var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: { - switch (err) { - error.FileNotFound, error.AccessDenied => { - return err; - }, - else => { - if (isDebug) { - Output.printError("modkey error: {s}", .{@errorName(err)}); - } - break :handler null; - }, - } - }; - - var file: fs.File = undefined; - if (mod_key) |modk| { - file = rfs.readFileWithHandle(path, modk.size, file_handle, true, shared) catch |err| { - if (isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); - } - return err; - }; - } else { - file = rfs.readFileWithHandle(path, null, file_handle, true, shared) catch |err| { - if (isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); - } - return err; - }; - } + var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined; - const entry = Entry{ - .contents = file.contents, - .mod_key = mod_key, - .fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0, - }; + if (_file_handle == null) { + file_handle = try std.fs.openFileAbsoluteZ(path, .{ .read = true }); + } - if (comptime cache_files) { - c.mutex.lock(); - defer c.mutex.unlock(); - var res = c.entries.getOrPut(path) catch unreachable; + defer { + if (rfs.needToCloseFiles() and _file_handle == null) { + file_handle.close(); + } + } - if (res.found_existing) { - res.value_ptr.*.deinit(c.entries.allocator); + // If the file's modification key hasn't changed since it was cached, assume + // the contents of the file are also the same and skip reading the file. + var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: { + switch (err) { + error.FileNotFound, error.AccessDenied => { + return err; + }, + else => { + if (isDebug) { + Output.printError("modkey error: {s}", .{@errorName(err)}); } - res.value_ptr.* = entry; - return res.value_ptr.*; - } else { - return entry; - } + break :handler null; + }, } + }; - pub fn readFile( - c: *Fs, - _fs: *fs.FileSystem, - path: string, - dirname_fd: StoredFileDescriptorType, - comptime use_shared_buffer: bool, - _file_handle: ?StoredFileDescriptorType, - ) !Entry { - var rfs = _fs.fs; - - if (comptime cache_files) { - { - c.mutex.lock(); - defer c.mutex.unlock(); - if (c.entries.get(path)) |entry| { - return entry; - } - } + var file: fs.File = undefined; + if (mod_key) |modk| { + file = rfs.readFileWithHandle(path, modk.size, file_handle, true, shared) catch |err| { + if (isDebug) { + Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); } - - var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined; - - if (_file_handle == null) { - if (FeatureFlags.store_file_descriptors and dirname_fd > 0) { - file_handle = std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .read = true }) catch |err| brk: { - switch (err) { - error.FileNotFound => { - const handle = try std.fs.openFileAbsolute(path, .{ .read = true }); - Output.prettyErrorln( - "<r><d>Internal error: directory mismatch for directory \"{s}\", fd {d}<r>. 
You don't need to do anything, but this indicates a bug.", - .{ path, dirname_fd }, - ); - break :brk handle; - }, - else => return err, - } - }; - } else { - file_handle = try std.fs.openFileAbsolute(path, .{ .read = true }); - } + return err; + }; + } else { + file = rfs.readFileWithHandle(path, null, file_handle, true, shared) catch |err| { + if (isDebug) { + Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); } + return err; + }; + } - defer { - if (rfs.needToCloseFiles() and _file_handle == null) { - file_handle.close(); - } - } + return Entry{ + .contents = file.contents, + .mod_key = mod_key, + .fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0, + }; + } - // If the file's modification key hasn't changed since it was cached, assume - // the contents of the file are also the same and skip reading the file. - var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: { + pub fn readFile( + c: *Fs, + _fs: *fs.FileSystem, + path: string, + dirname_fd: StoredFileDescriptorType, + comptime use_shared_buffer: bool, + _file_handle: ?StoredFileDescriptorType, + ) !Entry { + var rfs = _fs.fs; + + var file_handle: std.fs.File = if (_file_handle) |__file| std.fs.File{ .handle = __file } else undefined; + + if (_file_handle == null) { + if (FeatureFlags.store_file_descriptors and dirname_fd > 0) { + file_handle = std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .read = true }) catch |err| brk: { switch (err) { - error.FileNotFound, error.AccessDenied => { - return err; - }, - else => { - if (isDebug) { - Output.printError("modkey error: {s}", .{@errorName(err)}); - } - break :handler null; + error.FileNotFound => { + const handle = try std.fs.openFileAbsolute(path, .{ .read = true }); + Output.prettyErrorln( + "<r><d>Internal error: directory mismatch for directory \"{s}\", fd {d}<r>. You don't need to do anything, but this indicates a bug.", + .{ path, dirname_fd }, + ); + break :brk handle; }, + else => return err, } }; + } else { + file_handle = try std.fs.openFileAbsolute(path, .{ .read = true }); + } + } - var file: fs.File = undefined; - if (mod_key) |modk| { - file = rfs.readFileWithHandle(path, modk.size, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| { - if (isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); - } - return err; - }; - } else { - file = rfs.readFileWithHandle(path, null, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| { - if (isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); - } - return err; - }; - } - - const entry = Entry{ - .contents = file.contents, - .mod_key = mod_key, - .fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0, - }; - - if (comptime cache_files) { - c.mutex.lock(); - defer c.mutex.unlock(); - var res = c.entries.getOrPut(path) catch unreachable; + defer { + if (rfs.needToCloseFiles() and _file_handle == null) { + file_handle.close(); + } + } - if (res.found_existing) { - res.value_ptr.*.deinit(c.entries.allocator); + // If the file's modification key hasn't changed since it was cached, assume + // the contents of the file are also the same and skip reading the file. 
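The mod-key fetch that follows leans on Zig's labeled-block catch to split failures into two classes: errors the caller must see, and errors that merely degrade to "no key, do a full read". The idiom in isolation, with readKey as a hypothetical stand-in for modKeyWithFile:

const std = @import("std");

const KeyError = error{ FileNotFound, AccessDenied, Unexpected };

fn readKey(path: []const u8) KeyError!u64 {
    // Stand-in body: fails the way a stat can fail.
    if (path.len == 0) return error.FileNotFound;
    return std.hash.Wyhash.hash(0, path);
}

fn tryReadKey(path: []const u8) KeyError!?u64 {
    return readKey(path) catch |err| handler: {
        switch (err) {
            // Fatal: propagate so the caller observes the failure.
            error.FileNotFound, error.AccessDenied => return err,
            // Anything else yields null, forcing a full read instead.
            else => break :handler null,
        }
    };
}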
+ var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: { + switch (err) { + error.FileNotFound, error.AccessDenied => { + return err; + }, + else => { + if (isDebug) { + Output.printError("modkey error: {s}", .{@errorName(err)}); } - res.value_ptr.* = entry; - return res.value_ptr.*; - } else { - return entry; - } + break :handler null; + }, } }; - pub const Css = struct { - pub const Entry = struct {}; - pub const Result = struct { - ok: bool, - value: void, + var file: fs.File = undefined; + if (mod_key) |modk| { + file = rfs.readFileWithHandle(path, modk.size, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| { + if (isDebug) { + Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); + } + return err; }; - pub fn parse(cache: *@This(), log: *logger.Log, source: logger.Source) !Result { - Global.notimpl(); - } - }; + } else { + file = rfs.readFileWithHandle(path, null, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| { + if (isDebug) { + Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); + } + return err; + }; + } - pub const JavaScript = struct { - mutex: Mutex, - entries: std.StringHashMap(Result), + return Entry{ + .contents = file.contents, + .mod_key = mod_key, + .fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0, + }; + } +}; - pub const Result = js_ast.Result; +pub const Css = struct { + pub const Entry = struct {}; + pub const Result = struct { + ok: bool, + value: void, + }; + pub fn parse(cache: *@This(), log: *logger.Log, source: logger.Source) !Result { + Global.notimpl(); + } +}; - pub fn init(allocator: *std.mem.Allocator) JavaScript { - return JavaScript{ .mutex = Mutex.init(), .entries = std.StringHashMap(Result).init(allocator) }; - } - // For now, we're not going to cache JavaScript ASTs. - // It's probably only relevant when bundling for production. - pub fn parse( - cache: *@This(), - allocator: *std.mem.Allocator, - opts: js_parser.Parser.Options, - defines: *Define, - log: *logger.Log, - source: *const logger.Source, - ) anyerror!?js_ast.Ast { - var temp_log = logger.Log.init(allocator); - defer temp_log.appendToMaybeRecycled(log, source) catch {}; - var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| { - return null; - }; +pub const JavaScript = struct { + pub const Result = js_ast.Result; - const result = try parser.parse(); + pub fn init(allocator: *std.mem.Allocator) JavaScript { + return JavaScript{}; + } + // For now, we're not going to cache JavaScript ASTs. + // It's probably only relevant when bundling for production. 
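Both parse and scan below share one error-reporting shape: accumulate diagnostics into a throwaway log, then flush them into the shared log on every exit path. Reduced to that shape alone, as a sketch reusing cache.zig's own logger and js_ast imports, with the elided parser work marked by a comment:

fn parseGuarded(
    allocator: *std.mem.Allocator,
    log: *logger.Log,
    source: *const logger.Source,
) ?js_ast.Ast {
    // Private log: a failed parse can never leave half-written
    // diagnostics in the caller's shared log.
    var temp_log = logger.Log.init(allocator);
    // Flush on success and failure alike, tagged with the source.
    defer temp_log.appendToMaybeRecycled(log, source) catch {};

    // ... construct js_parser.Parser against temp_log and run it ...
    return null;
}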
+ pub fn parse( + cache: *@This(), + allocator: *std.mem.Allocator, + opts: js_parser.Parser.Options, + defines: *Define, + log: *logger.Log, + source: *const logger.Source, + ) anyerror!?js_ast.Ast { + var temp_log = logger.Log.init(allocator); + defer temp_log.appendToMaybeRecycled(log, source) catch {}; + var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| { + return null; + }; - return if (result.ok) result.ast else null; - } + const result = try parser.parse(); - pub fn scan( - cache: *@This(), - allocator: *std.mem.Allocator, - scan_pass_result: *js_parser.ScanPassResult, - opts: js_parser.Parser.Options, - defines: *Define, - log: *logger.Log, - source: *const logger.Source, - ) anyerror!void { - var temp_log = logger.Log.init(allocator); - defer temp_log.appendToMaybeRecycled(log, source) catch {}; - - var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| { - return; - }; + return if (result.ok) result.ast else null; + } - return try parser.scanImports(scan_pass_result); - } + pub fn scan( + cache: *@This(), + allocator: *std.mem.Allocator, + scan_pass_result: *js_parser.ScanPassResult, + opts: js_parser.Parser.Options, + defines: *Define, + log: *logger.Log, + source: *const logger.Source, + ) anyerror!void { + var temp_log = logger.Log.init(allocator); + defer temp_log.appendToMaybeRecycled(log, source) catch {}; + + var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| { + return; }; - pub const Json = struct { - pub const Entry = struct { - is_tsconfig: bool = false, - source: logger.Source, - expr: ?js_ast.Expr = null, - ok: bool = false, - // msgs: []logger.Msg, - }; - mutex: Mutex, - entries: std.StringHashMap(*Entry), - pub fn init(allocator: *std.mem.Allocator) Json { - return Json{ - .mutex = Mutex.init(), - .entries = std.StringHashMap(Entry).init(allocator), - }; - } - fn parse(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator, is_tsconfig: bool, func: anytype) anyerror!?js_ast.Expr { - var temp_log = logger.Log.init(allocator); - defer { - temp_log.appendTo(log) catch {}; - } - return func(&source, &temp_log, allocator) catch handler: { - break :handler null; - }; - } - pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr { - return try parse(cache, log, source, allocator, false, json_parser.ParseJSON); - } + return try parser.scanImports(scan_pass_result); + } +}; - pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr { - return try parse(cache, log, source, allocator, true, json_parser.ParseTSConfig); - } +pub const Json = struct { + pub fn init(allocator: *std.mem.Allocator) Json { + return Json{}; + } + fn parse(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator, is_tsconfig: bool, func: anytype) anyerror!?js_ast.Expr { + var temp_log = logger.Log.init(allocator); + defer { + temp_log.appendTo(log) catch {}; + } + return func(&source, &temp_log, allocator) catch handler: { + break :handler null; }; - }; -} + } + pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr { + return try parse(cache, log, source, allocator, false, json_parser.ParseJSON); + } -pub const Cache = NewCache(true); -pub const ServeCache = NewCache(false); + pub fn parseTSConfig(cache: 
*@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr { + return try parse(cache, log, source, allocator, true, json_parser.ParseTSConfig); + } +}; diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index 2a85f48b2..ed81e3572 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -33,7 +33,7 @@ pub const BuildCommand = struct { ); }, .lazy => { - result = try bundler.ServeBundler.bundle( + result = try bundler.Bundler.bundle( ctx.allocator, ctx.log, ctx.args, diff --git a/src/cli/bun_command.zig b/src/cli/bun_command.zig index 66bd2ccee..74e8e7654 100644 --- a/src/cli/bun_command.zig +++ b/src/cli/bun_command.zig @@ -38,7 +38,7 @@ const ServerBundleGeneratorThread = struct { route_conf_: ?Api.LoadedRouteConfig, router: ?Router, ) !void { - var server_bundler = try bundler.ServeBundler.init( + var server_bundler = try bundler.Bundler.init( allocator_, logs, try configureTransformOptionsForBun(allocator_, transform_args), @@ -53,7 +53,7 @@ const ServerBundleGeneratorThread = struct { return err; }; var estimated_input_lines_of_code: usize = 0; - _ = try bundler.ServeBundler.GenerateNodeModuleBundle.generate( + _ = try bundler.Bundler.GenerateNodeModuleBundle.generate( &server_bundler, allocator_, server_conf, @@ -99,7 +99,7 @@ pub const BunCommand = struct { var log = ctx.log; estimated_input_lines_of_code_ = 0; - var this_bundler = try bundler.ServeBundler.init(allocator, log, ctx.args, null, null); + var this_bundler = try bundler.Bundler.init(allocator, log, ctx.args, null, null); this_bundler.configureLinker(); var filepath: [*:0]const u8 = "node_modules.bun"; var server_bundle_filepath: [*:0]const u8 = "node_modules.server.bun"; @@ -170,7 +170,7 @@ pub const BunCommand = struct { // Always generate the client-only bundle // we can revisit this decision if people ask - var node_modules_ = try bundler.ServeBundler.GenerateNodeModuleBundle.generate( + var node_modules_ = try bundler.Bundler.GenerateNodeModuleBundle.generate( &this_bundler, allocator, loaded_framework, diff --git a/src/http.zig b/src/http.zig index 2a653ad61..b46151fda 100644 --- a/src/http.zig +++ b/src/http.zig @@ -40,7 +40,7 @@ const Request = picohttp.Request; const Response = picohttp.Response; pub const Headers = picohttp.Headers; pub const MimeType = @import("./http/mime_type.zig"); -const Bundler = bundler.ServeBundler; +const Bundler = bundler.Bundler; const Websocket = @import("./http/websocket.zig"); const js_printer = @import("./js_printer.zig"); const SOCKET_FLAGS = os.SOCK_CLOEXEC; diff --git a/src/javascript/jsc/javascript.zig b/src/javascript/jsc/javascript.zig index 50585bf91..352b9caac 100644 --- a/src/javascript/jsc/javascript.zig +++ b/src/javascript/jsc/javascript.zig @@ -7,7 +7,7 @@ const NodeModuleBundle = @import("../../node_module_bundle.zig").NodeModuleBundl const logger = @import("../../logger.zig"); const Api = @import("../../api/schema.zig").Api; const options = @import("../../options.zig"); -const Bundler = @import("../../bundler.zig").ServeBundler; +const Bundler = @import("../../bundler.zig").Bundler; const ServerEntryPoint = @import("../../bundler.zig").ServerEntryPoint; const js_printer = @import("../../js_printer.zig"); const js_parser = @import("../../js_parser.zig"); diff --git a/src/linker.zig b/src/linker.zig index ff13198c1..4e42423b8 100644 --- a/src/linker.zig +++ b/src/linker.zig @@ -26,671 +26,667 @@ const resolve_path = @import("./resolver/resolve_path.zig"); const _bundler = 
@import("./bundler.zig"); const Bundler = _bundler.Bundler; const ResolveQueue = _bundler.ResolveQueue; +const ResolverType = Resolver.Resolver; const Runtime = @import("./runtime.zig").Runtime; pub const CSSResolveError = error{ResolveError}; pub const OnImportCallback = fn (resolve_result: *const Resolver.Result, import_record: *ImportRecord, source_dir: string) void; -pub fn NewLinker(comptime BundlerType: type) type { - return struct { - const HashedFileNameMap = std.AutoHashMap(u64, string); - const ThisLinker = @This(); +pub const Linker = struct { + const HashedFileNameMap = std.AutoHashMap(u64, string); + const ThisLinker = @This(); + allocator: *std.mem.Allocator, + options: *Options.BundleOptions, + fs: *Fs.FileSystem, + log: *logger.Log, + resolve_queue: *ResolveQueue, + resolver: *ResolverType, + resolve_results: *_bundler.ResolveResults, + any_needs_runtime: bool = false, + runtime_import_record: ?ImportRecord = null, + runtime_source_path: string, + hashed_filenames: HashedFileNameMap, + import_counter: usize = 0, + + onImportCSS: ?OnImportCallback = null, + + pub fn init( allocator: *std.mem.Allocator, - options: *Options.BundleOptions, - fs: *Fs.FileSystem, log: *logger.Log, resolve_queue: *ResolveQueue, - resolver: *BundlerType.Resolver, + options: *Options.BundleOptions, + resolver: *ResolverType, resolve_results: *_bundler.ResolveResults, - any_needs_runtime: bool = false, - runtime_import_record: ?ImportRecord = null, - runtime_source_path: string, - hashed_filenames: HashedFileNameMap, - import_counter: usize = 0, - - onImportCSS: ?OnImportCallback = null, - - pub fn init( - allocator: *std.mem.Allocator, - log: *logger.Log, - resolve_queue: *ResolveQueue, - options: *Options.BundleOptions, - resolver: *BundlerType.Resolver, - resolve_results: *_bundler.ResolveResults, - fs: *Fs.FileSystem, - ) ThisLinker { - relative_paths_list = ImportPathsList.init(allocator); - - return ThisLinker{ - .allocator = allocator, - .options = options, - .fs = fs, - .log = log, - .resolve_queue = resolve_queue, - .resolver = resolver, - .resolve_results = resolve_results, - .runtime_source_path = fs.absAlloc(allocator, &([_]string{"__runtime.js"})) catch unreachable, - .hashed_filenames = HashedFileNameMap.init(allocator), - }; - } - - // fs: fs.FileSystem, - // TODO: - pub fn requireOrImportMetaForSource(c: ThisLinker, source_index: Ref.Int) RequireOrImportMeta { - return RequireOrImportMeta{}; - } - - pub fn getHashedFilename( - this: *ThisLinker, - file_path: Fs.Path, - fd: ?FileDescriptorType, - ) !string { - if (BundlerType.isCacheEnabled) { - var hashed = std.hash.Wyhash.hash(0, file_path.text); - var hashed_result = try this.hashed_filenames.getOrPut(hashed); - if (hashed_result.found_existing) { - return hashed_result.value_ptr.*; - } - } - - var file: std.fs.File = if (fd) |_fd| std.fs.File{ .handle = _fd } else try std.fs.openFileAbsolute(file_path.text, .{ .read = true }); - Fs.FileSystem.setMaxFd(file.handle); - var modkey = try Fs.FileSystem.RealFS.ModKey.generate(&this.fs.fs, file_path.text, file); - const hash_name = try modkey.hashName(file_path.name.base); - - if (BundlerType.isCacheEnabled) { - var hashed = std.hash.Wyhash.hash(0, file_path.text); - try this.hashed_filenames.put(hashed, try this.allocator.dupe(u8, hash_name)); + fs: *Fs.FileSystem, + ) ThisLinker { + relative_paths_list = ImportPathsList.init(allocator); + + return ThisLinker{ + .allocator = allocator, + .options = options, + .fs = fs, + .log = log, + .resolve_queue = resolve_queue, + .resolver = 
resolver, + .resolve_results = resolve_results, + .runtime_source_path = fs.absAlloc(allocator, &([_]string{"__runtime.js"})) catch unreachable, + .hashed_filenames = HashedFileNameMap.init(allocator), + }; + } + + // fs: fs.FileSystem, + // TODO: + pub fn requireOrImportMetaForSource(c: ThisLinker, source_index: Ref.Int) RequireOrImportMeta { + return RequireOrImportMeta{}; + } + + pub fn getHashedFilename( + this: *ThisLinker, + file_path: Fs.Path, + fd: ?FileDescriptorType, + ) !string { + if (Bundler.isCacheEnabled) { + var hashed = std.hash.Wyhash.hash(0, file_path.text); + var hashed_result = try this.hashed_filenames.getOrPut(hashed); + if (hashed_result.found_existing) { + return hashed_result.value_ptr.*; } + } - if (this.fs.fs.needToCloseFiles() and fd == null) { - file.close(); - } + var file: std.fs.File = if (fd) |_fd| std.fs.File{ .handle = _fd } else try std.fs.openFileAbsolute(file_path.text, .{ .read = true }); + Fs.FileSystem.setMaxFd(file.handle); + var modkey = try Fs.FileSystem.RealFS.ModKey.generate(&this.fs.fs, file_path.text, file); + const hash_name = try modkey.hashName(file_path.name.base); - return hash_name; + if (Bundler.isCacheEnabled) { + var hashed = std.hash.Wyhash.hash(0, file_path.text); + try this.hashed_filenames.put(hashed, try this.allocator.dupe(u8, hash_name)); } - pub fn resolveCSS( - this: anytype, - path: Fs.Path, - url: string, - range: logger.Range, - kind: ImportKind, - comptime import_path_format: Options.BundleOptions.ImportPathFormat, - comptime resolve_only: bool, - ) !string { - const dir = path.name.dirWithTrailingSlash(); - - switch (kind) { - .at => { - var resolve_result = try this.resolver.resolve(dir, url, .at); - if (resolve_only or resolve_result.is_external) { - return resolve_result.path_pair.primary.text; - } + if (this.fs.fs.needToCloseFiles() and fd == null) { + file.close(); + } - var import_record = ImportRecord{ .range = range, .path = resolve_result.path_pair.primary, .kind = kind }; + return hash_name; + } + + pub fn resolveCSS( + this: anytype, + path: Fs.Path, + url: string, + range: logger.Range, + kind: ImportKind, + comptime import_path_format: Options.BundleOptions.ImportPathFormat, + comptime resolve_only: bool, + ) !string { + const dir = path.name.dirWithTrailingSlash(); + + switch (kind) { + .at => { + var resolve_result = try this.resolver.resolve(dir, url, .at); + if (resolve_only or resolve_result.is_external) { + return resolve_result.path_pair.primary.text; + } - const loader = this.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file; + var import_record = ImportRecord{ .range = range, .path = resolve_result.path_pair.primary, .kind = kind }; - this.processImportRecord(loader, dir, &resolve_result, &import_record, import_path_format) catch unreachable; - return import_record.path.text; - }, - .at_conditional => { - var resolve_result = try this.resolver.resolve(dir, url, .at_conditional); - if (resolve_only or resolve_result.is_external) { - return resolve_result.path_pair.primary.text; - } + const loader = this.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file; - var import_record = ImportRecord{ .range = range, .path = resolve_result.path_pair.primary, .kind = kind }; - const loader = this.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file; - - this.processImportRecord(loader, dir, &resolve_result, &import_record, import_path_format) catch unreachable; - return import_record.path.text; - }, - .url => { - var resolve_result = try 
this.resolver.resolve(dir, url, .url); - if (resolve_only or resolve_result.is_external) { - return resolve_result.path_pair.primary.text; - } + this.processImportRecord(loader, dir, &resolve_result, &import_record, import_path_format) catch unreachable; + return import_record.path.text; + }, + .at_conditional => { + var resolve_result = try this.resolver.resolve(dir, url, .at_conditional); + if (resolve_only or resolve_result.is_external) { + return resolve_result.path_pair.primary.text; + } - var import_record = ImportRecord{ .range = range, .path = resolve_result.path_pair.primary, .kind = kind }; - const loader = this.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file; + var import_record = ImportRecord{ .range = range, .path = resolve_result.path_pair.primary, .kind = kind }; + const loader = this.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file; - this.processImportRecord(loader, dir, &resolve_result, &import_record, import_path_format) catch unreachable; - return import_record.path.text; - }, - else => unreachable, - } - unreachable; - } + this.processImportRecord(loader, dir, &resolve_result, &import_record, import_path_format) catch unreachable; + return import_record.path.text; + }, + .url => { + var resolve_result = try this.resolver.resolve(dir, url, .url); + if (resolve_only or resolve_result.is_external) { + return resolve_result.path_pair.primary.text; + } - pub inline fn nodeModuleBundleImportPath(this: *const ThisLinker) string { - if (this.options.platform == .bun) return "/node_modules.server.bun"; + var import_record = ImportRecord{ .range = range, .path = resolve_result.path_pair.primary, .kind = kind }; + const loader = this.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file; - return if (this.options.node_modules_bundle_url.len > 0) - this.options.node_modules_bundle_url - else - this.options.node_modules_bundle.?.bundle.import_from_name; + this.processImportRecord(loader, dir, &resolve_result, &import_record, import_path_format) catch unreachable; + return import_record.path.text; + }, + else => unreachable, } - - // pub const Scratch = struct { - // threadlocal var externals: std.ArrayList(u32) = undefined; - // threadlocal var has_externals: std.ArrayList(u32) = undefined; - // pub fn externals() { - - // } - // }; - // This modifies the Ast in-place! 
- // But more importantly, this does the following: - // - Wrap CommonJS files - threadlocal var require_part: js_ast.Part = undefined; - threadlocal var require_part_stmts: [1]js_ast.Stmt = undefined; - threadlocal var require_part_import_statement: js_ast.S.Import = undefined; - threadlocal var require_part_import_clauses: [1]js_ast.ClauseItem = undefined; - const require_alias: string = "__require"; - pub fn link( - linker: *ThisLinker, - file_path: Fs.Path, - result: *_bundler.ParseResult, - comptime import_path_format: Options.BundleOptions.ImportPathFormat, - comptime ignore_runtime: bool, - ) !void { - var needs_runtime = result.ast.uses_exports_ref or result.ast.uses_module_ref or result.ast.runtime_imports.hasAny(); - const source_dir = if (file_path.is_symlink and file_path.pretty.len > 0 and import_path_format == .absolute_url and linker.options.platform != .bun) - Fs.PathName.init(file_path.pretty).dirWithTrailingSlash() - else - file_path.sourceDir(); - var externals = std.ArrayList(u32).init(linker.allocator); - var needs_bundle = false; - var first_bundled_index: ?u32 = null; - var had_resolve_errors = false; - var needs_require = false; - - // Step 1. Resolve imports & requires - switch (result.loader) { - .jsx, .js, .ts, .tsx => { - for (result.ast.import_records) |*import_record, _record_index| { - if (import_record.is_unused) continue; - - const record_index = @truncate(u32, _record_index); - if (comptime !ignore_runtime) { - if (strings.eqlComptime(import_record.path.text, Runtime.Imports.Name)) { - // runtime is included in the bundle, so we don't need to dynamically import it - if (linker.options.node_modules_bundle != null) { - import_record.path.text = linker.nodeModuleBundleImportPath(); - result.ast.runtime_import_record_id = record_index; - } else { - import_record.path = try linker.generateImportPath( - source_dir, - linker.runtime_source_path, - Runtime.version(), - false, - "bun", - import_path_format, - ); - result.ast.runtime_import_record_id = record_index; - result.ast.needs_runtime = true; - } - continue; + unreachable; + } + + pub inline fn nodeModuleBundleImportPath(this: *const ThisLinker) string { + if (this.options.platform == .bun) return "/node_modules.server.bun"; + + return if (this.options.node_modules_bundle_url.len > 0) + this.options.node_modules_bundle_url + else + this.options.node_modules_bundle.?.bundle.import_from_name; + } + + // pub const Scratch = struct { + // threadlocal var externals: std.ArrayList(u32) = undefined; + // threadlocal var has_externals: std.ArrayList(u32) = undefined; + // pub fn externals() { + + // } + // }; + // This modifies the Ast in-place! 
+ // But more importantly, this does the following: + // - Wrap CommonJS files + threadlocal var require_part: js_ast.Part = undefined; + threadlocal var require_part_stmts: [1]js_ast.Stmt = undefined; + threadlocal var require_part_import_statement: js_ast.S.Import = undefined; + threadlocal var require_part_import_clauses: [1]js_ast.ClauseItem = undefined; + const require_alias: string = "__require"; + pub fn link( + linker: *ThisLinker, + file_path: Fs.Path, + result: *_bundler.ParseResult, + comptime import_path_format: Options.BundleOptions.ImportPathFormat, + comptime ignore_runtime: bool, + ) !void { + var needs_runtime = result.ast.uses_exports_ref or result.ast.uses_module_ref or result.ast.runtime_imports.hasAny(); + const source_dir = if (file_path.is_symlink and file_path.pretty.len > 0 and import_path_format == .absolute_url and linker.options.platform != .bun) + Fs.PathName.init(file_path.pretty).dirWithTrailingSlash() + else + file_path.sourceDir(); + var externals = std.ArrayList(u32).init(linker.allocator); + var needs_bundle = false; + var first_bundled_index: ?u32 = null; + var had_resolve_errors = false; + var needs_require = false; + + // Step 1. Resolve imports & requires + switch (result.loader) { + .jsx, .js, .ts, .tsx => { + for (result.ast.import_records) |*import_record, _record_index| { + if (import_record.is_unused) continue; + + const record_index = @truncate(u32, _record_index); + if (comptime !ignore_runtime) { + if (strings.eqlComptime(import_record.path.text, Runtime.Imports.Name)) { + // runtime is included in the bundle, so we don't need to dynamically import it + if (linker.options.node_modules_bundle != null) { + import_record.path.text = linker.nodeModuleBundleImportPath(); + result.ast.runtime_import_record_id = record_index; + } else { + import_record.path = try linker.generateImportPath( + source_dir, + linker.runtime_source_path, + Runtime.version(), + false, + "bun", + import_path_format, + ); + result.ast.runtime_import_record_id = record_index; + result.ast.needs_runtime = true; } + continue; } + } - if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*_resolved_import| { - const resolved_import: *const Resolver.Result = _resolved_import; - if (resolved_import.is_external) { - externals.append(record_index) catch unreachable; - continue; - } + if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*_resolved_import| { + const resolved_import: *const Resolver.Result = _resolved_import; + if (resolved_import.is_external) { + externals.append(record_index) catch unreachable; + continue; + } - const path = resolved_import.pathConst() orelse { - import_record.path.is_disabled = true; - continue; - }; - - const loader = linker.options.loader(path.name.ext); - if (loader.isJavaScriptLikeOrJSON()) { - bundled: { - if (linker.options.node_modules_bundle) |node_modules_bundle| { - const package_json = resolved_import.package_json orelse break :bundled; - const package_base_dir = package_json.source.path.sourceDir(); - if (node_modules_bundle.getPackageIDByHash(package_json.hash)) |pkg_id| { - const package = node_modules_bundle.bundle.packages[pkg_id]; - - if (comptime isDebug) { - std.debug.assert(strings.eql(node_modules_bundle.str(package.name), package_json.name)); - std.debug.assert(strings.eql(node_modules_bundle.str(package.version), package_json.version)); - } - - const package_relative_path = linker.fs.relative( - package_base_dir, - if (!strings.eqlComptime(path.namespace, 
"node")) path.pretty else path.text, - ); + const path = resolved_import.pathConst() orelse { + import_record.path.is_disabled = true; + continue; + }; + + const loader = linker.options.loader(path.name.ext); + if (loader.isJavaScriptLikeOrJSON()) { + bundled: { + if (linker.options.node_modules_bundle) |node_modules_bundle| { + const package_json = resolved_import.package_json orelse break :bundled; + const package_base_dir = package_json.source.path.sourceDir(); + if (node_modules_bundle.getPackageIDByHash(package_json.hash)) |pkg_id| { + const package = node_modules_bundle.bundle.packages[pkg_id]; + + if (comptime isDebug) { + std.debug.assert(strings.eql(node_modules_bundle.str(package.name), package_json.name)); + std.debug.assert(strings.eql(node_modules_bundle.str(package.version), package_json.version)); + } + + const package_relative_path = linker.fs.relative( + package_base_dir, + if (!strings.eqlComptime(path.namespace, "node")) path.pretty else path.text, + ); - const found_module = node_modules_bundle.findModuleInPackage(&package, package_relative_path) orelse { - // linker.log.addErrorFmt( - // null, - // logger.Loc.Empty, - // linker.allocator, - // "New dependency import: \"{s}/{s}\"\nPlease run `bun bun` to update the .bun.", - // .{ - // package_json.name, - // package_relative_path, - // }, - // ) catch {}; - break :bundled; - }; - - if (comptime isDebug) { - const module_path = node_modules_bundle.str(found_module.path); - std.debug.assert( - strings.eql( - module_path, - package_relative_path, - ), - ); - } - - import_record.is_bundled = true; - import_record.path.text = linker.nodeModuleBundleImportPath(); - import_record.module_id = found_module.id; - needs_bundle = true; - continue; + const found_module = node_modules_bundle.findModuleInPackage(&package, package_relative_path) orelse { + // linker.log.addErrorFmt( + // null, + // logger.Loc.Empty, + // linker.allocator, + // "New dependency import: \"{s}/{s}\"\nPlease run `bun bun` to update the .bun.", + // .{ + // package_json.name, + // package_relative_path, + // }, + // ) catch {}; + break :bundled; + }; + + if (comptime isDebug) { + const module_path = node_modules_bundle.str(found_module.path); + std.debug.assert( + strings.eql( + module_path, + package_relative_path, + ), + ); } + + import_record.is_bundled = true; + import_record.path.text = linker.nodeModuleBundleImportPath(); + import_record.module_id = found_module.id; + needs_bundle = true; + continue; } } } + } - linker.processImportRecord( - loader, - - // Include trailing slash - source_dir, - resolved_import, - import_record, - import_path_format, - ) catch continue; - - // If we're importing a CommonJS module as ESM - // We need to do the following transform: - // import React from 'react'; - // => - // import {_require} from 'RUNTIME_IMPORTS'; - // import * as react_module from 'react'; - // var React = _require(react_module).default; - // UNLESS it's a namespace import - // If it's a namespace import, assume it's safe. - // We can do this in the printer instead of creating a bunch of AST nodes here. - // But we need to at least tell the printer that this needs to happen. 
- if (result.ast.exports_kind != .cjs and - (import_record.kind == .require or - (import_record.kind == .stmt and resolved_import.shouldAssumeCommonJS(import_record)))) - { - import_record.wrap_with_to_module = true; - import_record.module_id = @truncate(u32, std.hash.Wyhash.hash(0, path.pretty)); + linker.processImportRecord( + loader, - result.ast.needs_runtime = true; - needs_require = true; - } else if (result.ast.exports_kind == .cjs) { - import_record.module_id = @truncate(u32, std.hash.Wyhash.hash(0, path.pretty)); - } - } else |err| { - had_resolve_errors = true; - - switch (err) { - error.ModuleNotFound => { - if (import_record.path.text.len > 0 and Resolver.isPackagePath(import_record.path.text)) { - if (linker.options.platform.isWebLike() and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) { - try linker.log.addResolveError( - &result.source, - import_record.range, - linker.allocator, - "Could not resolve: \"{s}\". Try setting --platform=\"node\" (after bun build exists)", - .{import_record.path.text}, - import_record.kind, - ); - continue; - } else { - try linker.log.addResolveError( - &result.source, - import_record.range, - linker.allocator, - "Could not resolve: \"{s}\". Maybe you need to \"npm install\" (or yarn/pnpm)?", - .{import_record.path.text}, - import_record.kind, - ); - continue; - } + // Include trailing slash + source_dir, + resolved_import, + import_record, + import_path_format, + ) catch continue; + + // If we're importing a CommonJS module as ESM + // We need to do the following transform: + // import React from 'react'; + // => + // import {_require} from 'RUNTIME_IMPORTS'; + // import * as react_module from 'react'; + // var React = _require(react_module).default; + // UNLESS it's a namespace import + // If it's a namespace import, assume it's safe. + // We can do this in the printer instead of creating a bunch of AST nodes here. + // But we need to at least tell the printer that this needs to happen. + if (result.ast.exports_kind != .cjs and + (import_record.kind == .require or + (import_record.kind == .stmt and resolved_import.shouldAssumeCommonJS(import_record)))) + { + import_record.wrap_with_to_module = true; + import_record.module_id = @truncate(u32, std.hash.Wyhash.hash(0, path.pretty)); + + result.ast.needs_runtime = true; + needs_require = true; + } else if (result.ast.exports_kind == .cjs) { + import_record.module_id = @truncate(u32, std.hash.Wyhash.hash(0, path.pretty)); + } + } else |err| { + had_resolve_errors = true; + + switch (err) { + error.ModuleNotFound => { + if (import_record.path.text.len > 0 and Resolver.isPackagePath(import_record.path.text)) { + if (linker.options.platform.isWebLike() and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) { + try linker.log.addResolveError( + &result.source, + import_record.range, + linker.allocator, + "Could not resolve: \"{s}\". Try setting --platform=\"node\" (after bun build exists)", + .{import_record.path.text}, + import_record.kind, + ); + continue; } else { try linker.log.addResolveError( &result.source, import_record.range, linker.allocator, - "Could not resolve: \"{s}\"", - .{ - import_record.path.text, - }, + "Could not resolve: \"{s}\". 
Maybe you need to \"npm install\" (or yarn/pnpm)?", + .{import_record.path.text}, import_record.kind, ); continue; } - }, - else => { + } else { try linker.log.addResolveError( &result.source, import_record.range, linker.allocator, - "{s} resolving \"{s}\"", + "Could not resolve: \"{s}\"", .{ - @errorName(err), import_record.path.text, }, import_record.kind, ); continue; - }, - } + } + }, + else => { + try linker.log.addResolveError( + &result.source, + import_record.range, + linker.allocator, + "{s} resolving \"{s}\"", + .{ + @errorName(err), + import_record.path.text, + }, + import_record.kind, + ); + continue; + }, } } - }, - else => {}, - } - if (had_resolve_errors) return error.ResolveError; - result.ast.externals = externals.toOwnedSlice(); - - if (result.ast.needs_runtime and result.ast.runtime_import_record_id == null) { - var import_records = try linker.allocator.alloc(ImportRecord, result.ast.import_records.len + 1); - std.mem.copy(ImportRecord, import_records, result.ast.import_records); - - import_records[import_records.len - 1] = ImportRecord{ - .kind = .stmt, - .path = if (linker.options.node_modules_bundle != null) - Fs.Path.init(linker.nodeModuleBundleImportPath()) - else - try linker.generateImportPath( - source_dir, - linker.runtime_source_path, - Runtime.version(), - false, - "bun", - import_path_format, - ), - .range = logger.Range{ .loc = logger.Loc{ .start = 0 }, .len = 0 }, - }; - result.ast.runtime_import_record_id = @truncate(u32, import_records.len - 1); - result.ast.import_records = import_records; - } + } + }, + else => {}, + } + if (had_resolve_errors) return error.ResolveError; + result.ast.externals = externals.toOwnedSlice(); + + if (result.ast.needs_runtime and result.ast.runtime_import_record_id == null) { + var import_records = try linker.allocator.alloc(ImportRecord, result.ast.import_records.len + 1); + std.mem.copy(ImportRecord, import_records, result.ast.import_records); + + import_records[import_records.len - 1] = ImportRecord{ + .kind = .stmt, + .path = if (linker.options.node_modules_bundle != null) + Fs.Path.init(linker.nodeModuleBundleImportPath()) + else + try linker.generateImportPath( + source_dir, + linker.runtime_source_path, + Runtime.version(), + false, + "bun", + import_path_format, + ), + .range = logger.Range{ .loc = logger.Loc{ .start = 0 }, .len = 0 }, + }; + result.ast.runtime_import_record_id = @truncate(u32, import_records.len - 1); + result.ast.import_records = import_records; + } - // We _assume_ you're importing ESM. - // But, that assumption can be wrong without parsing code of the imports. - // That's where in here, we inject - // > import {require} from 'bun:runtime'; - // Since they definitely aren't using require, we don't have to worry about the symbol being renamed. - if (needs_require and !result.ast.uses_require_ref) { - result.ast.uses_require_ref = true; - require_part_import_clauses[0] = js_ast.ClauseItem{ - .alias = require_alias, - .original_name = "", - .alias_loc = logger.Loc.Empty, - .name = js_ast.LocRef{ - .loc = logger.Loc.Empty, - .ref = result.ast.require_ref, - }, - }; - - require_part_import_statement = js_ast.S.Import{ - .namespace_ref = Ref.None, - .items = std.mem.span(&require_part_import_clauses), - .import_record_index = result.ast.runtime_import_record_id.?, - }; - require_part_stmts[0] = js_ast.Stmt{ - .data = .{ .s_import = &require_part_import_statement }, + // We _assume_ you're importing ESM. + // But, that assumption can be wrong without parsing code of the imports. 
+ // That's where in here, we inject + // > import {require} from 'bun:runtime'; + // Since they definitely aren't using require, we don't have to worry about the symbol being renamed. + if (needs_require and !result.ast.uses_require_ref) { + result.ast.uses_require_ref = true; + require_part_import_clauses[0] = js_ast.ClauseItem{ + .alias = require_alias, + .original_name = "", + .alias_loc = logger.Loc.Empty, + .name = js_ast.LocRef{ .loc = logger.Loc.Empty, - }; - result.ast.prepend_part = js_ast.Part{ .stmts = std.mem.span(&require_part_stmts) }; - } + .ref = result.ast.require_ref, + }, + }; - // This is a bad idea - // I don't think it's safe to do this - const ImportStatementSorter = struct { - import_records: []ImportRecord, - pub fn lessThan(ctx: @This(), lhs: js_ast.Stmt, rhs: js_ast.Stmt) bool { - switch (lhs.data) { - .s_import => |li| { - switch (rhs.data) { - .s_import => |ri| { - const a = ctx.import_records[li.import_record_index]; - const b = ctx.import_records[ri.import_record_index]; - if (a.is_bundled and !b.is_bundled) { - return false; - } else { - return true; - } - }, - else => { + require_part_import_statement = js_ast.S.Import{ + .namespace_ref = Ref.None, + .items = std.mem.span(&require_part_import_clauses), + .import_record_index = result.ast.runtime_import_record_id.?, + }; + require_part_stmts[0] = js_ast.Stmt{ + .data = .{ .s_import = &require_part_import_statement }, + .loc = logger.Loc.Empty, + }; + result.ast.prepend_part = js_ast.Part{ .stmts = std.mem.span(&require_part_stmts) }; + } + + // This is a bad idea + // I don't think it's safe to do this + const ImportStatementSorter = struct { + import_records: []ImportRecord, + pub fn lessThan(ctx: @This(), lhs: js_ast.Stmt, rhs: js_ast.Stmt) bool { + switch (lhs.data) { + .s_import => |li| { + switch (rhs.data) { + .s_import => |ri| { + const a = ctx.import_records[li.import_record_index]; + const b = ctx.import_records[ri.import_record_index]; + if (a.is_bundled and !b.is_bundled) { + return false; + } else { return true; - }, - } - }, - else => { - switch (rhs.data) { - .s_import => |ri| { - const a = ctx.import_records[ri.import_record_index]; - if (!a.is_bundled) { - return false; - } else { - return true; - } - }, - else => { + } + }, + else => { + return true; + }, + } + }, + else => { + switch (rhs.data) { + .s_import => |ri| { + const a = ctx.import_records[ri.import_record_index]; + if (!a.is_bundled) { + return false; + } else { return true; - }, - } - }, - } + } + }, + else => { + return true; + }, + } + }, } - }; + } + }; - // std.sort.sort(comptime T: type, items: []T, context: anytype, comptime lessThan: fn(context:@TypeOf(context), lhs:T, rhs:T)bool) + // std.sort.sort(comptime T: type, items: []T, context: anytype, comptime lessThan: fn(context:@TypeOf(context), lhs:T, rhs:T)bool) - // Change the import order so that any bundled imports appear last - // This is to make it so the bundle (which should be quite large) is least likely to block rendering - // if (needs_bundle) { - // const sorter = ImportStatementSorter{ .import_records = result.ast.import_records }; - // for (result.ast.parts) |*part, i| { - // std.sort.sort(js_ast.Stmt, part.stmts, sorter, ImportStatementSorter.lessThan); - // } - // } - } + // Change the import order so that any bundled imports appear last + // This is to make it so the bundle (which should be quite large) is least likely to block rendering + // if (needs_bundle) { + // const sorter = ImportStatementSorter{ .import_records = result.ast.import_records }; + // 
for (result.ast.parts) |*part, i| { + // std.sort.sort(js_ast.Stmt, part.stmts, sorter, ImportStatementSorter.lessThan); + // } + // } + } + + const ImportPathsList = allocators.BSSStringList(512, 128); + pub var relative_paths_list: *ImportPathsList = undefined; + + pub fn generateImportPath( + linker: *ThisLinker, + source_dir: string, + source_path: string, + package_version: ?string, + use_hashed_name: bool, + namespace: string, + comptime import_path_format: Options.BundleOptions.ImportPathFormat, + ) !Fs.Path { + switch (import_path_format) { + .absolute_path => { + if (strings.eqlComptime(namespace, "node")) { + return Fs.Path.initWithNamespace(source_path, "node"); + } - const ImportPathsList = allocators.BSSStringList(512, 128); - pub var relative_paths_list: *ImportPathsList = undefined; - - pub fn generateImportPath( - linker: *ThisLinker, - source_dir: string, - source_path: string, - package_version: ?string, - use_hashed_name: bool, - namespace: string, - comptime import_path_format: Options.BundleOptions.ImportPathFormat, - ) !Fs.Path { - switch (import_path_format) { - .absolute_path => { - if (strings.eqlComptime(namespace, "node")) { - return Fs.Path.initWithNamespace(source_path, "node"); - } + var relative_name = linker.fs.relative(source_dir, source_path); + + return Fs.Path.initWithPretty(source_path, relative_name); + }, + .relative => { + var relative_name = linker.fs.relative(source_dir, source_path); + + var pretty: string = undefined; + if (use_hashed_name) { + var basepath = Fs.Path.init(source_path); + const basename = try linker.getHashedFilename(basepath, null); + var dir = basepath.name.dirWithTrailingSlash(); + var _pretty = try linker.allocator.alloc(u8, dir.len + basename.len + basepath.name.ext.len); + std.mem.copy(u8, _pretty, dir); + var remaining_pretty = _pretty[dir.len..]; + std.mem.copy(u8, remaining_pretty, basename); + remaining_pretty = remaining_pretty[basename.len..]; + std.mem.copy(u8, remaining_pretty, basepath.name.ext); + pretty = _pretty; + relative_name = try linker.allocator.dupe(u8, relative_name); + } else { + pretty = try linker.allocator.dupe(u8, relative_name); + relative_name = pretty; + } - var relative_name = linker.fs.relative(source_dir, source_path); + return Fs.Path.initWithPretty(pretty, relative_name); + }, + .relative_nodejs => { + var relative_name = linker.fs.relative(source_dir, source_path); + var pretty: string = undefined; + if (use_hashed_name) { + var basepath = Fs.Path.init(source_path); + const basename = try linker.getHashedFilename(basepath, null); + var dir = basepath.name.dirWithTrailingSlash(); + var _pretty = try linker.allocator.alloc(u8, dir.len + basename.len + basepath.name.ext.len); + std.mem.copy(u8, _pretty, dir); + var remaining_pretty = _pretty[dir.len..]; + std.mem.copy(u8, remaining_pretty, basename); + remaining_pretty = remaining_pretty[basename.len..]; + std.mem.copy(u8, remaining_pretty, basepath.name.ext); + pretty = _pretty; + relative_name = try linker.allocator.dupe(u8, relative_name); + } else { + pretty = try linker.allocator.dupe(u8, relative_name); + relative_name = pretty; + } - return Fs.Path.initWithPretty(source_path, relative_name); - }, - .relative => { - var relative_name = linker.fs.relative(source_dir, source_path); + var pathname = Fs.PathName.init(pretty); + var path = Fs.Path.initWithPretty(pretty, relative_name); + path.text = path.text[0 .. 
path.text.len - path.name.ext.len]; + return path; + }, + + .absolute_url => { + if (strings.eqlComptime(namespace, "node")) { + if (comptime isDebug) std.debug.assert(strings.eqlComptime(source_path[0..5], "node:")); + + return Fs.Path.init(try std.fmt.allocPrint( + linker.allocator, + // assumption: already starts with "node:" + "{s}/{s}", + .{ + linker.options.origin.origin, + source_path, + }, + )); + } else { + var absolute_pathname = Fs.PathName.init(source_path); - var pretty: string = undefined; - if (use_hashed_name) { - var basepath = Fs.Path.init(source_path); - const basename = try linker.getHashedFilename(basepath, null); - var dir = basepath.name.dirWithTrailingSlash(); - var _pretty = try linker.allocator.alloc(u8, dir.len + basename.len + basepath.name.ext.len); - std.mem.copy(u8, _pretty, dir); - var remaining_pretty = _pretty[dir.len..]; - std.mem.copy(u8, remaining_pretty, basename); - remaining_pretty = remaining_pretty[basename.len..]; - std.mem.copy(u8, remaining_pretty, basepath.name.ext); - pretty = _pretty; - relative_name = try linker.allocator.dupe(u8, relative_name); - } else { - pretty = try linker.allocator.dupe(u8, relative_name); - relative_name = pretty; + if (!linker.options.preserve_extensions) { + if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| { + absolute_pathname.ext = ext; + } } - return Fs.Path.initWithPretty(pretty, relative_name); - }, - .relative_nodejs => { - var relative_name = linker.fs.relative(source_dir, source_path); - var pretty: string = undefined; - if (use_hashed_name) { - var basepath = Fs.Path.init(source_path); - const basename = try linker.getHashedFilename(basepath, null); - var dir = basepath.name.dirWithTrailingSlash(); - var _pretty = try linker.allocator.alloc(u8, dir.len + basename.len + basepath.name.ext.len); - std.mem.copy(u8, _pretty, dir); - var remaining_pretty = _pretty[dir.len..]; - std.mem.copy(u8, remaining_pretty, basename); - remaining_pretty = remaining_pretty[basename.len..]; - std.mem.copy(u8, remaining_pretty, basepath.name.ext); - pretty = _pretty; - relative_name = try linker.allocator.dupe(u8, relative_name); - } else { - pretty = try linker.allocator.dupe(u8, relative_name); - relative_name = pretty; + var base = linker.fs.relativeTo(source_path); + if (strings.lastIndexOfChar(base, '.')) |dot| { + base = base[0..dot]; } - var pathname = Fs.PathName.init(pretty); - var path = Fs.Path.initWithPretty(pretty, relative_name); - path.text = path.text[0 .. 
path.text.len - path.name.ext.len]; - return path; - }, - - .absolute_url => { - if (strings.eqlComptime(namespace, "node")) { - if (comptime isDebug) std.debug.assert(strings.eqlComptime(source_path[0..5], "node:")); - - return Fs.Path.init(try std.fmt.allocPrint( - linker.allocator, - // assumption: already starts with "node:" - "{s}/{s}", - .{ - linker.options.origin.origin, - source_path, - }, - )); - } else { - var absolute_pathname = Fs.PathName.init(source_path); - - if (!linker.options.preserve_extensions) { - if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| { - absolute_pathname.ext = ext; - } - } - - var base = linker.fs.relativeTo(source_path); - if (strings.lastIndexOfChar(base, '.')) |dot| { - base = base[0..dot]; - } - - var dirname = std.fs.path.dirname(base) orelse ""; - - var basename = std.fs.path.basename(base); + var dirname = std.fs.path.dirname(base) orelse ""; - if (use_hashed_name) { - var basepath = Fs.Path.init(source_path); - basename = try linker.getHashedFilename(basepath, null); - } + var basename = std.fs.path.basename(base); - return Fs.Path.init(try linker.options.origin.joinAlloc( - linker.allocator, - linker.options.routes.asset_prefix_path, - dirname, - basename, - absolute_pathname.ext, - source_path, - )); + if (use_hashed_name) { + var basepath = Fs.Path.init(source_path); + basename = try linker.getHashedFilename(basepath, null); } - }, - else => unreachable, - } - } + return Fs.Path.init(try linker.options.origin.joinAlloc( + linker.allocator, + linker.options.routes.asset_prefix_path, + dirname, + basename, + absolute_pathname.ext, + source_path, + )); + } + }, - pub fn processImportRecord( - linker: *ThisLinker, - loader: Options.Loader, - source_dir: string, - resolve_result: *const Resolver.Result, - import_record: *ImportRecord, - comptime import_path_format: Options.BundleOptions.ImportPathFormat, - ) !void { - linker.import_counter += 1; - // lazy means: - // Run the resolver - // Don't parse/print automatically. - if (linker.options.resolve_mode != .lazy) { - _ = try linker.enqueueResolveResult(resolve_result); - } - const path = resolve_result.pathConst() orelse unreachable; - - import_record.path = try linker.generateImportPath( - source_dir, - if (path.is_symlink and import_path_format == .absolute_url and linker.options.platform != .bun) path.pretty else path.text, - if (resolve_result.package_json) |package_json| package_json.version else "", - BundlerType.isCacheEnabled and loader == .file, - path.namespace, - import_path_format, - ); - - switch (loader) { - .css => { - if (linker.onImportCSS) |callback| { - callback(resolve_result, import_record, source_dir); - } - // This saves us a less reliable string check - import_record.print_mode = .css; - }, - .file => { - import_record.print_mode = .import_path; - }, - else => {}, - } + else => unreachable, } + } + + pub fn processImportRecord( + linker: *ThisLinker, + loader: Options.Loader, + source_dir: string, + resolve_result: *const Resolver.Result, + import_record: *ImportRecord, + comptime import_path_format: Options.BundleOptions.ImportPathFormat, + ) !void { + linker.import_counter += 1; + // lazy means: + // Run the resolver + // Don't parse/print automatically. 
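In the rebuilt processImportRecord below, every non-lazy resolve result is funneled into enqueueResolveResult, which de-duplicates work with a single getOrPut against a hash of the path (see resolveResultHashKey further down). The shape in isolation, with u64 keys standing in for hashed resolve results and std containers in place of the codebase's queue types:

const std = @import("std");

fn enqueueOnce(
    seen: *std.AutoHashMap(u64, void),
    queue: *std.fifo.LinearFifo(u64, .Dynamic),
    hash_key: u64,
) !bool {
    const slot = try seen.getOrPut(hash_key);
    if (!slot.found_existing) {
        // First sighting of this key: schedule exactly one build for it.
        try queue.writeItem(hash_key);
    }
    return !slot.found_existing;
}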
+ if (linker.options.resolve_mode != .lazy) { + _ = try linker.enqueueResolveResult(resolve_result); + } + const path = resolve_result.pathConst() orelse unreachable; + + import_record.path = try linker.generateImportPath( + source_dir, + if (path.is_symlink and import_path_format == .absolute_url and linker.options.platform != .bun) path.pretty else path.text, + if (resolve_result.package_json) |package_json| package_json.version else "", + Bundler.isCacheEnabled and loader == .file, + path.namespace, + import_path_format, + ); + + switch (loader) { + .css => { + if (linker.onImportCSS) |callback| { + callback(resolve_result, import_record, source_dir); + } + // This saves us a less reliable string check + import_record.print_mode = .css; + }, + .file => { + import_record.print_mode = .import_path; + }, + else => {}, + } + } - pub fn resolveResultHashKey(linker: *ThisLinker, resolve_result: *const Resolver.Result) u64 { - const path = resolve_result.pathConst() orelse unreachable; - var hash_key = path.text; - - // Shorter hash key is faster to hash - if (strings.startsWith(path.text, linker.fs.top_level_dir)) { - hash_key = path.text[linker.fs.top_level_dir.len..]; - } + pub fn resolveResultHashKey(linker: *ThisLinker, resolve_result: *const Resolver.Result) u64 { + const path = resolve_result.pathConst() orelse unreachable; + var hash_key = path.text; - return std.hash.Wyhash.hash(0, hash_key); + // Shorter hash key is faster to hash + if (strings.startsWith(path.text, linker.fs.top_level_dir)) { + hash_key = path.text[linker.fs.top_level_dir.len..]; } - pub fn enqueueResolveResult(linker: *ThisLinker, resolve_result: *const Resolver.Result) !bool { - const hash_key = linker.resolveResultHashKey(resolve_result); + return std.hash.Wyhash.hash(0, hash_key); + } - const get_or_put_entry = try linker.resolve_results.getOrPut(hash_key); + pub fn enqueueResolveResult(linker: *ThisLinker, resolve_result: *const Resolver.Result) !bool { + const hash_key = linker.resolveResultHashKey(resolve_result); - if (!get_or_put_entry.found_existing) { - try linker.resolve_queue.writeItem(resolve_result.*); - } + const get_or_put_entry = try linker.resolve_results.getOrPut(hash_key); - return !get_or_put_entry.found_existing; + if (!get_or_put_entry.found_existing) { + try linker.resolve_queue.writeItem(resolve_result.*); } - }; -} -pub const Linker = NewLinker(_bundler.Bundler); -pub const ServeLinker = NewLinker(_bundler.ServeBundler); + return !get_or_put_entry.found_existing; + } +}; diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 1eeed401d..a40cc0f1c 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -10,7 +10,7 @@ const TSConfigJSON = @import("./tsconfig_json.zig").TSConfigJSON; const PackageJSON = @import("./package_json.zig").PackageJSON; const ESModule = @import("./package_json.zig").ESModule; const BrowserMap = @import("./package_json.zig").BrowserMap; - +const CacheSet = cache.Set; usingnamespace @import("./data_url.zig"); pub const DirInfo = @import("./dir_info.zig"); const HTTPWatcher = @import("../http.zig").Watcher; @@ -316,169 +316,140 @@ var resolver_Mutex_loaded: bool = false; // TODO: // - Fix "browser" field mapping // - Consider removing the string list abstraction? 
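The resolver hunk below repeats, one more time, the mechanical rewrite this commit applies to the bundler, cache, and linker above: a comptime bool that now only ever takes one value gets pinned, and the type factory collapses into a plain struct. The pattern in miniature, with Thing as a placeholder name rather than anything from this diff:

// Before: one comptime factory, two instantiations.
pub fn NewThing(comptime cache_files: bool) type {
    return struct {
        pub fn lookup(self: *@This(), key: u64) u64 {
            if (comptime cache_files) {
                // ... consult the cache first ...
            }
            _ = self;
            return key;
        }
    };
}
pub const CachedThing = NewThing(true);
pub const ServeThing = NewThing(false);

// After: only one instantiation survives, so the wrapper function, the
// second alias, and the dead comptime branch all disappear.
pub const Thing = struct {
    pub fn lookup(self: *@This(), key: u64) u64 {
        _ = self;
        return key;
    }
};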
-pub fn NewResolver(cache_files: bool) type { - const CacheSet = if (cache_files) cache.Cache.Set else cache.ServeCache.Set; - - return struct { - const ThisResolver = @This(); - opts: options.BundleOptions, - fs: *Fs.FileSystem, - log: *logger.Log, +pub const Resolver = struct { + const ThisResolver = @This(); + opts: options.BundleOptions, + fs: *Fs.FileSystem, + log: *logger.Log, + allocator: *std.mem.Allocator, + node_module_bundle: ?*NodeModuleBundle, + extension_order: []const string = undefined, + + debug_logs: ?DebugLogs = null, + elapsed: i128 = 0, // tracing + + onStartWatchingDirectory: ?fn (*HTTPWatcher, dir_path: string, dir_fd: StoredFileDescriptorType) void = null, + onStartWatchingDirectoryCtx: ?*HTTPWatcher = null, + + caches: CacheSet, + + // These are sets that represent various conditions for the "exports" field + // in package.json. + // esm_conditions_default: std.StringHashMap(bool), + // esm_conditions_import: std.StringHashMap(bool), + // esm_conditions_require: std.StringHashMap(bool), + + // A special filtered import order for CSS "@import" imports. + // + // The "resolve extensions" setting determines the order of implicit + // extensions to try when resolving imports with the extension omitted. + // Sometimes people create a JavaScript/TypeScript file and a CSS file with + // the same name when they create a component. At a high level, users expect + // implicit extensions to resolve to the JS file when being imported from JS + // and to resolve to the CSS file when being imported from CSS. + // + // Different bundlers handle this in different ways. Parcel handles this by + // having the resolver prefer the same extension as the importing file in + // front of the configured "resolve extensions" order. Webpack's "css-loader" + // plugin just explicitly configures a special "resolve extensions" order + // consisting of only ".css" for CSS files. + // + // It's unclear what behavior is best here. What we currently do is to create + // a special filtered version of the configured "resolve extensions" order + // for CSS files that filters out any extension that has been explicitly + // configured with a non-CSS loader. This still gives users control over the + // order but avoids the scenario where we match an import in a CSS file to a + // JavaScript-related file. It's probably not perfect with plugins in the + // picture but it's better than some alternatives and probably pretty good. + // atImportExtensionOrder []string + + // This mutex serves two purposes. First of all, it guards access to "dirCache" + // which is potentially mutated during path resolution. But this mutex is also + // necessary for performance. The "React admin" benchmark mysteriously runs + // twice as fast when this mutex is locked around the whole resolve operation + // instead of around individual accesses to "dirCache". For some reason, + // reducing parallelism in the resolver helps the rest of the bundler go + // faster. I'm not sure why this is but please don't change this unless you + // do a lot of testing with various benchmarks and there aren't any regressions. 
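// Returning to the CSS "@import" note above: a commented sketch of building
// that filtered extension order (the `loaders` map is illustrative here; the
// real configuration lives on the bundler options, not on this struct):
//
//     fn cssExtensionOrder(order: []const string, loaders: std.StringHashMap(options.Loader), buf: []string) []string {
//         var count: usize = 0;
//         for (order) |ext| {
//             // Drop any extension explicitly claimed by a non-CSS loader, so
//             // `@import "./button"` can never resolve to "./button.tsx".
//             if (loaders.get(ext)) |loader| {
//                 if (loader != .css) continue;
//             }
//             buf[count] = ext;
//             count += 1;
//         }
//         return buf[0..count];
//     }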
+ mutex: *Mutex, + + // This cache maps a directory path to information about that directory and + // all parent directories + dir_cache: *DirInfo.HashMap, + + pub fn init1( allocator: *std.mem.Allocator, - node_module_bundle: ?*NodeModuleBundle, - extension_order: []const string = undefined, - - debug_logs: ?DebugLogs = null, - elapsed: i128 = 0, // tracing - - onStartWatchingDirectory: ?fn (*HTTPWatcher, dir_path: string, dir_fd: StoredFileDescriptorType) void = null, - onStartWatchingDirectoryCtx: ?*HTTPWatcher = null, - - caches: CacheSet, - - // These are sets that represent various conditions for the "exports" field - // in package.json. - // esm_conditions_default: std.StringHashMap(bool), - // esm_conditions_import: std.StringHashMap(bool), - // esm_conditions_require: std.StringHashMap(bool), - - // A special filtered import order for CSS "@import" imports. - // - // The "resolve extensions" setting determines the order of implicit - // extensions to try when resolving imports with the extension omitted. - // Sometimes people create a JavaScript/TypeScript file and a CSS file with - // the same name when they create a component. At a high level, users expect - // implicit extensions to resolve to the JS file when being imported from JS - // and to resolve to the CSS file when being imported from CSS. - // - // Different bundlers handle this in different ways. Parcel handles this by - // having the resolver prefer the same extension as the importing file in - // front of the configured "resolve extensions" order. Webpack's "css-loader" - // plugin just explicitly configures a special "resolve extensions" order - // consisting of only ".css" for CSS files. - // - // It's unclear what behavior is best here. What we currently do is to create - // a special filtered version of the configured "resolve extensions" order - // for CSS files that filters out any extension that has been explicitly - // configured with a non-CSS loader. This still gives users control over the - // order but avoids the scenario where we match an import in a CSS file to a - // JavaScript-related file. It's probably not perfect with plugins in the - // picture but it's better than some alternatives and probably pretty good. - // atImportExtensionOrder []string - - // This mutex serves two purposes. First of all, it guards access to "dirCache" - // which is potentially mutated during path resolution. But this mutex is also - // necessary for performance. The "React admin" benchmark mysteriously runs - // twice as fast when this mutex is locked around the whole resolve operation - // instead of around individual accesses to "dirCache". For some reason, - // reducing parallelism in the resolver helps the rest of the bundler go - // faster. I'm not sure why this is but please don't change this unless you - // do a lot of testing with various benchmarks and there aren't any regressions. 
- mutex: *Mutex, - - // This cache maps a directory path to information about that directory and - // all parent directories - dir_cache: *DirInfo.HashMap, - - pub fn init1( - allocator: *std.mem.Allocator, - log: *logger.Log, - _fs: *Fs.FileSystem, - opts: options.BundleOptions, - ) ThisResolver { - if (!resolver_Mutex_loaded) { - resolver_Mutex = Mutex.init(); - resolver_Mutex_loaded = true; - } - - return ThisResolver{ - .allocator = allocator, - .dir_cache = DirInfo.HashMap.init(allocator), - .mutex = &resolver_Mutex, - .caches = CacheSet.init(allocator), - .opts = opts, - .fs = _fs, - .node_module_bundle = opts.node_modules_bundle, - .log = log, - .extension_order = opts.extension_order, - }; + log: *logger.Log, + _fs: *Fs.FileSystem, + opts: options.BundleOptions, + ) ThisResolver { + if (!resolver_Mutex_loaded) { + resolver_Mutex = Mutex.init(); + resolver_Mutex_loaded = true; } - pub fn isExternalPattern(r: *ThisResolver, import_path: string) bool { - for (r.opts.external.patterns) |pattern| { - if (import_path.len >= pattern.prefix.len + pattern.suffix.len and (strings.startsWith( - import_path, - pattern.prefix, - ) and strings.endsWith( - import_path, - pattern.suffix, - ))) { - return true; - } + return ThisResolver{ + .allocator = allocator, + .dir_cache = DirInfo.HashMap.init(allocator), + .mutex = &resolver_Mutex, + .caches = CacheSet.init(allocator), + .opts = opts, + .fs = _fs, + .node_module_bundle = opts.node_modules_bundle, + .log = log, + .extension_order = opts.extension_order, + }; + } + + pub fn isExternalPattern(r: *ThisResolver, import_path: string) bool { + for (r.opts.external.patterns) |pattern| { + if (import_path.len >= pattern.prefix.len + pattern.suffix.len and (strings.startsWith( + import_path, + pattern.prefix, + ) and strings.endsWith( + import_path, + pattern.suffix, + ))) { + return true; } - return false; } + return false; + } - pub fn flushDebugLogs(r: *ThisResolver, flush_mode: DebugLogs.FlushMode) !void { - if (r.debug_logs) |*debug| { - if (flush_mode == DebugLogs.FlushMode.fail) { - try r.log.addRangeDebugWithNotes(null, logger.Range{ .loc = logger.Loc{} }, debug.what, debug.notes.toOwnedSlice()); - } else if (@enumToInt(r.log.level) <= @enumToInt(logger.Log.Level.verbose)) { - try r.log.addVerboseWithNotes(null, logger.Loc.Empty, debug.what, debug.notes.toOwnedSlice()); - } + pub fn flushDebugLogs(r: *ThisResolver, flush_mode: DebugLogs.FlushMode) !void { + if (r.debug_logs) |*debug| { + if (flush_mode == DebugLogs.FlushMode.fail) { + try r.log.addRangeDebugWithNotes(null, logger.Range{ .loc = logger.Loc{} }, debug.what, debug.notes.toOwnedSlice()); + } else if (@enumToInt(r.log.level) <= @enumToInt(logger.Log.Level.verbose)) { + try r.log.addVerboseWithNotes(null, logger.Loc.Empty, debug.what, debug.notes.toOwnedSlice()); } } - var tracing_start: i128 = if (FeatureFlags.tracing) 0 else undefined; - - pub const bunFrameworkPackagePrefix = "bun-framework-"; - pub fn resolveFramework( - r: *ThisResolver, - package: string, - pair: *PackageJSON.FrameworkRouterPair, - comptime preference: PackageJSON.LoadFramework, - comptime load_defines: bool, - ) !void { - - // We want to enable developers to integrate frameworks without waiting on official support. - // But, we still want the command to do the actual framework integration to be succint - // This lets users type "--use next" instead of "--use bun-framework-next" - // If they're using a local file path, we skip this. 
- if (isPackagePath(package)) {
- var prefixed_package_buf: [512]u8 = undefined;
- // Prevent the extra lookup if the package is already prefixed, i.e. avoid "bun-framework-next-bun-framework-next"
- if (strings.startsWith(package, bunFrameworkPackagePrefix) or package.len + bunFrameworkPackagePrefix.len >= prefixed_package_buf.len) {
- return r._resolveFramework(package, pair, preference, load_defines) catch |err| {
- switch (err) {
- error.ModuleNotFound => {
- Output.prettyErrorln("<r><red>ResolveError<r> can't find framework: <b>\"{s}\"<r>.\n\nMaybe it's not installed? Try running this:\n\n <b>npm install -D {s}<r>\n <b>bun bun --use {s}<r>", .{ package, package, package });
- Output.flush();
- std.os.exit(1);
- },
- else => {
- return err;
- },
- }
- };
- }
-
- prefixed_package_buf[0..bunFrameworkPackagePrefix.len].* = bunFrameworkPackagePrefix.*;
- std.mem.copy(u8, prefixed_package_buf[bunFrameworkPackagePrefix.len..], package);
- const prefixed_name = prefixed_package_buf[0 .. bunFrameworkPackagePrefix.len + package.len];
- return r._resolveFramework(prefixed_name, pair, preference, load_defines) catch |err| {
+ }
+ var tracing_start: i128 = if (FeatureFlags.tracing) 0 else undefined;
+
+ pub const bunFrameworkPackagePrefix = "bun-framework-";
+ pub fn resolveFramework(
+ r: *ThisResolver,
+ package: string,
+ pair: *PackageJSON.FrameworkRouterPair,
+ comptime preference: PackageJSON.LoadFramework,
+ comptime load_defines: bool,
+ ) !void {
+
+ // We want to enable developers to integrate frameworks without waiting on official support.
+ // But, we still want the command to do the actual framework integration to be succinct
+ // This lets users type "--use next" instead of "--use bun-framework-next"
+ // If they're using a local file path, we skip this.
+ if (isPackagePath(package)) {
+ var prefixed_package_buf: [512]u8 = undefined;
+ // Prevent the extra lookup if the package is already prefixed, i.e. avoid "bun-framework-next-bun-framework-next"
+ if (strings.startsWith(package, bunFrameworkPackagePrefix) or package.len + bunFrameworkPackagePrefix.len >= prefixed_package_buf.len) {
+ return r._resolveFramework(package, pair, preference, load_defines) catch |err| {
 switch (err) {
 error.ModuleNotFound => {
- return r._resolveFramework(package, pair, preference, load_defines) catch |err2| {
- switch (err2) {
- error.ModuleNotFound => {
- Output.prettyErrorln("<r><red>ResolveError<r> can't find framework: <b>\"{s}\"<r>.\n\nMaybe it's not installed? Try running this:\n\n <b>npm install -D {s}\n <b>bun bun --use {s}<r>", .{ package, prefixed_name, package });
- Output.flush();
- std.os.exit(1);
- },
- else => {
- return err;
- },
- }
- };
+ Output.prettyErrorln("<r><red>ResolveError<r> can't find framework: <b>\"{s}\"<r>.\n\nMaybe it's not installed? Try running this:\n\n <b>npm install -D {s}<r>\n <b>bun bun --use {s}<r>", .{ package, package, package });
+ Output.flush();
+ std.os.exit(1);
 },
 else => {
 return err;
@@ -487,13 +458,24 @@ pub fn NewResolver(cache_files: bool) type {
 };
 }
 
- return r._resolveFramework(package, pair, preference, load_defines) catch |err| {
+ prefixed_package_buf[0..bunFrameworkPackagePrefix.len].* = bunFrameworkPackagePrefix.*;
+ std.mem.copy(u8, prefixed_package_buf[bunFrameworkPackagePrefix.len..], package);
+ const prefixed_name = prefixed_package_buf[0 .. 
bunFrameworkPackagePrefix.len + package.len]; + return r._resolveFramework(prefixed_name, pair, preference, load_defines) catch |err| { switch (err) { error.ModuleNotFound => { - Output.prettyError("<r><red>ResolveError<r> can't find local framework: <b>\"{s}\"<r>.", .{package}); - - Output.flush(); - std.os.exit(1); + return r._resolveFramework(package, pair, preference, load_defines) catch |err2| { + switch (err2) { + error.ModuleNotFound => { + Output.prettyErrorln("<r><red>ResolveError<r> can't find framework: <b>\"{s}\"<r>.\n\nMaybe it's not installed? Try running this:\n\n <b>npm install -D {s}\n <b>bun bun --use {s}<r>", .{ package, prefixed_name, package }); + Output.flush(); + std.os.exit(1); + }, + else => { + return err; + }, + } + }; }, else => { return err; @@ -502,2079 +484,2077 @@ pub fn NewResolver(cache_files: bool) type { }; } - fn _resolveFramework( - r: *ThisResolver, - package: string, - pair: *PackageJSON.FrameworkRouterPair, - comptime preference: PackageJSON.LoadFramework, - comptime load_defines: bool, - ) !void { + return r._resolveFramework(package, pair, preference, load_defines) catch |err| { + switch (err) { + error.ModuleNotFound => { + Output.prettyError("<r><red>ResolveError<r> can't find local framework: <b>\"{s}\"<r>.", .{package}); - // TODO: make this only parse package.json once - var result = try r.resolve(r.fs.top_level_dir, package, .internal); - // support passing a package.json or path to a package - const pkg: *const PackageJSON = result.package_json orelse r.packageJSONForResolvedNodeModuleWithIgnoreMissingName(&result, true) orelse return error.MissingPackageJSON; + Output.flush(); + std.os.exit(1); + }, + else => { + return err; + }, + } + }; + } - const json = (try r.caches.json.parseJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError; + fn _resolveFramework( + r: *ThisResolver, + package: string, + pair: *PackageJSON.FrameworkRouterPair, + comptime preference: PackageJSON.LoadFramework, + comptime load_defines: bool, + ) !void { - pkg.loadFrameworkWithPreference(pair, json, r.allocator, load_defines, preference); - const dir = pkg.source.path.sourceDir(); + // TODO: make this only parse package.json once + var result = try r.resolve(r.fs.top_level_dir, package, .internal); + // support passing a package.json or path to a package + const pkg: *const PackageJSON = result.package_json orelse r.packageJSONForResolvedNodeModuleWithIgnoreMissingName(&result, true) orelse return error.MissingPackageJSON; - var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + const json = (try r.caches.json.parseJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError; - pair.framework.resolved_dir = pkg.source.path.sourceDir(); + pkg.loadFrameworkWithPreference(pair, json, r.allocator, load_defines, preference); + const dir = pkg.source.path.sourceDir(); - if (pair.framework.client.isEnabled()) { - var parts = [_]string{ dir, pair.framework.client.path }; - const abs = r.fs.abs(&parts); - pair.framework.client.path = try r.allocator.dupe(u8, abs); - pair.framework.resolved = true; - } + var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; - if (pair.framework.server.isEnabled()) { - var parts = [_]string{ dir, pair.framework.server.path }; - const abs = r.fs.abs(&parts); - pair.framework.server.path = try r.allocator.dupe(u8, abs); - pair.framework.resolved = true; - } + pair.framework.resolved_dir = pkg.source.path.sourceDir(); - if (pair.framework.fallback.isEnabled()) { - var parts = [_]string{ dir, pair.framework.fallback.path }; - 
const abs = r.fs.abs(&parts); - pair.framework.fallback.path = try r.allocator.dupe(u8, abs); - pair.framework.resolved = true; - } + if (pair.framework.client.isEnabled()) { + var parts = [_]string{ dir, pair.framework.client.path }; + const abs = r.fs.abs(&parts); + pair.framework.client.path = try r.allocator.dupe(u8, abs); + pair.framework.resolved = true; + } - if (pair.loaded_routes) { - const chosen_dir: string = brk: { - if (pair.router.possible_dirs.len > 0) { - for (pair.router.possible_dirs) |route_dir| { - var parts = [_]string{ r.fs.top_level_dir, std.fs.path.sep_str, route_dir }; - const abs = r.fs.join(&parts); - // must end in trailing slash - break :brk (std.os.realpath(abs, &buf) catch continue); - } - return error.MissingRouteDir; - } else { - var parts = [_]string{ r.fs.top_level_dir, std.fs.path.sep_str, pair.router.dir }; + if (pair.framework.server.isEnabled()) { + var parts = [_]string{ dir, pair.framework.server.path }; + const abs = r.fs.abs(&parts); + pair.framework.server.path = try r.allocator.dupe(u8, abs); + pair.framework.resolved = true; + } + + if (pair.framework.fallback.isEnabled()) { + var parts = [_]string{ dir, pair.framework.fallback.path }; + const abs = r.fs.abs(&parts); + pair.framework.fallback.path = try r.allocator.dupe(u8, abs); + pair.framework.resolved = true; + } + + if (pair.loaded_routes) { + const chosen_dir: string = brk: { + if (pair.router.possible_dirs.len > 0) { + for (pair.router.possible_dirs) |route_dir| { + var parts = [_]string{ r.fs.top_level_dir, std.fs.path.sep_str, route_dir }; const abs = r.fs.join(&parts); // must end in trailing slash - break :brk std.os.realpath(abs, &buf) catch return error.MissingRouteDir; + break :brk (std.os.realpath(abs, &buf) catch continue); } - }; + return error.MissingRouteDir; + } else { + var parts = [_]string{ r.fs.top_level_dir, std.fs.path.sep_str, pair.router.dir }; + const abs = r.fs.join(&parts); + // must end in trailing slash + break :brk std.os.realpath(abs, &buf) catch return error.MissingRouteDir; + } + }; - var out = try r.allocator.alloc(u8, chosen_dir.len + 1); - std.mem.copy(u8, out, chosen_dir); - out[out.len - 1] = '/'; - pair.router.dir = out; - pair.router.routes_enabled = true; - } + var out = try r.allocator.alloc(u8, chosen_dir.len + 1); + std.mem.copy(u8, out, chosen_dir); + out[out.len - 1] = '/'; + pair.router.dir = out; + pair.router.routes_enabled = true; } + } - pub fn resolve(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result { - r.extension_order = if (kind.isFromCSS()) std.mem.span(&options.BundleOptions.Defaults.CSSExtensionOrder) else r.opts.extension_order; + pub fn resolve(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result { + r.extension_order = if (kind.isFromCSS()) std.mem.span(&options.BundleOptions.Defaults.CSSExtensionOrder) else r.opts.extension_order; + if (FeatureFlags.tracing) { + tracing_start = std.time.nanoTimestamp(); + } + defer { if (FeatureFlags.tracing) { - tracing_start = std.time.nanoTimestamp(); + r.elapsed += std.time.nanoTimestamp() - tracing_start; } - defer { - if (FeatureFlags.tracing) { - r.elapsed += std.time.nanoTimestamp() - tracing_start; - } + } + if (r.log.level == .verbose) { + if (r.debug_logs != null) { + r.debug_logs.?.deinit(); } - if (r.log.level == .verbose) { - if (r.debug_logs != null) { - r.debug_logs.?.deinit(); - } - r.debug_logs = try DebugLogs.init(r.allocator); - } + r.debug_logs = try DebugLogs.init(r.allocator); + } - if 
(import_path.len == 0) return error.ModuleNotFound; + if (import_path.len == 0) return error.ModuleNotFound; - // Certain types of URLs default to being external for convenience - if (r.isExternalPattern(import_path) or - // "fill: url(#filter);" - (kind.isFromCSS() and strings.startsWith(import_path, "#")) or + // Certain types of URLs default to being external for convenience + if (r.isExternalPattern(import_path) or + // "fill: url(#filter);" + (kind.isFromCSS() and strings.startsWith(import_path, "#")) or - // "background: url(http://example.com/images/image.png);" - strings.startsWith(import_path, "http://") or + // "background: url(http://example.com/images/image.png);" + strings.startsWith(import_path, "http://") or - // "background: url(https://example.com/images/image.png);" - strings.startsWith(import_path, "https://") or + // "background: url(https://example.com/images/image.png);" + strings.startsWith(import_path, "https://") or - // "background: url(//example.com/images/image.png);" - strings.startsWith(import_path, "//")) - { - if (r.debug_logs) |*debug| { - try debug.addNote("Marking this path as implicitly external"); - } - r.flushDebugLogs(.success) catch {}; - return Result{ - .import_kind = kind, - .path_pair = PathPair{ - .primary = Path.init(import_path), - }, - .is_external = true, - .module_type = .esm, - }; + // "background: url(//example.com/images/image.png);" + strings.startsWith(import_path, "//")) + { + if (r.debug_logs) |*debug| { + try debug.addNote("Marking this path as implicitly external"); } + r.flushDebugLogs(.success) catch {}; + return Result{ + .import_kind = kind, + .path_pair = PathPair{ + .primary = Path.init(import_path), + }, + .is_external = true, + .module_type = .esm, + }; + } - if (DataURL.parse(import_path)) |_data_url| { - const data_url: DataURL = _data_url; - // "import 'data:text/javascript,console.log(123)';" - // "@import 'data:text/css,body{background:white}';" - if (data_url.decode_mime_type() != .Unsupported) { - if (r.debug_logs) |*debug| { - debug.addNote("Putting this path in the \"dataurl\" namespace") catch {}; - } - r.flushDebugLogs(.success) catch {}; - return Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } }; - } - - // "background: url(data:image/png;base64,iVBORw0KGgo=);" + if (DataURL.parse(import_path)) |_data_url| { + const data_url: DataURL = _data_url; + // "import 'data:text/javascript,console.log(123)';" + // "@import 'data:text/css,body{background:white}';" + if (data_url.decode_mime_type() != .Unsupported) { if (r.debug_logs) |*debug| { - debug.addNote("Marking this \"dataurl\" as external") catch {}; + debug.addNote("Putting this path in the \"dataurl\" namespace") catch {}; } r.flushDebugLogs(.success) catch {}; - return Result{ - .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") }, - .is_external = true, - }; + return Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } }; } - // Fail now if there is no directory to resolve in. This can happen for - // virtual modules (e.g. stdin) if a resolve directory is not specified. 
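// The full "implicitly external" predicate above, gathered into one place as
// a commented sketch (same helpers as the real code; the function name is
// made up):
//
//     fn isImplicitlyExternal(r: *ThisResolver, import_path: string, kind: ast.ImportKind) bool {
//         return r.isExternalPattern(import_path) or
//             // "fill: url(#filter);" points inside the current document
//             (kind.isFromCSS() and strings.startsWith(import_path, "#")) or
//             strings.startsWith(import_path, "http://") or
//             strings.startsWith(import_path, "https://") or
//             // protocol-relative: "background: url(//example.com/image.png);"
//             strings.startsWith(import_path, "//");
//     }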
- if (source_dir.len == 0) { - if (r.debug_logs) |*debug| { - debug.addNote("Cannot resolve this path without a directory") catch {}; - } - r.flushDebugLogs(.fail) catch {}; - return error.MissingResolveDir; + // "background: url(data:image/png;base64,iVBORw0KGgo=);" + if (r.debug_logs) |*debug| { + debug.addNote("Marking this \"dataurl\" as external") catch {}; } - - // r.mutex.lock(); - // defer r.mutex.unlock(); - errdefer (r.flushDebugLogs(.fail) catch {}); - var result = (try r.resolveWithoutSymlinks(source_dir, import_path, kind)) orelse { - r.flushDebugLogs(.fail) catch {}; - return error.ModuleNotFound; + r.flushDebugLogs(.success) catch {}; + return Result{ + .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") }, + .is_external = true, }; + } - if (!strings.eqlComptime(result.path_pair.primary.namespace, "node")) - try r.finalizeResult(&result); - - r.flushDebugLogs(.success) catch {}; - result.import_kind = kind; - return result; + // Fail now if there is no directory to resolve in. This can happen for + // virtual modules (e.g. stdin) if a resolve directory is not specified. + if (source_dir.len == 0) { + if (r.debug_logs) |*debug| { + debug.addNote("Cannot resolve this path without a directory") catch {}; + } + r.flushDebugLogs(.fail) catch {}; + return error.MissingResolveDir; } - pub fn finalizeResult(r: *ThisResolver, result: *Result) !void { - if (result.is_external) return; + // r.mutex.lock(); + // defer r.mutex.unlock(); + errdefer (r.flushDebugLogs(.fail) catch {}); + var result = (try r.resolveWithoutSymlinks(source_dir, import_path, kind)) orelse { + r.flushDebugLogs(.fail) catch {}; + return error.ModuleNotFound; + }; - var iter = result.path_pair.iter(); - while (iter.next()) |path| { - var dir: *DirInfo = (r.readDirInfo(path.name.dir) catch continue) orelse continue; - result.package_json = result.package_json orelse dir.enclosing_package_json; + if (!strings.eqlComptime(result.path_pair.primary.namespace, "node")) + try r.finalizeResult(&result); - if (dir.getEntries()) |entries| { - if (entries.get(path.name.filename)) |query| { - const symlink_path = query.entry.symlink(&r.fs.fs); - if (symlink_path.len > 0) { - path.setRealpath(symlink_path); - if (result.file_fd == 0) result.file_fd = query.entry.cache.fd; + r.flushDebugLogs(.success) catch {}; + result.import_kind = kind; + return result; + } - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.text, symlink_path }) catch {}; - } - } else if (dir.abs_real_path.len > 0) { - var parts = [_]string{ dir.abs_real_path, query.entry.base() }; - var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + pub fn finalizeResult(r: *ThisResolver, result: *Result) !void { + if (result.is_external) return; - var out = r.fs.absBuf(&parts, &buf); + var iter = result.path_pair.iter(); + while (iter.next()) |path| { + var dir: *DirInfo = (r.readDirInfo(path.name.dir) catch continue) orelse continue; + result.package_json = result.package_json orelse dir.enclosing_package_json; - if (query.entry.cache.fd == 0) { - buf[out.len] = 0; - const span = buf[0..out.len :0]; - var file = try std.fs.openFileAbsoluteZ(span, .{ .read = true }); + if (dir.getEntries()) |entries| { + if (entries.get(path.name.filename)) |query| { + const symlink_path = query.entry.symlink(&r.fs.fs); + if (symlink_path.len > 0) { + path.setRealpath(symlink_path); + if (result.file_fd == 0) result.file_fd = query.entry.cache.fd; - if (comptime !FeatureFlags.store_file_descriptors) { - out = try 
std.os.getFdPath(query.entry.cache.fd, &buf); - file.close(); - } else { - query.entry.cache.fd = file.handle; - Fs.FileSystem.setMaxFd(file.handle); - } - } + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.text, symlink_path }) catch {}; + } + } else if (dir.abs_real_path.len > 0) { + var parts = [_]string{ dir.abs_real_path, query.entry.base() }; + var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; - defer { - if (r.fs.fs.needToCloseFiles()) { - if (query.entry.cache.fd != 0) { - var file = std.fs.File{ .handle = query.entry.cache.fd }; - file.close(); - query.entry.cache.fd = 0; - } - } - } + var out = r.fs.absBuf(&parts, &buf); - if (comptime FeatureFlags.store_file_descriptors) { + if (query.entry.cache.fd == 0) { + buf[out.len] = 0; + const span = buf[0..out.len :0]; + var file = try std.fs.openFileAbsoluteZ(span, .{ .read = true }); + + if (comptime !FeatureFlags.store_file_descriptors) { out = try std.os.getFdPath(query.entry.cache.fd, &buf); + file.close(); + } else { + query.entry.cache.fd = file.handle; + Fs.FileSystem.setMaxFd(file.handle); } + } - const symlink = try Fs.FileSystem.FilenameStore.instance.append(@TypeOf(out), out); - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text }) catch {}; + defer { + if (r.fs.fs.needToCloseFiles()) { + if (query.entry.cache.fd != 0) { + var file = std.fs.File{ .handle = query.entry.cache.fd }; + file.close(); + query.entry.cache.fd = 0; + } } - query.entry.cache.symlink = PathString.init(symlink); - if (result.file_fd == 0) result.file_fd = query.entry.cache.fd; + } + + if (comptime FeatureFlags.store_file_descriptors) { + out = try std.os.getFdPath(query.entry.cache.fd, &buf); + } - path.setRealpath(symlink); + const symlink = try Fs.FileSystem.FilenameStore.instance.append(@TypeOf(out), out); + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text }) catch {}; } + query.entry.cache.symlink = PathString.init(symlink); + if (result.file_fd == 0) result.file_fd = query.entry.cache.fd; + + path.setRealpath(symlink); } } } - - if (result.package_json) |package_json| { - result.module_type = switch (package_json.module_type) { - .esm, .cjs => package_json.module_type, - .unknown => result.module_type, - }; - } } - pub fn resolveWithoutSymlinks(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !?Result { - - // This implements the module resolution algorithm from node.js, which is - // described here: https://nodejs.org/api/modules.html#modules_all_together - var result: Result = Result{ .path_pair = PathPair{ .primary = Path.empty } }; + if (result.package_json) |package_json| { + result.module_type = switch (package_json.module_type) { + .esm, .cjs => package_json.module_type, + .unknown => result.module_type, + }; + } + } - // Return early if this is already an absolute path. In addition to asking - // the file system whether this is an absolute path, we also explicitly check - // whether it starts with a "/" and consider that an absolute path too. This - // is because relative paths can technically start with a "/" on Windows - // because it's not an absolute path on Windows. Then people might write code - // with imports that start with a "/" that works fine on Windows only to - // experience unexpected build failures later on other operating systems. 
- // Treating these paths as absolute paths on all platforms means Windows - // users will not be able to accidentally make use of these paths. - if (strings.startsWith(import_path, "/") or std.fs.path.isAbsolutePosix(import_path)) { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path}) catch {}; - } + pub fn resolveWithoutSymlinks(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !?Result { + + // This implements the module resolution algorithm from node.js, which is + // described here: https://nodejs.org/api/modules.html#modules_all_together + var result: Result = Result{ .path_pair = PathPair{ .primary = Path.empty } }; + + // Return early if this is already an absolute path. In addition to asking + // the file system whether this is an absolute path, we also explicitly check + // whether it starts with a "/" and consider that an absolute path too. This + // is because relative paths can technically start with a "/" on Windows + // because it's not an absolute path on Windows. Then people might write code + // with imports that start with a "/" that works fine on Windows only to + // experience unexpected build failures later on other operating systems. + // Treating these paths as absolute paths on all platforms means Windows + // users will not be able to accidentally make use of these paths. + if (strings.startsWith(import_path, "/") or std.fs.path.isAbsolutePosix(import_path)) { + if (r.debug_logs) |*debug| { + debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path}) catch {}; + } - // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file - if ((r.dirInfoCached(source_dir) catch null)) |_dir_info| { - const dir_info: *DirInfo = _dir_info; - if (dir_info.enclosing_tsconfig_json) |tsconfig| { - if (tsconfig.paths.count() > 0) { - if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| { + // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file + if ((r.dirInfoCached(source_dir) catch null)) |_dir_info| { + const dir_info: *DirInfo = _dir_info; + if (dir_info.enclosing_tsconfig_json) |tsconfig| { + if (tsconfig.paths.count() > 0) { + if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| { - // We don't set the directory fd here because it might remap an entirely different directory - return Result{ - .path_pair = res.path_pair, - .diff_case = res.diff_case, - .package_json = res.package_json, - .dirname_fd = res.dirname_fd, - .file_fd = res.file_fd, - }; - } + // We don't set the directory fd here because it might remap an entirely different directory + return Result{ + .path_pair = res.path_pair, + .diff_case = res.diff_case, + .package_json = res.package_json, + .dirname_fd = res.dirname_fd, + .file_fd = res.file_fd, + }; } } } + } - if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.contains(import_path)) { - // If the string literal in the source text is an absolute path and has - // been marked as an external module, mark it as *not* an absolute path. - // That way we preserve the literal text in the output and don't generate - // a relative path from the output directory to that path. 
- if (r.debug_logs) |*debug| { - debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path}) catch {}; - } - - return Result{ - .path_pair = .{ .primary = Path.init(import_path) }, - .is_external = true, - }; - } - - // Run node's resolution rules (e.g. adding ".js") - if (r.loadAsFileOrDirectory(import_path, kind)) |entry| { - return Result{ - .dirname_fd = entry.dirname_fd, - .path_pair = entry.path_pair, - .diff_case = entry.diff_case, - .package_json = entry.package_json, - .file_fd = entry.file_fd, - }; + if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.contains(import_path)) { + // If the string literal in the source text is an absolute path and has + // been marked as an external module, mark it as *not* an absolute path. + // That way we preserve the literal text in the output and don't generate + // a relative path from the output directory to that path. + if (r.debug_logs) |*debug| { + debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path}) catch {}; } - return null; + return Result{ + .path_pair = .{ .primary = Path.init(import_path) }, + .is_external = true, + }; } - // Check both relative and package paths for CSS URL tokens, with relative - // paths taking precedence over package paths to match Webpack behavior. - const is_package_path = isPackagePath(import_path); - var check_relative = !is_package_path or kind == .url; - var check_package = is_package_path; + // Run node's resolution rules (e.g. adding ".js") + if (r.loadAsFileOrDirectory(import_path, kind)) |entry| { + return Result{ + .dirname_fd = entry.dirname_fd, + .path_pair = entry.path_pair, + .diff_case = entry.diff_case, + .package_json = entry.package_json, + .file_fd = entry.file_fd, + }; + } - if (check_relative) { - const parts = [_]string{ source_dir, import_path }; - const abs_path = r.fs.absBuf(&parts, &relative_abs_path_buf); - - if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.contains(abs_path)) { - // If the string literal in the source text is an absolute path and has - // been marked as an external module, mark it as *not* an absolute path. - // That way we preserve the literal text in the output and don't generate - // a relative path from the output directory to that path. - if (r.debug_logs) |*debug| { - debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path}) catch {}; - } + return null; + } - return Result{ - .path_pair = .{ .primary = Path.init(r.fs.dirname_store.append(@TypeOf(abs_path), abs_path) catch unreachable) }, - .is_external = true, - }; + // Check both relative and package paths for CSS URL tokens, with relative + // paths taking precedence over package paths to match Webpack behavior. + const is_package_path = isPackagePath(import_path); + var check_relative = !is_package_path or kind == .url; + var check_package = is_package_path; + + if (check_relative) { + const parts = [_]string{ source_dir, import_path }; + const abs_path = r.fs.absBuf(&parts, &relative_abs_path_buf); + + if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.contains(abs_path)) { + // If the string literal in the source text is an absolute path and has + // been marked as an external module, mark it as *not* an absolute path. + // That way we preserve the literal text in the output and don't generate + // a relative path from the output directory to that path. 
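// A concrete trace of the check above (paths are illustrative):
//
//     // external.abs_paths = { "/app/vendor/analytics.js" }
//     // importer dir = "/app/src", specifier = "../vendor/analytics.js"
//     // absBuf join -> abs_path = "/app/vendor/analytics.js", which is in the
//     // set, so the result is returned with .is_external = true and the
//     // literal specifier survives into the output instead of being rewritten
//     // relative to the output directory.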
+ if (r.debug_logs) |*debug| { + debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path}) catch {}; } - // Check the "browser" map - if (r.dirInfoCached(std.fs.path.dirname(abs_path) orelse unreachable) catch null) |_import_dir_info| { - if (_import_dir_info.getEnclosingBrowserScope()) |import_dir_info| { - const pkg = import_dir_info.package_json.?; - if (r.checkBrowserMap( - import_dir_info, - abs_path, - .AbsolutePath, - )) |remap| { + return Result{ + .path_pair = .{ .primary = Path.init(r.fs.dirname_store.append(@TypeOf(abs_path), abs_path) catch unreachable) }, + .is_external = true, + }; + } - // Is the path disabled? - if (remap.len == 0) { - var _path = Path.init(r.fs.dirname_store.append(string, abs_path) catch unreachable); - _path.is_disabled = true; - return Result{ - .path_pair = PathPair{ - .primary = _path, - }, - }; - } + // Check the "browser" map + if (r.dirInfoCached(std.fs.path.dirname(abs_path) orelse unreachable) catch null) |_import_dir_info| { + if (_import_dir_info.getEnclosingBrowserScope()) |import_dir_info| { + const pkg = import_dir_info.package_json.?; + if (r.checkBrowserMap( + import_dir_info, + abs_path, + .AbsolutePath, + )) |remap| { - if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| { - result = Result{ - .path_pair = _result.path_pair, - .diff_case = _result.diff_case, - .module_type = pkg.module_type, - .dirname_fd = _result.dirname_fd, - .package_json = pkg, - }; - check_relative = false; - check_package = false; - } + // Is the path disabled? + if (remap.len == 0) { + var _path = Path.init(r.fs.dirname_store.append(string, abs_path) catch unreachable); + _path.is_disabled = true; + return Result{ + .path_pair = PathPair{ + .primary = _path, + }, + }; + } + + if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| { + result = Result{ + .path_pair = _result.path_pair, + .diff_case = _result.diff_case, + .module_type = pkg.module_type, + .dirname_fd = _result.dirname_fd, + .package_json = pkg, + }; + check_relative = false; + check_package = false; } } } + } - if (check_relative) { - if (r.loadAsFileOrDirectory(abs_path, kind)) |res| { - check_package = false; - result = Result{ - .path_pair = res.path_pair, - .diff_case = res.diff_case, - .dirname_fd = res.dirname_fd, - .package_json = res.package_json, - }; - } else if (!check_package) { - return null; - } + if (check_relative) { + if (r.loadAsFileOrDirectory(abs_path, kind)) |res| { + check_package = false; + result = Result{ + .path_pair = res.path_pair, + .diff_case = res.diff_case, + .dirname_fd = res.dirname_fd, + .package_json = res.package_json, + }; + } else if (!check_package) { + return null; } } + } - if (check_package) { - if (r.opts.polyfill_node_globals) { - var import_path_without_node_prefix = import_path; - const had_node_prefix = import_path_without_node_prefix.len > "node:".len and - strings.eqlComptime(import_path_without_node_prefix[0.."node:".len], "node:"); - - import_path_without_node_prefix = if (had_node_prefix) - import_path_without_node_prefix["node:".len..] - else - import_path_without_node_prefix; - - if (NodeFallbackModules.Map.get(import_path_without_node_prefix)) |*fallback_module| { - result.path_pair.primary = fallback_module.path; - result.module_type = .cjs; - result.package_json = @intToPtr(*PackageJSON, @ptrToInt(fallback_module.package_json)); - result.is_from_node_modules = true; - return result; - // "node:* - // "fs" - // "fs/*" - // These are disabled! 
- } else if (had_node_prefix or - (import_path_without_node_prefix.len >= 2 and strings.eqlComptimeIgnoreLen(import_path_without_node_prefix[0..2], "fs") and - (import_path_without_node_prefix.len == 2 or - import_path_without_node_prefix[3] == '/'))) - { - result.path_pair.primary.namespace = "node"; - result.path_pair.primary.text = import_path_without_node_prefix; - result.path_pair.primary.name = Fs.PathName.init(import_path_without_node_prefix); - result.module_type = .cjs; - result.path_pair.primary.is_disabled = true; - result.is_from_node_modules = true; - return result; - } + if (check_package) { + if (r.opts.polyfill_node_globals) { + var import_path_without_node_prefix = import_path; + const had_node_prefix = import_path_without_node_prefix.len > "node:".len and + strings.eqlComptime(import_path_without_node_prefix[0.."node:".len], "node:"); + + import_path_without_node_prefix = if (had_node_prefix) + import_path_without_node_prefix["node:".len..] + else + import_path_without_node_prefix; + + if (NodeFallbackModules.Map.get(import_path_without_node_prefix)) |*fallback_module| { + result.path_pair.primary = fallback_module.path; + result.module_type = .cjs; + result.package_json = @intToPtr(*PackageJSON, @ptrToInt(fallback_module.package_json)); + result.is_from_node_modules = true; + return result; + // "node:* + // "fs" + // "fs/*" + // These are disabled! + } else if (had_node_prefix or + (import_path_without_node_prefix.len >= 2 and strings.eqlComptimeIgnoreLen(import_path_without_node_prefix[0..2], "fs") and + (import_path_without_node_prefix.len == 2 or + import_path_without_node_prefix[3] == '/'))) + { + result.path_pair.primary.namespace = "node"; + result.path_pair.primary.text = import_path_without_node_prefix; + result.path_pair.primary.name = Fs.PathName.init(import_path_without_node_prefix); + result.module_type = .cjs; + result.path_pair.primary.is_disabled = true; + result.is_from_node_modules = true; + return result; } + } - // Check for external packages first - if (r.opts.external.node_modules.count() > 0) { - var query = import_path; - while (true) { - if (r.opts.external.node_modules.contains(query)) { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query}) catch {}; - } - return Result{ - .path_pair = .{ .primary = Path.init(query) }, - .is_external = true, - }; + // Check for external packages first + if (r.opts.external.node_modules.count() > 0) { + var query = import_path; + while (true) { + if (r.opts.external.node_modules.contains(query)) { + if (r.debug_logs) |*debug| { + debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query}) catch {}; } - - // If the module "foo" has been marked as external, we also want to treat - // paths into that module such as "foo/bar" as external too. - var slash = strings.lastIndexOfChar(query, '/') orelse break; - query = query[0..slash]; + return Result{ + .path_pair = .{ .primary = Path.init(query) }, + .is_external = true, + }; } + + // If the module "foo" has been marked as external, we also want to treat + // paths into that module such as "foo/bar" as external too. 
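// That parent walk as a standalone helper (std-only sketch; the set type is
// illustrative, where the real code consults r.opts.external.node_modules):
//
//     fn matchesExternalPackage(externals: std.StringHashMap(void), import_path: []const u8) bool {
//         var query = import_path;
//         while (true) {
//             if (externals.contains(query)) return true;
//             // trim one segment: "foo/bar/baz" -> "foo/bar" -> "foo"
//             const slash = std.mem.lastIndexOfScalar(u8, query, '/') orelse return false;
//             query = query[0..slash];
//         }
//     }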
+ var slash = strings.lastIndexOfChar(query, '/') orelse break; + query = query[0..slash]; } + } - const source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return null; + const source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return null; - // Support remapping one package path to another via the "browser" field - if (source_dir_info.getEnclosingBrowserScope()) |browser_scope| { - if (browser_scope.package_json) |package_json| { - if (r.checkBrowserMap( - browser_scope, - import_path, - .PackagePath, - )) |remapped| { - if (remapped.len == 0) { - // "browser": {"module": false} - if (r.loadNodeModules(import_path, kind, source_dir_info)) |node_module| { - var pair = node_module.path_pair; - pair.primary.is_disabled = true; - if (pair.secondary != null) { - pair.secondary.?.is_disabled = true; - } - return Result{ - .path_pair = pair, - .dirname_fd = node_module.dirname_fd, - .diff_case = node_module.diff_case, - .package_json = package_json, - }; + // Support remapping one package path to another via the "browser" field + if (source_dir_info.getEnclosingBrowserScope()) |browser_scope| { + if (browser_scope.package_json) |package_json| { + if (r.checkBrowserMap( + browser_scope, + import_path, + .PackagePath, + )) |remapped| { + if (remapped.len == 0) { + // "browser": {"module": false} + if (r.loadNodeModules(import_path, kind, source_dir_info)) |node_module| { + var pair = node_module.path_pair; + pair.primary.is_disabled = true; + if (pair.secondary != null) { + pair.secondary.?.is_disabled = true; } - } else { - var primary = Path.init(import_path); - primary.is_disabled = true; return Result{ - .path_pair = PathPair{ .primary = primary }, - // this might not be null? i think it is - .diff_case = null, + .path_pair = pair, + .dirname_fd = node_module.dirname_fd, + .diff_case = node_module.diff_case, + .package_json = package_json, }; } + } else { + var primary = Path.init(import_path); + primary.is_disabled = true; + return Result{ + .path_pair = PathPair{ .primary = primary }, + // this might not be null? 
i think it is + .diff_case = null, + }; } } } + } - if (r.resolveWithoutRemapping(source_dir_info, import_path, kind)) |res| { - result.path_pair = res.path_pair; - result.dirname_fd = res.dirname_fd; - result.file_fd = res.file_fd; - result.package_json = res.package_json; - result.diff_case = res.diff_case; - result.is_from_node_modules = result.is_from_node_modules or res.is_node_module; + if (r.resolveWithoutRemapping(source_dir_info, import_path, kind)) |res| { + result.path_pair = res.path_pair; + result.dirname_fd = res.dirname_fd; + result.file_fd = res.file_fd; + result.package_json = res.package_json; + result.diff_case = res.diff_case; + result.is_from_node_modules = result.is_from_node_modules or res.is_node_module; - if (res.path_pair.primary.is_disabled and res.path_pair.secondary == null) { - return result; - } + if (res.path_pair.primary.is_disabled and res.path_pair.secondary == null) { + return result; + } - if (res.package_json) |pkg| { - var base_dir_info = res.dir_info orelse (r.readDirInfo(res.path_pair.primary.name.dir) catch null) orelse return result; - if (base_dir_info.getEnclosingBrowserScope()) |browser_scope| { - if (r.checkBrowserMap( - browser_scope, - res.path_pair.primary.text, - .AbsolutePath, - )) |remap| { - if (remap.len == 0) { - result.path_pair.primary.is_disabled = true; - result.path_pair.primary = Fs.Path.initWithNamespace(remap, "file"); - } else { - if (r.resolveWithoutRemapping(base_dir_info, remap, kind)) |remapped| { - result.path_pair = remapped.path_pair; - result.dirname_fd = remapped.dirname_fd; - result.file_fd = remapped.file_fd; - result.package_json = remapped.package_json; - result.diff_case = remapped.diff_case; - result.is_from_node_modules = result.is_from_node_modules or remapped.is_node_module; - return result; - } + if (res.package_json) |pkg| { + var base_dir_info = res.dir_info orelse (r.readDirInfo(res.path_pair.primary.name.dir) catch null) orelse return result; + if (base_dir_info.getEnclosingBrowserScope()) |browser_scope| { + if (r.checkBrowserMap( + browser_scope, + res.path_pair.primary.text, + .AbsolutePath, + )) |remap| { + if (remap.len == 0) { + result.path_pair.primary.is_disabled = true; + result.path_pair.primary = Fs.Path.initWithNamespace(remap, "file"); + } else { + if (r.resolveWithoutRemapping(base_dir_info, remap, kind)) |remapped| { + result.path_pair = remapped.path_pair; + result.dirname_fd = remapped.dirname_fd; + result.file_fd = remapped.file_fd; + result.package_json = remapped.package_json; + result.diff_case = remapped.diff_case; + result.is_from_node_modules = result.is_from_node_modules or remapped.is_node_module; + return result; } } } } - - return result; - } else { - // Note: node's "self references" are not currently supported - return null; } - } - return result; + return result; + } else { + // Note: node's "self references" are not currently supported + return null; + } } - pub fn packageJSONForResolvedNodeModule( - r: *ThisResolver, - result: *const Result, - ) ?*const PackageJSON { - return @call(.{ .modifier = .always_inline }, packageJSONForResolvedNodeModuleWithIgnoreMissingName, .{ r, result, true }); - } + return result; + } - // This is a fallback, hopefully not called often. It should be relatively quick because everything should be in the cache. 
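// The shape of that fallback, as implemented below: climb parent directories
// and, unless told to ignore missing names, skip "nameless" package.json
// files that only adjust main fields (react-bootstrap ships one). A commented
// sketch of the name-requiring variant:
//
//     var dir_info = start_dir_info; // hypothetical starting point
//     while (true) {
//         if (dir_info.package_json) |pkg| {
//             // an unnamed manifest is a main-field shim; keep climbing to
//             // the real, named package installed from npm
//             if (pkg.name.len > 0) return pkg;
//         }
//         dir_info = dir_info.getParent() orelse return null;
//     }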
- fn packageJSONForResolvedNodeModuleWithIgnoreMissingName( - r: *ThisResolver, - result: *const Result, - comptime ignore_missing_name: bool, - ) ?*const PackageJSON { - var dir_info = (r.dirInfoCached(result.path_pair.primary.name.dir) catch null) orelse return null; - while (true) { - if (dir_info.package_json) |pkg| { - // if it doesn't have a name, assume it's something just for adjusting the main fields (react-bootstrap does this) - // In that case, we really would like the top-level package that you download from NPM - // so we ignore any unnamed packages - if (comptime !ignore_missing_name) { - if (pkg.name.len > 0) { - return pkg; - } - } else { + pub fn packageJSONForResolvedNodeModule( + r: *ThisResolver, + result: *const Result, + ) ?*const PackageJSON { + return @call(.{ .modifier = .always_inline }, packageJSONForResolvedNodeModuleWithIgnoreMissingName, .{ r, result, true }); + } + + // This is a fallback, hopefully not called often. It should be relatively quick because everything should be in the cache. + fn packageJSONForResolvedNodeModuleWithIgnoreMissingName( + r: *ThisResolver, + result: *const Result, + comptime ignore_missing_name: bool, + ) ?*const PackageJSON { + var dir_info = (r.dirInfoCached(result.path_pair.primary.name.dir) catch null) orelse return null; + while (true) { + if (dir_info.package_json) |pkg| { + // if it doesn't have a name, assume it's something just for adjusting the main fields (react-bootstrap does this) + // In that case, we really would like the top-level package that you download from NPM + // so we ignore any unnamed packages + if (comptime !ignore_missing_name) { + if (pkg.name.len > 0) { return pkg; } + } else { + return pkg; } - - dir_info = dir_info.getParent() orelse return null; } - unreachable; + dir_info = dir_info.getParent() orelse return null; } - const node_module_root_string = std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str; - - pub fn rootNodeModulePackageJSON( - r: *ThisResolver, - result: *const Result, - ) ?RootPathPair { - const path = (result.pathConst() orelse return null); - var absolute = path.text; - // /foo/node_modules/@babel/standalone/index.js - // ^------------^ - var end = strings.lastIndexOf(absolute, node_module_root_string) orelse brk: { - // try non-symlinked version - if (path.pretty.len != absolute.len) { - absolute = path.pretty; - break :brk strings.lastIndexOf(absolute, node_module_root_string); - } - break :brk null; - } orelse return null; - end += node_module_root_string.len; - - const is_scoped_package = absolute[end] == '@'; + unreachable; + } + const node_module_root_string = std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str; + + pub fn rootNodeModulePackageJSON( + r: *ThisResolver, + result: *const Result, + ) ?RootPathPair { + const path = (result.pathConst() orelse return null); + var absolute = path.text; + // /foo/node_modules/@babel/standalone/index.js + // ^------------^ + var end = strings.lastIndexOf(absolute, node_module_root_string) orelse brk: { + // try non-symlinked version + if (path.pretty.len != absolute.len) { + absolute = path.pretty; + break :brk strings.lastIndexOf(absolute, node_module_root_string); + } + + break :brk null; + } orelse return null; + end += node_module_root_string.len; + + const is_scoped_package = absolute[end] == '@'; + end += strings.indexOfChar(absolute[end..], std.fs.path.sep) orelse return null; + + // /foo/node_modules/@babel/standalone/index.js + // ^ + if (is_scoped_package) { + end += 1; end += 
strings.indexOfChar(absolute[end..], std.fs.path.sep) orelse return null; + } - // /foo/node_modules/@babel/standalone/index.js - // ^ - if (is_scoped_package) { - end += 1; - end += strings.indexOfChar(absolute[end..], std.fs.path.sep) orelse return null; - } - - end += 1; + end += 1; - // /foo/node_modules/@babel/standalone/index.js - // ^ - const slice = absolute[0..end]; - - // Try to avoid the hash table lookup whenever possible - // That can cause filesystem lookups in parent directories and it requires a lock - if (result.package_json) |pkg| { - if (strings.eql(slice, pkg.source.path.name.dirWithTrailingSlash())) { - return RootPathPair{ - .package_json = pkg, - .base_path = slice, - }; - } - } + // /foo/node_modules/@babel/standalone/index.js + // ^ + const slice = absolute[0..end]; - { - var dir_info = (r.dirInfoCached(slice) catch null) orelse return null; + // Try to avoid the hash table lookup whenever possible + // That can cause filesystem lookups in parent directories and it requires a lock + if (result.package_json) |pkg| { + if (strings.eql(slice, pkg.source.path.name.dirWithTrailingSlash())) { return RootPathPair{ + .package_json = pkg, .base_path = slice, - .package_json = dir_info.package_json.?, }; } } - threadlocal var esm_subpath_buf: [512]u8 = undefined; - threadlocal var esm_absolute_package_path: [std.fs.MAX_PATH_BYTES]u8 = undefined; - threadlocal var esm_absolute_package_path_joined: [std.fs.MAX_PATH_BYTES]u8 = undefined; - pub fn loadNodeModules(r: *ThisResolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult { - var dir_info = _dir_info; + { + var dir_info = (r.dirInfoCached(slice) catch null) orelse return null; + return RootPathPair{ + .base_path = slice, + .package_json = dir_info.package_json.?, + }; + } + } + + threadlocal var esm_subpath_buf: [512]u8 = undefined; + threadlocal var esm_absolute_package_path: [std.fs.MAX_PATH_BYTES]u8 = undefined; + threadlocal var esm_absolute_package_path_joined: [std.fs.MAX_PATH_BYTES]u8 = undefined; + pub fn loadNodeModules(r: *ThisResolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult { + var dir_info = _dir_info; + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path }) catch {}; + debug.increaseIndent() catch {}; + } + + defer { if (r.debug_logs) |*debug| { - debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path }) catch {}; - debug.increaseIndent() catch {}; + debug.decreaseIndent() catch {}; } + } - defer { - if (r.debug_logs) |*debug| { - debug.decreaseIndent() catch {}; + // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file + + if (dir_info.enclosing_tsconfig_json) |tsconfig| { + // Try path substitutions first + if (tsconfig.paths.count() > 0) { + if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| { + return res; } } - // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file + // Try looking up the path relative to the base URL + if (tsconfig.hasBaseURL()) { + const base = tsconfig.base_url; + const paths = [_]string{ base, import_path }; + const abs = r.fs.absBuf(&paths, &load_as_file_or_directory_via_tsconfig_base_path); - if (dir_info.enclosing_tsconfig_json) |tsconfig| { - // Try path substitutions first - if (tsconfig.paths.count() > 0) { - if (r.matchTSConfigPaths(tsconfig, import_path, kind)) 
|res| { - return res; - } + if (r.loadAsFileOrDirectory(abs, kind)) |res| { + return res; } + // r.allocator.free(abs); + } + } - // Try looking up the path relative to the base URL - if (tsconfig.hasBaseURL()) { - const base = tsconfig.base_url; - const paths = [_]string{ base, import_path }; - const abs = r.fs.absBuf(&paths, &load_as_file_or_directory_via_tsconfig_base_path); + const esm_ = ESModule.Package.parse(import_path, &esm_subpath_buf); - if (r.loadAsFileOrDirectory(abs, kind)) |res| { - return res; - } - // r.allocator.free(abs); + // Then check for the package in any enclosing "node_modules" directories + while (true) { + // Skip directories that are themselves called "node_modules", since we + // don't ever want to search for "node_modules/node_modules" + if (dir_info.has_node_modules) { + var _paths = [_]string{ dir_info.abs_path, "node_modules", import_path }; + const abs_path = r.fs.absBuf(&_paths, &node_modules_check_buf); + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path}) catch {}; } - } - const esm_ = ESModule.Package.parse(import_path, &esm_subpath_buf); - - // Then check for the package in any enclosing "node_modules" directories - while (true) { - // Skip directories that are themselves called "node_modules", since we - // don't ever want to search for "node_modules/node_modules" - if (dir_info.has_node_modules) { - var _paths = [_]string{ dir_info.abs_path, "node_modules", import_path }; - const abs_path = r.fs.absBuf(&_paths, &node_modules_check_buf); - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path}) catch {}; - } + if (esm_) |esm| { + const abs_package_path = brk: { + var parts = [_]string{ dir_info.abs_path, "node_modules", esm.name }; + break :brk r.fs.absBuf(&parts, &esm_absolute_package_path); + }; - if (esm_) |esm| { - const abs_package_path = brk: { - var parts = [_]string{ dir_info.abs_path, "node_modules", esm.name }; - break :brk r.fs.absBuf(&parts, &esm_absolute_package_path); - }; + if (r.dirInfoCached(abs_package_path) catch null) |pkg_dir_info| { + if (pkg_dir_info.package_json) |package_json| { + if (package_json.exports) |exports_map| { - if (r.dirInfoCached(abs_package_path) catch null) |pkg_dir_info| { - if (pkg_dir_info.package_json) |package_json| { - if (package_json.exports) |exports_map| { + // The condition set is determined by the kind of import - // The condition set is determined by the kind of import + const esmodule = ESModule{ + .conditions = switch (kind) { + ast.ImportKind.stmt, ast.ImportKind.dynamic => r.opts.conditions.import, + ast.ImportKind.require, ast.ImportKind.require_resolve => r.opts.conditions.require, + else => r.opts.conditions.default, + }, + .allocator = r.allocator, + .debug_logs = if (r.debug_logs) |*debug| debug else null, + }; - const esmodule = ESModule{ - .conditions = switch (kind) { - ast.ImportKind.stmt, ast.ImportKind.dynamic => r.opts.conditions.import, - ast.ImportKind.require, ast.ImportKind.require_resolve => r.opts.conditions.require, - else => r.opts.conditions.default, - }, - .allocator = r.allocator, - .debug_logs = if (r.debug_logs) |*debug| debug else null, + // Resolve against the path "/", then join it with the absolute + // directory path. This is done because ESM package resolution uses + // URLs while our path resolution uses file system paths. We don't + // want problems due to Windows paths, which are very unlike URL + // paths. 
We also want to avoid any "%" characters in the absolute + // directory path accidentally being interpreted as URL escapes. + var esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root); + + if ((esm_resolution.status == .Inexact or esm_resolution.status == .Exact) and strings.startsWith(esm_resolution.path, "/")) { + const abs_esm_path: string = brk: { + var parts = [_]string{ + abs_package_path, + esm_resolution.path[1..], + }; + break :brk r.fs.absBuf(&parts, &esm_absolute_package_path_joined); }; - // Resolve against the path "/", then join it with the absolute - // directory path. This is done because ESM package resolution uses - // URLs while our path resolution uses file system paths. We don't - // want problems due to Windows paths, which are very unlike URL - // paths. We also want to avoid any "%" characters in the absolute - // directory path accidentally being interpreted as URL escapes. - var esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root); - - if ((esm_resolution.status == .Inexact or esm_resolution.status == .Exact) and strings.startsWith(esm_resolution.path, "/")) { - const abs_esm_path: string = brk: { - var parts = [_]string{ - abs_package_path, - esm_resolution.path[1..], + switch (esm_resolution.status) { + .Exact => { + const resolved_dir_info = (r.dirInfoCached(std.fs.path.dirname(abs_esm_path).?) catch null) orelse { + esm_resolution.status = .ModuleNotFound; + return null; + }; + const entries = resolved_dir_info.getEntries() orelse { + esm_resolution.status = .ModuleNotFound; + return null; + }; + const entry_query = entries.get(std.fs.path.basename(abs_esm_path)) orelse { + esm_resolution.status = .ModuleNotFound; + return null; }; - break :brk r.fs.absBuf(&parts, &esm_absolute_package_path_joined); - }; - switch (esm_resolution.status) { - .Exact => { - const resolved_dir_info = (r.dirInfoCached(std.fs.path.dirname(abs_esm_path).?) catch null) orelse { - esm_resolution.status = .ModuleNotFound; - return null; - }; - const entries = resolved_dir_info.getEntries() orelse { - esm_resolution.status = .ModuleNotFound; - return null; - }; - const entry_query = entries.get(std.fs.path.basename(abs_esm_path)) orelse { - esm_resolution.status = .ModuleNotFound; - return null; - }; - - if (entry_query.entry.kind(&r.fs.fs) == .dir) { - esm_resolution.status = .UnsupportedDirectoryImport; - return null; - } + if (entry_query.entry.kind(&r.fs.fs) == .dir) { + esm_resolution.status = .UnsupportedDirectoryImport; + return null; + } - const absolute_out_path = brk: { - if (entry_query.entry.abs_path.isEmpty()) { - entry_query.entry.abs_path = - PathString.init(r.fs.dirname_store.append(@TypeOf(abs_esm_path), abs_esm_path) catch unreachable); - } - break :brk entry_query.entry.abs_path.slice(); - }; - - return MatchResult{ - .path_pair = PathPair{ - .primary = Path.initWithNamespace(absolute_out_path, "file"), - }, - .dirname_fd = entries.fd, - .file_fd = entry_query.entry.cache.fd, - .dir_info = resolved_dir_info, - .diff_case = entry_query.diff_case, - .is_node_module = true, - .package_json = resolved_dir_info.package_json orelse package_json, - }; - }, - .Inexact => { - // If this was resolved against an expansion key ending in a "/" - // instead of a "*", we need to try CommonJS-style implicit - // extension and/or directory detection. 
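Both the .Exact and .Inexact branches ultimately map the URL-style "exports" resolution back onto the filesystem by gluing the virtual-root result onto the package directory. A minimal standalone sketch of that rejoin step (the function name and fixed buffer are hypothetical, not part of this diff's ESModule API):

const std = @import("std");

// Rejoin an exports-map resolution that was computed against the virtual
// root "/" with the package directory on disk, mirroring the
// `esm_resolution.path[1..]` join in the code above.
fn joinEsmResolution(buf: []u8, abs_package_path: []const u8, resolved: []const u8) ![]const u8 {
    // A successful resolution always starts with "/", so strip it before joining.
    std.debug.assert(resolved.len > 0 and resolved[0] == '/');
    return std.fmt.bufPrint(buf, "{s}/{s}", .{
        std.mem.trimRight(u8, abs_package_path, "/"),
        resolved[1..],
    });
}

test "rejoin an exports-map result with the package directory" {
    var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
    const joined = try joinEsmResolution(&buf, "/foo/node_modules/@babel/standalone", "/lib/index.js");
    try std.testing.expectEqualStrings("/foo/node_modules/@babel/standalone/lib/index.js", joined);
}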
- if (r.loadAsFileOrDirectory(abs_esm_path, kind)) |*res| { - res.is_node_module = true; - res.package_json = res.package_json orelse package_json; - return res.*; + const absolute_out_path = brk: { + if (entry_query.entry.abs_path.isEmpty()) { + entry_query.entry.abs_path = + PathString.init(r.fs.dirname_store.append(@TypeOf(abs_esm_path), abs_esm_path) catch unreachable); } - esm_resolution.status = .ModuleNotFound; - return null; - }, - else => unreachable, - } + break :brk entry_query.entry.abs_path.slice(); + }; + + return MatchResult{ + .path_pair = PathPair{ + .primary = Path.initWithNamespace(absolute_out_path, "file"), + }, + .dirname_fd = entries.fd, + .file_fd = entry_query.entry.cache.fd, + .dir_info = resolved_dir_info, + .diff_case = entry_query.diff_case, + .is_node_module = true, + .package_json = resolved_dir_info.package_json orelse package_json, + }; + }, + .Inexact => { + // If this was resolved against an expansion key ending in a "/" + // instead of a "*", we need to try CommonJS-style implicit + // extension and/or directory detection. + if (r.loadAsFileOrDirectory(abs_esm_path, kind)) |*res| { + res.is_node_module = true; + res.package_json = res.package_json orelse package_json; + return res.*; + } + esm_resolution.status = .ModuleNotFound; + return null; + }, + else => unreachable, } } } } } - - if (r.loadAsFileOrDirectory(abs_path, kind)) |res| { - return res; - } - // r.allocator.free(abs_path); } - dir_info = dir_info.getParent() orelse break; + if (r.loadAsFileOrDirectory(abs_path, kind)) |res| { + return res; + } + // r.allocator.free(abs_path); } - // Mostly to cut scope, we don't resolve `NODE_PATH` environment variable. - // But also: https://github.com/nodejs/node/issues/38128#issuecomment-814969356 - - return null; + dir_info = dir_info.getParent() orelse break; } - pub fn resolveWithoutRemapping(r: *ThisResolver, source_dir_info: *DirInfo, import_path: string, kind: ast.ImportKind) ?MatchResult { - if (isPackagePath(import_path)) { - return r.loadNodeModules(import_path, kind, source_dir_info); - } else { - const paths = [_]string{ source_dir_info.abs_path, import_path }; - var resolved = r.fs.absBuf(&paths, &resolve_without_remapping_buf); - return r.loadAsFileOrDirectory(resolved, kind); - } - } - - pub fn parseTSConfig( - r: *ThisResolver, - file: string, - dirname_fd: StoredFileDescriptorType, - ) !?*TSConfigJSON { - const entry = try r.caches.fs.readFile( - r.fs, - file, - dirname_fd, - false, - null, - ); - const key_path = Path.init(file); - - const source = logger.Source.initPathString(key_path.text, entry.contents); - const file_dir = source.path.sourceDir(); + // Mostly to cut scope, we don't resolve `NODE_PATH` environment variable. 
+ // But also: https://github.com/nodejs/node/issues/38128#issuecomment-814969356 - var result = (try TSConfigJSON.parse(r.allocator, r.log, source, @TypeOf(r.caches.json), &r.caches.json)) orelse return null; + return null; + } - if (result.hasBaseURL()) { - // this might leak - if (!std.fs.path.isAbsolute(result.base_url)) { - const paths = [_]string{ file_dir, result.base_url }; - result.base_url = r.fs.dirname_store.append(string, r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable; - } - } + pub fn resolveWithoutRemapping(r: *ThisResolver, source_dir_info: *DirInfo, import_path: string, kind: ast.ImportKind) ?MatchResult { + if (isPackagePath(import_path)) { + return r.loadNodeModules(import_path, kind, source_dir_info); + } else { + const paths = [_]string{ source_dir_info.abs_path, import_path }; + var resolved = r.fs.absBuf(&paths, &resolve_without_remapping_buf); + return r.loadAsFileOrDirectory(resolved, kind); + } + } - if (result.paths.count() > 0 and (result.base_url_for_paths.len == 0 or !std.fs.path.isAbsolute(result.base_url_for_paths))) { - // this might leak + pub fn parseTSConfig( + r: *ThisResolver, + file: string, + dirname_fd: StoredFileDescriptorType, + ) !?*TSConfigJSON { + const entry = try r.caches.fs.readFile( + r.fs, + file, + dirname_fd, + false, + null, + ); + const key_path = Path.init(file); + + const source = logger.Source.initPathString(key_path.text, entry.contents); + const file_dir = source.path.sourceDir(); + + var result = (try TSConfigJSON.parse(r.allocator, r.log, source, @TypeOf(r.caches.json), &r.caches.json)) orelse return null; + + if (result.hasBaseURL()) { + // this might leak + if (!std.fs.path.isAbsolute(result.base_url)) { const paths = [_]string{ file_dir, result.base_url }; - result.base_url_for_paths = r.fs.dirname_store.append(string, r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable; + result.base_url = r.fs.dirname_store.append(string, r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable; } - - return result; } - // TODO: - pub fn prettyPath(r: *ThisResolver, path: Path) string { - return path.text; + if (result.paths.count() > 0 and (result.base_url_for_paths.len == 0 or !std.fs.path.isAbsolute(result.base_url_for_paths))) { + // this might leak + const paths = [_]string{ file_dir, result.base_url }; + result.base_url_for_paths = r.fs.dirname_store.append(string, r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable; } - pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON { - if (!cache_files or r.opts.node_modules_bundle != null) { - const pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true) orelse return null; - var _pkg = try r.allocator.create(PackageJSON); - _pkg.* = pkg; - return _pkg; - } else { - const pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, false) orelse return null; - var _pkg = try r.allocator.create(PackageJSON); - _pkg.* = pkg; - return _pkg; - } - } + return result; + } - fn dirInfoCached( - r: *ThisResolver, - path: string, - ) !?*DirInfo { - return try r.dirInfoCachedMaybeLog(path, true, true); - } + // TODO: + pub fn prettyPath(r: *ThisResolver, path: Path) string { + return path.text; + } - pub fn readDirInfo( - r: *ThisResolver, - path: string, - ) !?*DirInfo { - return try r.dirInfoCachedMaybeLog(path, false, true); - } + pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON { + const pkg = PackageJSON.parse(ThisResolver, r, file, 
dirname_fd, true) orelse return null; + var _pkg = try r.allocator.create(PackageJSON); + _pkg.* = pkg; + return _pkg; + } - pub fn readDirInfoIgnoreError( - r: *ThisResolver, - path: string, - ) ?*const DirInfo { - return r.dirInfoCachedMaybeLog(path, false, true) catch null; - } + fn dirInfoCached( + r: *ThisResolver, + path: string, + ) !?*DirInfo { + return try r.dirInfoCachedMaybeLog(path, true, true); + } - pub inline fn readDirInfoCacheOnly( - r: *ThisResolver, - path: string, - ) ?*DirInfo { - return r.dir_cache.get(path); - } + pub fn readDirInfo( + r: *ThisResolver, + path: string, + ) !?*DirInfo { + return try r.dirInfoCachedMaybeLog(path, false, true); + } + + pub fn readDirInfoIgnoreError( + r: *ThisResolver, + path: string, + ) ?*const DirInfo { + return r.dirInfoCachedMaybeLog(path, false, true) catch null; + } - inline fn dirInfoCachedMaybeLog(r: *ThisResolver, __path: string, comptime enable_logging: bool, comptime follow_symlinks: bool) !?*DirInfo { - r.mutex.lock(); - defer r.mutex.unlock(); - var _path = __path; - if (strings.eqlComptime(_path, "./") or strings.eqlComptime(_path, ".")) - _path = r.fs.top_level_dir; + pub inline fn readDirInfoCacheOnly( + r: *ThisResolver, + path: string, + ) ?*DirInfo { + return r.dir_cache.get(path); + } - const top_result = try r.dir_cache.getOrPut(_path); - if (top_result.status != .unknown) { - return r.dir_cache.atIndex(top_result.index); - } + inline fn dirInfoCachedMaybeLog(r: *ThisResolver, __path: string, comptime enable_logging: bool, comptime follow_symlinks: bool) !?*DirInfo { + r.mutex.lock(); + defer r.mutex.unlock(); + var _path = __path; + if (strings.eqlComptime(_path, "./") or strings.eqlComptime(_path, ".")) + _path = r.fs.top_level_dir; - var i: i32 = 1; - std.mem.copy(u8, &dir_info_uncached_path_buf, _path); - var path = dir_info_uncached_path_buf[0.._path.len]; + const top_result = try r.dir_cache.getOrPut(_path); + if (top_result.status != .unknown) { + return r.dir_cache.atIndex(top_result.index); + } + + var i: i32 = 1; + std.mem.copy(u8, &dir_info_uncached_path_buf, _path); + var path = dir_info_uncached_path_buf[0.._path.len]; - _dir_entry_paths_to_resolve[0] = (DirEntryResolveQueueItem{ .result = top_result, .unsafe_path = path, .safe_path = "" }); - var top = Dirname.dirname(path); + _dir_entry_paths_to_resolve[0] = (DirEntryResolveQueueItem{ .result = top_result, .unsafe_path = path, .safe_path = "" }); + var top = Dirname.dirname(path); - var top_parent: allocators.Result = allocators.Result{ - .index = allocators.NotFound, - .hash = 0, - .status = .not_found, + var top_parent: allocators.Result = allocators.Result{ + .index = allocators.NotFound, + .hash = 0, + .status = .not_found, + }; + const root_path = if (comptime isWindows) + std.fs.path.diskDesignator(path) + else + // we cannot just use "/" + // we will write to the buffer past the ptr len so it must be a non-const buffer + path[0..1]; + var rfs: *Fs.FileSystem.RealFS = &r.fs.fs; + + rfs.entries_mutex.lock(); + defer rfs.entries_mutex.unlock(); + + while (!strings.eql(top, root_path)) : (top = Dirname.dirname(top)) { + var result = try r.dir_cache.getOrPut(top); + if (result.status != .unknown) { + top_parent = result; + break; + } + _dir_entry_paths_to_resolve[@intCast(usize, i)] = DirEntryResolveQueueItem{ + .unsafe_path = top, + .result = result, + .fd = 0, }; - const root_path = if (comptime isWindows) - std.fs.path.diskDesignator(path) - else - // we cannot just use "/" - // we will write to the buffer past the ptr len so it must be a 
non-const buffer - path[0..1]; - var rfs: *Fs.FileSystem.RealFS = &r.fs.fs; - - rfs.entries_mutex.lock(); - defer rfs.entries_mutex.unlock(); - - while (!strings.eql(top, root_path)) : (top = Dirname.dirname(top)) { - var result = try r.dir_cache.getOrPut(top); - if (result.status != .unknown) { - top_parent = result; - break; - } + + if (rfs.entries.get(top)) |top_entry| { + _dir_entry_paths_to_resolve[@intCast(usize, i)].safe_path = top_entry.entries.dir; + _dir_entry_paths_to_resolve[@intCast(usize, i)].fd = top_entry.entries.fd; + } + i += 1; + } + + if (strings.eql(top, root_path)) { + var result = try r.dir_cache.getOrPut(root_path); + if (result.status != .unknown) { + top_parent = result; + } else { _dir_entry_paths_to_resolve[@intCast(usize, i)] = DirEntryResolveQueueItem{ - .unsafe_path = top, + .unsafe_path = root_path, .result = result, .fd = 0, }; - if (rfs.entries.get(top)) |top_entry| { _dir_entry_paths_to_resolve[@intCast(usize, i)].safe_path = top_entry.entries.dir; _dir_entry_paths_to_resolve[@intCast(usize, i)].fd = top_entry.entries.fd; } - i += 1; - } - - if (strings.eql(top, root_path)) { - var result = try r.dir_cache.getOrPut(root_path); - if (result.status != .unknown) { - top_parent = result; - } else { - _dir_entry_paths_to_resolve[@intCast(usize, i)] = DirEntryResolveQueueItem{ - .unsafe_path = root_path, - .result = result, - .fd = 0, - }; - if (rfs.entries.get(top)) |top_entry| { - _dir_entry_paths_to_resolve[@intCast(usize, i)].safe_path = top_entry.entries.dir; - _dir_entry_paths_to_resolve[@intCast(usize, i)].fd = top_entry.entries.fd; - } - i += 1; - } + i += 1; } + } - var queue_slice: []DirEntryResolveQueueItem = _dir_entry_paths_to_resolve[0..@intCast(usize, i)]; - std.debug.assert(queue_slice.len > 0); - var open_dir_count: usize = 0; + var queue_slice: []DirEntryResolveQueueItem = _dir_entry_paths_to_resolve[0..@intCast(usize, i)]; + std.debug.assert(queue_slice.len > 0); + var open_dir_count: usize = 0; - // When this function halts, any item not processed means it's not found. - defer { + // When this function halts, any item not processed means it's not found. + defer { - // Anything - if (open_dir_count > 0 and r.fs.fs.needToCloseFiles()) { - var open_dirs: []std.fs.Dir = _open_dirs[0..open_dir_count]; - for (open_dirs) |*open_dir| { - open_dir.close(); - } + // Anything + if (open_dir_count > 0 and r.fs.fs.needToCloseFiles()) { + var open_dirs: []std.fs.Dir = _open_dirs[0..open_dir_count]; + for (open_dirs) |*open_dir| { + open_dir.close(); } } + } - // We want to walk in a straight line from the topmost directory to the desired directory - // For each directory we visit, we get the entries, but not traverse into child directories - // (unless those child directores are in the queue) - // We go top-down instead of bottom-up to increase odds of reusing previously open file handles - // "/home/jarred/Code/node_modules/react/cjs/react.development.js" - // ^ - // If we start there, we will traverse all of /home/jarred, including e.g. /home/jarred/Downloads - // which is completely irrelevant. - - // After much experimentation... - // - fts_open is not the fastest way to read directories. fts actually just uses readdir!! - // - remember - var _safe_path: ?string = null; - - // Start at the top. 
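The queue assembled above is effectively the parent chain of the requested directory, and the loop that follows pops it from the back so the filesystem is visited root-first. A standalone sketch of that ordering (hypothetical names, much simpler than the DirEntryResolveQueueItem machinery):

const std = @import("std");

// Collect the target directory and each of its parents, then pop from the
// back: "/" is visited first and the target directory last, maximizing reuse
// of already-open parent directory handles.
pub fn main() void {
    const target = "/home/jarred/Code/node_modules/react/cjs";
    var chain: [32][]const u8 = undefined;
    var len: usize = 0;
    var current: ?[]const u8 = target;
    while (current) |dir| : (current = std.fs.path.dirname(dir)) {
        chain[len] = dir;
        len += 1;
    }
    while (len > 0) {
        len -= 1;
        std.debug.print("visit {s}\n", .{chain[len]});
    }
}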
- while (queue_slice.len > 0) {
-                var queue_top = queue_slice[queue_slice.len - 1];
-                defer top_parent = queue_top.result;
-                queue_slice.len -= 1;
-
-                var _open_dir: anyerror!std.fs.Dir = undefined;
-                if (queue_top.fd == 0) {
-
-                    // This saves us N copies of .toPosixPath
-                    // which was likely the perf gain from resolving directories relative to the parent directory, anyway.
-                    const prev_char = path.ptr[queue_top.unsafe_path.len];
-                    path.ptr[queue_top.unsafe_path.len] = 0;
-                    defer path.ptr[queue_top.unsafe_path.len] = prev_char;
-                    var sentinel = path.ptr[0..queue_top.unsafe_path.len :0];
-                    _open_dir = std.fs.openDirAbsoluteZ(
-                        sentinel,
-                        .{
-                            .iterate = true,
-                            .no_follow = !follow_symlinks,
-                        },
-                    );
-                    // }
-                }

+        // We want to walk in a straight line from the topmost directory to the desired directory
+        // For each directory we visit, we get the entries, but not traverse into child directories
+        // (unless those child directories are in the queue)
+        // We go top-down instead of bottom-up to increase odds of reusing previously open file handles
+        // "/home/jarred/Code/node_modules/react/cjs/react.development.js"
+        // ^
+        // If we start there, we will traverse all of /home/jarred, including e.g. /home/jarred/Downloads
+        // which is completely irrelevant.
+
+        // After much experimentation...
+        // - fts_open is not the fastest way to read directories. fts actually just uses readdir!!
+        // - remember
+        var _safe_path: ?string = null;
+
+        // Start at the top.
+        while (queue_slice.len > 0) {
+            var queue_top = queue_slice[queue_slice.len - 1];
+            defer top_parent = queue_top.result;
+            queue_slice.len -= 1;
+
+            var _open_dir: anyerror!std.fs.Dir = undefined;
+            if (queue_top.fd == 0) {
+
+                // This saves us N copies of .toPosixPath
+                // which was likely the perf gain from resolving directories relative to the parent directory, anyway.
+                const prev_char = path.ptr[queue_top.unsafe_path.len];
+                path.ptr[queue_top.unsafe_path.len] = 0;
+                defer path.ptr[queue_top.unsafe_path.len] = prev_char;
+                var sentinel = path.ptr[0..queue_top.unsafe_path.len :0];
+                _open_dir = std.fs.openDirAbsoluteZ(
+                    sentinel,
+                    .{
+                        .iterate = true,
+                        .no_follow = !follow_symlinks,
+                    },
+                );
+                // }
+            }

-                const open_dir = if (queue_top.fd != 0) std.fs.Dir{ .fd = queue_top.fd } else (_open_dir catch |err| {
-                    switch (err) {
-                        error.EACCESS => {},
-
-                        // Ignore "ENOTDIR" here so that calling "ReadDirectory" on a file behaves
-                        // as if there is nothing there at all instead of causing an error due to
-                        // the directory actually being a file. This is a workaround for situations
-                        // where people try to import from a path containing a file as a parent
-                        // directory. The "pnpm" package manager generates a faulty "NODE_PATH"
-                        // list which contains such paths and treating them as missing means we just
-                        // ignore them during path resolution.
-                        error.ENOENT,
-                        error.ENOTDIR,
-                        error.IsDir,
-                        error.NotDir,
-                        error.FileNotFound,
-                        => {
-                            return null;
-                        },

+            const open_dir = if (queue_top.fd != 0) std.fs.Dir{ .fd = queue_top.fd } else (_open_dir catch |err| {
+                switch (err) {
+                    error.EACCESS => {},
+
+                    // Ignore "ENOTDIR" here so that calling "ReadDirectory" on a file behaves
+                    // as if there is nothing there at all instead of causing an error due to
+                    // the directory actually being a file. This is a workaround for situations
+                    // where people try to import from a path containing a file as a parent
+                    // directory. 
The "pnpm" package manager generates a faulty "NODE_PATH" + // list which contains such paths and treating them as missing means we just + // ignore them during path resolution. + error.ENOENT, + error.ENOTDIR, + error.IsDir, + error.NotDir, + error.FileNotFound, + => { + return null; + }, - else => { - var cached_dir_entry_result = rfs.entries.getOrPut(queue_top.unsafe_path) catch unreachable; - r.dir_cache.markNotFound(queue_top.result); - rfs.entries.markNotFound(cached_dir_entry_result); - if (comptime enable_logging) { - const pretty = r.prettyPath(Path.init(queue_top.unsafe_path)); - - r.log.addErrorFmt( - null, - logger.Loc{}, - r.allocator, - "Cannot read directory \"{s}\": {s}", - .{ - pretty, - @errorName(err), - }, - ) catch {}; - } - }, - } + else => { + var cached_dir_entry_result = rfs.entries.getOrPut(queue_top.unsafe_path) catch unreachable; + r.dir_cache.markNotFound(queue_top.result); + rfs.entries.markNotFound(cached_dir_entry_result); + if (comptime enable_logging) { + const pretty = r.prettyPath(Path.init(queue_top.unsafe_path)); + + r.log.addErrorFmt( + null, + logger.Loc{}, + r.allocator, + "Cannot read directory \"{s}\": {s}", + .{ + pretty, + @errorName(err), + }, + ) catch {}; + } + }, + } - return null; - }); + return null; + }); - if (queue_top.fd == 0) { - Fs.FileSystem.setMaxFd(open_dir.fd); - // these objects mostly just wrap the file descriptor, so it's fine to keep it. - _open_dirs[open_dir_count] = open_dir; - open_dir_count += 1; - } + if (queue_top.fd == 0) { + Fs.FileSystem.setMaxFd(open_dir.fd); + // these objects mostly just wrap the file descriptor, so it's fine to keep it. + _open_dirs[open_dir_count] = open_dir; + open_dir_count += 1; + } - const dir_path = if (queue_top.safe_path.len > 0) queue_top.safe_path else brk: { + const dir_path = if (queue_top.safe_path.len > 0) queue_top.safe_path else brk: { - // ensure trailing slash - if (_safe_path == null) { - // Now that we've opened the topmost directory successfully, it's reasonable to store the slice. - if (path[path.len - 1] != std.fs.path.sep) { - var parts = [_]string{ path, std.fs.path.sep_str }; - _safe_path = try r.fs.dirname_store.append(@TypeOf(parts), parts); - } else { - _safe_path = try r.fs.dirname_store.append(string, path); - } + // ensure trailing slash + if (_safe_path == null) { + // Now that we've opened the topmost directory successfully, it's reasonable to store the slice. + if (path[path.len - 1] != std.fs.path.sep) { + var parts = [_]string{ path, std.fs.path.sep_str }; + _safe_path = try r.fs.dirname_store.append(@TypeOf(parts), parts); + } else { + _safe_path = try r.fs.dirname_store.append(string, path); } + } - const safe_path = _safe_path.?; + const safe_path = _safe_path.?; - var dir_path_i = std.mem.indexOf(u8, safe_path, queue_top.unsafe_path) orelse unreachable; - var end = dir_path_i + - queue_top.unsafe_path.len; + var dir_path_i = std.mem.indexOf(u8, safe_path, queue_top.unsafe_path) orelse unreachable; + var end = dir_path_i + + queue_top.unsafe_path.len; - // Directories must always end in a trailing slash or else various bugs can occur. - // This covers "what happens when the trailing" - end += @intCast(usize, @boolToInt(safe_path.len > end and end > 0 and safe_path[end - 1] != std.fs.path.sep and safe_path[end] == std.fs.path.sep)); - break :brk safe_path[dir_path_i..end]; - }; + // Directories must always end in a trailing slash or else various bugs can occur. 
+ // This covers "what happens when the trailing" + end += @intCast(usize, @boolToInt(safe_path.len > end and end > 0 and safe_path[end - 1] != std.fs.path.sep and safe_path[end] == std.fs.path.sep)); + break :brk safe_path[dir_path_i..end]; + }; - var cached_dir_entry_result = rfs.entries.getOrPut(dir_path) catch unreachable; + var cached_dir_entry_result = rfs.entries.getOrPut(dir_path) catch unreachable; - var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined; - var needs_iter: bool = true; + var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined; + var needs_iter: bool = true; - if (rfs.entries.atIndex(cached_dir_entry_result.index)) |cached_entry| { - if (cached_entry.* == .entries) { - dir_entries_option = cached_entry; - needs_iter = false; - } + if (rfs.entries.atIndex(cached_dir_entry_result.index)) |cached_entry| { + if (cached_entry.* == .entries) { + dir_entries_option = cached_entry; + needs_iter = false; } + } - if (needs_iter) { - dir_entries_option = try rfs.entries.put(&cached_dir_entry_result, .{ - .entries = Fs.FileSystem.DirEntry.init(dir_path, r.fs.allocator), - }); + if (needs_iter) { + dir_entries_option = try rfs.entries.put(&cached_dir_entry_result, .{ + .entries = Fs.FileSystem.DirEntry.init(dir_path, r.fs.allocator), + }); - if (FeatureFlags.store_file_descriptors) { - Fs.FileSystem.setMaxFd(open_dir.fd); - dir_entries_option.entries.fd = open_dir.fd; - } - var dir_iterator = open_dir.iterate(); - while (try dir_iterator.next()) |_value| { - dir_entries_option.entries.addEntry(_value) catch unreachable; - } + if (FeatureFlags.store_file_descriptors) { + Fs.FileSystem.setMaxFd(open_dir.fd); + dir_entries_option.entries.fd = open_dir.fd; } - - // We must initialize it as empty so that the result index is correct. - // This is important so that browser_scope has a valid index. - var dir_info_ptr = try r.dir_cache.put(&queue_top.result, DirInfo{}); - - try r.dirInfoUncached( - dir_info_ptr, - dir_path, - dir_entries_option, - queue_top.result, - cached_dir_entry_result.index, - r.dir_cache.atIndex(top_parent.index), - top_parent.index, - open_dir.fd, - ); - - if (queue_slice.len == 0) { - return dir_info_ptr; - - // Is the directory we're searching for actually a file? - } else if (queue_slice.len == 1) { - // const next_in_queue = queue_slice[0]; - // const next_basename = std.fs.path.basename(next_in_queue.unsafe_path); - // if (dir_info_ptr.getEntries()) |entries| { - // if (entries.get(next_basename) != null) { - // return null; - // } - // } + var dir_iterator = open_dir.iterate(); + while (try dir_iterator.next()) |_value| { + dir_entries_option.entries.addEntry(_value) catch unreachable; } } - unreachable; - } + // We must initialize it as empty so that the result index is correct. + // This is important so that browser_scope has a valid index. 
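The `cached_dir_entry_result` / `needs_iter` dance above is a read-through cache: getOrPut first, and only pay for the directory iteration when the slot was actually missing. A standalone sketch of that pattern (`DirEntries` and `scanDirectory` are hypothetical stand-ins, not this diff's EntriesOption type):

const std = @import("std");

const DirEntries = std.ArrayList([]const u8);

// getOrPut first; populate the slot exactly once (the `needs_iter` case).
fn getEntries(cache: *std.StringHashMap(DirEntries), allocator: *std.mem.Allocator, dir_path: []const u8) !*DirEntries {
    var result = try cache.getOrPut(dir_path);
    if (!result.found_existing) {
        result.value_ptr.* = DirEntries.init(allocator);
        try scanDirectory(allocator, dir_path, result.value_ptr);
    }
    return result.value_ptr;
}

fn scanDirectory(allocator: *std.mem.Allocator, dir_path: []const u8, out: *DirEntries) !void {
    var dir = try std.fs.openDirAbsolute(dir_path, .{ .iterate = true });
    defer dir.close();
    var it = dir.iterate();
    while (try it.next()) |entry| {
        // entry.name is only valid for this iteration step, so copy it out.
        try out.append(try allocator.dupe(u8, entry.name));
    }
}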
+ var dir_info_ptr = try r.dir_cache.put(&queue_top.result, DirInfo{}); - // This closely follows the behavior of "tryLoadModuleUsingPaths()" in the - // official TypeScript compiler - pub fn matchTSConfigPaths(r: *ThisResolver, tsconfig: *const TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path }) catch unreachable; - } + try r.dirInfoUncached( + dir_info_ptr, + dir_path, + dir_entries_option, + queue_top.result, + cached_dir_entry_result.index, + r.dir_cache.atIndex(top_parent.index), + top_parent.index, + open_dir.fd, + ); - var abs_base_url = tsconfig.base_url_for_paths; + if (queue_slice.len == 0) { + return dir_info_ptr; - // The explicit base URL should take precedence over the implicit base URL - // if present. This matters when a tsconfig.json file overrides "baseUrl" - // from another extended tsconfig.json file but doesn't override "paths". - if (tsconfig.hasBaseURL()) { - abs_base_url = tsconfig.base_url; + // Is the directory we're searching for actually a file? + } else if (queue_slice.len == 1) { + // const next_in_queue = queue_slice[0]; + // const next_basename = std.fs.path.basename(next_in_queue.unsafe_path); + // if (dir_info_ptr.getEntries()) |entries| { + // if (entries.get(next_basename) != null) { + // return null; + // } + // } } + } - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url}) catch unreachable; - } + unreachable; + } - // Check for exact matches first - { - var iter = tsconfig.paths.iterator(); - while (iter.next()) |entry| { - const key = entry.key_ptr.*; - - if (strings.eql(key, path)) { - for (entry.value_ptr.*) |original_path| { - var absolute_original_path = original_path; - var was_alloc = false; - - if (!std.fs.path.isAbsolute(absolute_original_path)) { - const parts = [_]string{ abs_base_url, original_path }; - absolute_original_path = r.fs.absBuf(&parts, &tsconfig_path_abs_buf); - } + // This closely follows the behavior of "tryLoadModuleUsingPaths()" in the + // official TypeScript compiler + pub fn matchTSConfigPaths(r: *ThisResolver, tsconfig: *const TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult { + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path }) catch unreachable; + } - if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| { - return res; - } - } - } - } - } + var abs_base_url = tsconfig.base_url_for_paths; - const TSConfigMatch = struct { - prefix: string, - suffix: string, - original_paths: []string, - }; + // The explicit base URL should take precedence over the implicit base URL + // if present. This matters when a tsconfig.json file overrides "baseUrl" + // from another extended tsconfig.json file but doesn't override "paths". + if (tsconfig.hasBaseURL()) { + abs_base_url = tsconfig.base_url; + } - var longest_match: TSConfigMatch = undefined; - var longest_match_prefix_length: i32 = -1; - var longest_match_suffix_length: i32 = -1; + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url}) catch unreachable; + } + // Check for exact matches first + { var iter = tsconfig.paths.iterator(); while (iter.next()) |entry| { const key = entry.key_ptr.*; - const original_paths = entry.value_ptr.*; - - if (strings.indexOfChar(key, '*')) |star| { - const prefix = key[0 .. 
star - 1]; - const suffix = key[star + 1 ..]; - - // Find the match with the longest prefix. If two matches have the same - // prefix length, pick the one with the longest suffix. This second edge - // case isn't handled by the TypeScript compiler, but we handle it - // because we want the output to always be deterministic - if (strings.startsWith(path, prefix) and - strings.endsWith(path, suffix) and - (prefix.len >= longest_match_prefix_length and - suffix.len > longest_match_suffix_length)) - { - longest_match_prefix_length = @intCast(i32, prefix.len); - longest_match_suffix_length = @intCast(i32, suffix.len); - longest_match = TSConfigMatch{ .prefix = prefix, .suffix = suffix, .original_paths = original_paths }; + + if (strings.eql(key, path)) { + for (entry.value_ptr.*) |original_path| { + var absolute_original_path = original_path; + var was_alloc = false; + + if (!std.fs.path.isAbsolute(absolute_original_path)) { + const parts = [_]string{ abs_base_url, original_path }; + absolute_original_path = r.fs.absBuf(&parts, &tsconfig_path_abs_buf); + } + + if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| { + return res; + } } } } + } - // If there is at least one match, only consider the one with the longest - // prefix. This matches the behavior of the TypeScript compiler. - if (longest_match_prefix_length > -1) { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix }) catch unreachable; + const TSConfigMatch = struct { + prefix: string, + suffix: string, + original_paths: []string, + }; + + var longest_match: TSConfigMatch = undefined; + var longest_match_prefix_length: i32 = -1; + var longest_match_suffix_length: i32 = -1; + + var iter = tsconfig.paths.iterator(); + while (iter.next()) |entry| { + const key = entry.key_ptr.*; + const original_paths = entry.value_ptr.*; + + if (strings.indexOfChar(key, '*')) |star| { + const prefix = key[0 .. star - 1]; + const suffix = key[star + 1 ..]; + + // Find the match with the longest prefix. If two matches have the same + // prefix length, pick the one with the longest suffix. This second edge + // case isn't handled by the TypeScript compiler, but we handle it + // because we want the output to always be deterministic + if (strings.startsWith(path, prefix) and + strings.endsWith(path, suffix) and + (prefix.len >= longest_match_prefix_length and + suffix.len > longest_match_suffix_length)) + { + longest_match_prefix_length = @intCast(i32, prefix.len); + longest_match_suffix_length = @intCast(i32, suffix.len); + longest_match = TSConfigMatch{ .prefix = prefix, .suffix = suffix, .original_paths = original_paths }; } + } + } + + // If there is at least one match, only consider the one with the longest + // prefix. This matches the behavior of the TypeScript compiler. + if (longest_match_prefix_length > -1) { + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix }) catch unreachable; + } - for (longest_match.original_paths) |original_path| { - // Swap out the "*" in the original path for whatever the "*" matched - const matched_text = path[longest_match.prefix.len .. path.len - longest_match.suffix.len]; + for (longest_match.original_paths) |original_path| { + // Swap out the "*" in the original path for whatever the "*" matched + const matched_text = path[longest_match.prefix.len .. 
path.len - longest_match.suffix.len]; - const total_length = std.mem.indexOfScalar(u8, original_path, '*') orelse unreachable; - var prefix_parts = [_]string{ abs_base_url, original_path[0..total_length] }; + const total_length = std.mem.indexOfScalar(u8, original_path, '*') orelse unreachable; + var prefix_parts = [_]string{ abs_base_url, original_path[0..total_length] }; - // 1. Normalize the base path - // so that "/Users/foo/project/", "../components/*" => "/Users/foo/components/"" - var prefix = r.fs.absBuf(&prefix_parts, &TemporaryBuffer.TSConfigMatchFullBuf2); + // 1. Normalize the base path + // so that "/Users/foo/project/", "../components/*" => "/Users/foo/components/"" + var prefix = r.fs.absBuf(&prefix_parts, &TemporaryBuffer.TSConfigMatchFullBuf2); - // 2. Join the new base path with the matched result - // so that "/Users/foo/components/", "/foo/bar" => /Users/foo/components/foo/bar - var parts = [_]string{ prefix, std.mem.trimLeft(u8, matched_text, "/"), std.mem.trimLeft(u8, longest_match.suffix, "/") }; - var absolute_original_path = r.fs.absBuf( - &parts, - &TemporaryBuffer.TSConfigMatchFullBuf, - ); + // 2. Join the new base path with the matched result + // so that "/Users/foo/components/", "/foo/bar" => /Users/foo/components/foo/bar + var parts = [_]string{ prefix, std.mem.trimLeft(u8, matched_text, "/"), std.mem.trimLeft(u8, longest_match.suffix, "/") }; + var absolute_original_path = r.fs.absBuf( + &parts, + &TemporaryBuffer.TSConfigMatchFullBuf, + ); - if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| { - return res; - } + if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| { + return res; } } - - return null; } - const BrowserMapPath = struct { - remapped: string = "", - cleaned: string = "", - input_path: string = "", - extension_order: []const string, - map: BrowserMap, - - pub threadlocal var abs_to_rel_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; - - pub const Kind = enum { PackagePath, AbsolutePath }; - - pub fn checkPath( - this: *BrowserMapPath, - path_to_check: string, - ) bool { - const map = this.map; + return null; + } - const cleaned = this.cleaned; - // Check for equality - if (this.map.get(path_to_check)) |result| { - this.remapped = result; - this.input_path = path_to_check; + const BrowserMapPath = struct { + remapped: string = "", + cleaned: string = "", + input_path: string = "", + extension_order: []const string, + map: BrowserMap, + + pub threadlocal var abs_to_rel_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + + pub const Kind = enum { PackagePath, AbsolutePath }; + + pub fn checkPath( + this: *BrowserMapPath, + path_to_check: string, + ) bool { + const map = this.map; + + const cleaned = this.cleaned; + // Check for equality + if (this.map.get(path_to_check)) |result| { + this.remapped = result; + this.input_path = path_to_check; + return true; + } + + std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned); + + // If that failed, try adding implicit extensions + for (this.extension_order) |ext| { + std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. cleaned.len + ext.len], ext); + const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. 
cleaned.len + ext.len]; + // if (r.debug_logs) |*debug| { + // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {}; + // } + if (map.get(new_path)) |_remapped| { + this.remapped = _remapped; + this.cleaned = new_path; + this.input_path = new_path; return true; } + } - std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned); - - // If that failed, try adding implicit extensions - for (this.extension_order) |ext| { - std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. cleaned.len + ext.len], ext); - const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. cleaned.len + ext.len]; - // if (r.debug_logs) |*debug| { - // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {}; - // } - if (map.get(new_path)) |_remapped| { - this.remapped = _remapped; - this.cleaned = new_path; - this.input_path = new_path; - return true; - } - } + // If that failed, try assuming this is a directory and looking for an "index" file - // If that failed, try assuming this is a directory and looking for an "index" file + var index_path: string = ""; + { + var parts = [_]string{ std.mem.trimRight(u8, path_to_check, std.fs.path.sep_str), std.fs.path.sep_str ++ "index" }; + index_path = ResolvePath.joinStringBuf(&tsconfig_base_url_buf, &parts, .auto); + } - var index_path: string = ""; - { - var parts = [_]string{ std.mem.trimRight(u8, path_to_check, std.fs.path.sep_str), std.fs.path.sep_str ++ "index" }; - index_path = ResolvePath.joinStringBuf(&tsconfig_base_url_buf, &parts, .auto); - } + if (map.get(index_path)) |_remapped| { + this.remapped = _remapped; + this.input_path = index_path; + return true; + } + + std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, index_path); - if (map.get(index_path)) |_remapped| { + for (this.extension_order) |ext| { + std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[index_path.len .. index_path.len + ext.len], ext); + const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. index_path.len + ext.len]; + // if (r.debug_logs) |*debug| { + // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {}; + // } + if (map.get(new_path)) |_remapped| { this.remapped = _remapped; - this.input_path = index_path; + this.cleaned = new_path; + this.input_path = new_path; return true; } - - std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, index_path); - - for (this.extension_order) |ext| { - std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[index_path.len .. index_path.len + ext.len], ext); - const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. 
index_path.len + ext.len]; - // if (r.debug_logs) |*debug| { - // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {}; - // } - if (map.get(new_path)) |_remapped| { - this.remapped = _remapped; - this.cleaned = new_path; - this.input_path = new_path; - return true; - } - } - - return false; } - }; - - pub fn checkBrowserMap( - r: *ThisResolver, - dir_info: *const DirInfo, - input_path_: string, - comptime kind: BrowserMapPath.Kind, - ) ?string { - const package_json = dir_info.package_json orelse return null; - const browser_map = package_json.browser_map; - if (browser_map.count() == 0) return null; + return false; + } + }; - var input_path = input_path_; + pub fn checkBrowserMap( + r: *ThisResolver, + dir_info: *const DirInfo, + input_path_: string, + comptime kind: BrowserMapPath.Kind, + ) ?string { + const package_json = dir_info.package_json orelse return null; + const browser_map = package_json.browser_map; - if (comptime kind == .AbsolutePath) { - const abs_path = dir_info.abs_path; - // Turn absolute paths into paths relative to the "browser" map location - if (!strings.startsWith(input_path, abs_path)) { - return null; - } + if (browser_map.count() == 0) return null; - input_path = input_path[abs_path.len..]; - } + var input_path = input_path_; - if (input_path.len == 0 or (input_path.len == 1 and (input_path[0] == '.' or input_path[0] == std.fs.path.sep))) { - // No bundler supports remapping ".", so we don't either + if (comptime kind == .AbsolutePath) { + const abs_path = dir_info.abs_path; + // Turn absolute paths into paths relative to the "browser" map location + if (!strings.startsWith(input_path, abs_path)) { return null; } - // Normalize the path so we can compare against it without getting confused by "./" - var cleaned = r.fs.normalizeBuf(&check_browser_map_buf, input_path); + input_path = input_path[abs_path.len..]; + } - if (cleaned.len == 1 and cleaned[0] == '.') { - // No bundler supports remapping ".", so we don't either - return null; - } + if (input_path.len == 0 or (input_path.len == 1 and (input_path[0] == '.' or input_path[0] == std.fs.path.sep))) { + // No bundler supports remapping ".", so we don't either + return null; + } - var checker = BrowserMapPath{ - .remapped = "", - .cleaned = cleaned, - .input_path = input_path, - .extension_order = r.extension_order, - .map = package_json.browser_map, - }; + // Normalize the path so we can compare against it without getting confused by "./" + var cleaned = r.fs.normalizeBuf(&check_browser_map_buf, input_path); - if (checker.checkPath(input_path)) { - return checker.remapped; - } + if (cleaned.len == 1 and cleaned[0] == '.') { + // No bundler supports remapping ".", so we don't either + return null; + } + + var checker = BrowserMapPath{ + .remapped = "", + .cleaned = cleaned, + .input_path = input_path, + .extension_order = r.extension_order, + .map = package_json.browser_map, + }; - // First try the import path as a package path - if (isPackagePath(checker.input_path)) { - switch (comptime kind) { - .AbsolutePath => { + if (checker.checkPath(input_path)) { + return checker.remapped; + } + + // First try the import path as a package path + if (isPackagePath(checker.input_path)) { + switch (comptime kind) { + .AbsolutePath => { + BrowserMapPath.abs_to_rel_buf[0..2].* = "./".*; + std.mem.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path); + if (checker.checkPath(BrowserMapPath.abs_to_rel_buf[0 .. 
checker.input_path.len + 2])) { + return checker.remapped; + } + }, + .PackagePath => { + // Browserify allows a browser map entry of "./pkg" to override a package + // path of "require('pkg')". This is weird, and arguably a bug. But we + // replicate this bug for compatibility. However, Browserify only allows + // this within the same package. It does not allow such an entry in a + // parent package to override this in a child package. So this behavior + // is disallowed if there is a "node_modules" folder in between the child + // package and the parent package. + const isInSamePackage = brk: { + const parent = dir_info.getParent() orelse break :brk true; + break :brk !parent.is_node_modules; + }; + + if (isInSamePackage) { BrowserMapPath.abs_to_rel_buf[0..2].* = "./".*; std.mem.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path); + if (checker.checkPath(BrowserMapPath.abs_to_rel_buf[0 .. checker.input_path.len + 2])) { return checker.remapped; } - }, - .PackagePath => { - // Browserify allows a browser map entry of "./pkg" to override a package - // path of "require('pkg')". This is weird, and arguably a bug. But we - // replicate this bug for compatibility. However, Browserify only allows - // this within the same package. It does not allow such an entry in a - // parent package to override this in a child package. So this behavior - // is disallowed if there is a "node_modules" folder in between the child - // package and the parent package. - const isInSamePackage = brk: { - const parent = dir_info.getParent() orelse break :brk true; - break :brk !parent.is_node_modules; - }; - - if (isInSamePackage) { - BrowserMapPath.abs_to_rel_buf[0..2].* = "./".*; - std.mem.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path); - - if (checker.checkPath(BrowserMapPath.abs_to_rel_buf[0 .. checker.input_path.len + 2])) { - return checker.remapped; - } - } - }, - } + } + }, } + } - return null; + return null; + } + + pub fn loadFromMainField(r: *ThisResolver, path: string, dir_info: *DirInfo, _field_rel_path: string, field: string, extension_order: []const string) ?MatchResult { + var field_rel_path = _field_rel_path; + // Is this a directory? + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path }) catch {}; + debug.increaseIndent() catch {}; } - pub fn loadFromMainField(r: *ThisResolver, path: string, dir_info: *DirInfo, _field_rel_path: string, field: string, extension_order: []const string) ?MatchResult { - var field_rel_path = _field_rel_path; - // Is this a directory? + defer { if (r.debug_logs) |*debug| { - debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path }) catch {}; - debug.increaseIndent() catch {}; - } - - defer { - if (r.debug_logs) |*debug| { - debug.decreaseIndent() catch {}; - } + debug.decreaseIndent() catch {}; } + } - // Potentially remap using the "browser" field - if (dir_info.getEnclosingBrowserScope()) |browser_scope| { - if (browser_scope.package_json) |browser_json| { - if (r.checkBrowserMap( - browser_scope, - field_rel_path, - .AbsolutePath, - )) |remap| { - // Is the path disabled? 
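The `remap.len == 0` test that follows encodes the "browser" map's disable convention: a package.json field like "browser": { "./server.js": false, "./log.js": "./log-browser.js" } stores `false` entries as empty strings. A standalone sketch of that lookup (hypothetical names, not this diff's PackageJSON API):

const std = @import("std");

// An empty remap string means "this file is disabled for the browser";
// a missing key means the path is not remapped at all.
fn applyBrowserRemap(map: *const std.StringHashMap([]const u8), rel_path: []const u8) ?[]const u8 {
    const remap = map.get(rel_path) orelse return rel_path; // no remapping at all
    if (remap.len == 0) return null; // disabled: substitute an empty module
    return remap; // remapped to a browser-specific file
}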
- if (remap.len == 0) { - const paths = [_]string{ path, field_rel_path }; - const new_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable; - var _path = Path.init(new_path); - _path.is_disabled = true; - return MatchResult{ - .path_pair = PathPair{ - .primary = _path, - }, - .package_json = browser_json, - }; - } - - field_rel_path = remap; + // Potentially remap using the "browser" field + if (dir_info.getEnclosingBrowserScope()) |browser_scope| { + if (browser_scope.package_json) |browser_json| { + if (r.checkBrowserMap( + browser_scope, + field_rel_path, + .AbsolutePath, + )) |remap| { + // Is the path disabled? + if (remap.len == 0) { + const paths = [_]string{ path, field_rel_path }; + const new_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable; + var _path = Path.init(new_path); + _path.is_disabled = true; + return MatchResult{ + .path_pair = PathPair{ + .primary = _path, + }, + .package_json = browser_json, + }; } - } - } - const _paths = [_]string{ path, field_rel_path }; - const field_abs_path = r.fs.absBuf(&_paths, &field_abs_path_buf); - // Is this a file? - if (r.loadAsFile(field_abs_path, extension_order)) |result| { - if (dir_info.package_json) |package_json| { - return MatchResult{ - .path_pair = PathPair{ .primary = Fs.Path.init(result.path) }, - .package_json = package_json, - .dirname_fd = result.dirname_fd, - }; + field_rel_path = remap; } + } + } + const _paths = [_]string{ path, field_rel_path }; + const field_abs_path = r.fs.absBuf(&_paths, &field_abs_path_buf); + // Is this a file? + if (r.loadAsFile(field_abs_path, extension_order)) |result| { + if (dir_info.package_json) |package_json| { return MatchResult{ .path_pair = PathPair{ .primary = Fs.Path.init(result.path) }, + .package_json = package_json, .dirname_fd = result.dirname_fd, - .diff_case = result.diff_case, }; } - // Is it a directory with an index? - const field_dir_info = (r.dirInfoCached(field_abs_path) catch null) orelse { - return null; - }; - - return r.loadAsIndexWithBrowserRemapping(field_dir_info, field_abs_path, extension_order) orelse { - return null; + return MatchResult{ + .path_pair = PathPair{ .primary = Fs.Path.init(result.path) }, + .dirname_fd = result.dirname_fd, + .diff_case = result.diff_case, }; } - pub fn loadAsIndex(r: *ThisResolver, dir_info: *DirInfo, path: string, extension_order: []const string) ?MatchResult { - var rfs = &r.fs.fs; - // Try the "index" file with extensions - for (extension_order) |ext| { - var base = TemporaryBuffer.ExtensionPathBuf[0 .. "index".len + ext.len]; - base[0.."index".len].* = "index".*; - std.mem.copy(u8, base["index".len..base.len], ext); - - if (dir_info.getEntries()) |entries| { - if (entries.get(base)) |lookup| { - if (lookup.entry.kind(rfs) == .file) { - const out_buf = brk: { - if (lookup.entry.abs_path.isEmpty()) { - const parts = [_]string{ path, base }; - const out_buf_ = r.fs.absBuf(&parts, &index_buf); - lookup.entry.abs_path = - PathString.init(r.fs.dirname_store.append(@TypeOf(out_buf_), out_buf_) catch unreachable); - } - break :brk lookup.entry.abs_path.slice(); - }; + // Is it a directory with an index? 
+ const field_dir_info = (r.dirInfoCached(field_abs_path) catch null) orelse { + return null; + }; - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable; - } + return r.loadAsIndexWithBrowserRemapping(field_dir_info, field_abs_path, extension_order) orelse { + return null; + }; + } - if (dir_info.package_json) |package_json| { - return MatchResult{ - .path_pair = .{ .primary = Path.init(out_buf) }, - .diff_case = lookup.diff_case, - .package_json = package_json, - .dirname_fd = dir_info.getFileDescriptor(), - }; + pub fn loadAsIndex(r: *ThisResolver, dir_info: *DirInfo, path: string, extension_order: []const string) ?MatchResult { + var rfs = &r.fs.fs; + // Try the "index" file with extensions + for (extension_order) |ext| { + var base = TemporaryBuffer.ExtensionPathBuf[0 .. "index".len + ext.len]; + base[0.."index".len].* = "index".*; + std.mem.copy(u8, base["index".len..base.len], ext); + + if (dir_info.getEntries()) |entries| { + if (entries.get(base)) |lookup| { + if (lookup.entry.kind(rfs) == .file) { + const out_buf = brk: { + if (lookup.entry.abs_path.isEmpty()) { + const parts = [_]string{ path, base }; + const out_buf_ = r.fs.absBuf(&parts, &index_buf); + lookup.entry.abs_path = + PathString.init(r.fs.dirname_store.append(@TypeOf(out_buf_), out_buf_) catch unreachable); } + break :brk lookup.entry.abs_path.slice(); + }; + + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable; + } + if (dir_info.package_json) |package_json| { return MatchResult{ .path_pair = .{ .primary = Path.init(out_buf) }, .diff_case = lookup.diff_case, - + .package_json = package_json, .dirname_fd = dir_info.getFileDescriptor(), }; } - } - } - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base }) catch unreachable; + return MatchResult{ + .path_pair = .{ .primary = Path.init(out_buf) }, + .diff_case = lookup.diff_case, + + .dirname_fd = dir_info.getFileDescriptor(), + }; + } } } - return null; - } - - pub fn loadAsIndexWithBrowserRemapping(r: *ThisResolver, dir_info: *DirInfo, path_: string, extension_order: []const string) ?MatchResult { - // In order for our path handling logic to be correct, it must end with a trailing slash. - var path = path_; - if (!strings.endsWithChar(path_, std.fs.path.sep)) { - std.mem.copy(u8, &remap_path_trailing_slash, path); - remap_path_trailing_slash[path.len] = std.fs.path.sep; - remap_path_trailing_slash[path.len + 1] = 0; - path = remap_path_trailing_slash[0 .. path.len + 1]; + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base }) catch unreachable; } + } - if (dir_info.getEnclosingBrowserScope()) |browser_scope| { - const field_rel_path = comptime "index"; + return null; + } - if (browser_scope.package_json) |browser_json| { - if (r.checkBrowserMap( - browser_scope, - field_rel_path, - .AbsolutePath, - )) |remap| { + pub fn loadAsIndexWithBrowserRemapping(r: *ThisResolver, dir_info: *DirInfo, path_: string, extension_order: []const string) ?MatchResult { + // In order for our path handling logic to be correct, it must end with a trailing slash. + var path = path_; + if (!strings.endsWithChar(path_, std.fs.path.sep)) { + std.mem.copy(u8, &remap_path_trailing_slash, path); + remap_path_trailing_slash[path.len] = std.fs.path.sep; + remap_path_trailing_slash[path.len + 1] = 0; + path = remap_path_trailing_slash[0 .. path.len + 1]; + } - // Is the path disabled? 
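The loadAsIndex loop above generates its candidates by appending each configured extension to the base name "index" in one reused buffer. A standalone sketch of that candidate generation (the extension order listed here is a made-up example configuration, not the resolver's actual default):

const std = @import("std");

pub fn main() void {
    const extension_order = [_][]const u8{ ".tsx", ".ts", ".jsx", ".js", ".json" };
    var buf: [32]u8 = undefined;
    for (extension_order) |ext| {
        // "index" + extension, tried strictly in order until one exists.
        const candidate = std.fmt.bufPrint(&buf, "index{s}", .{ext}) catch unreachable;
        std.debug.print("try {s}\n", .{candidate});
    }
}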
- if (remap.len == 0) { - const paths = [_]string{ path, field_rel_path }; - const new_path = r.fs.absBuf(&paths, &remap_path_buf); - var _path = Path.init(new_path); - _path.is_disabled = true; - return MatchResult{ - .path_pair = PathPair{ - .primary = _path, - }, - .package_json = browser_json, - }; - } + if (dir_info.getEnclosingBrowserScope()) |browser_scope| { + const field_rel_path = comptime "index"; + + if (browser_scope.package_json) |browser_json| { + if (r.checkBrowserMap( + browser_scope, + field_rel_path, + .AbsolutePath, + )) |remap| { + + // Is the path disabled? + if (remap.len == 0) { + const paths = [_]string{ path, field_rel_path }; + const new_path = r.fs.absBuf(&paths, &remap_path_buf); + var _path = Path.init(new_path); + _path.is_disabled = true; + return MatchResult{ + .path_pair = PathPair{ + .primary = _path, + }, + .package_json = browser_json, + }; + } - const new_paths = [_]string{ path, remap }; - const remapped_abs = r.fs.absBuf(&new_paths, &remap_path_buf); + const new_paths = [_]string{ path, remap }; + const remapped_abs = r.fs.absBuf(&new_paths, &remap_path_buf); - // Is this a file - if (r.loadAsFile(remapped_abs, extension_order)) |file_result| { - return MatchResult{ .dirname_fd = file_result.dirname_fd, .path_pair = .{ .primary = Path.init(file_result.path) }, .diff_case = file_result.diff_case }; - } + // Is this a file + if (r.loadAsFile(remapped_abs, extension_order)) |file_result| { + return MatchResult{ .dirname_fd = file_result.dirname_fd, .path_pair = .{ .primary = Path.init(file_result.path) }, .diff_case = file_result.diff_case }; + } - // Is it a directory with an index? - if (r.dirInfoCached(remapped_abs) catch null) |new_dir| { - if (r.loadAsIndex(new_dir, remapped_abs, extension_order)) |absolute| { - return absolute; - } + // Is it a directory with an index? + if (r.dirInfoCached(remapped_abs) catch null) |new_dir| { + if (r.loadAsIndex(new_dir, remapped_abs, extension_order)) |absolute| { + return absolute; } - - return null; } + + return null; } } + } - return r.loadAsIndex(dir_info, path_, extension_order); - } - - pub fn loadAsFileOrDirectory(r: *ThisResolver, path: string, kind: ast.ImportKind) ?MatchResult { - const extension_order = r.extension_order; - - // Is this a file? - if (r.loadAsFile(path, extension_order)) |file| { - // ServeBundler cares about the package.json - if (!cache_files) { - // Determine the package folder by looking at the last node_modules/ folder in the path - if (strings.lastIndexOf(file.path, "node_modules" ++ std.fs.path.sep_str)) |last_node_modules_folder| { - const node_modules_folder_offset = last_node_modules_folder + ("node_modules" ++ std.fs.path.sep_str).len; - // Determine the package name by looking at the next separator - if (strings.indexOfChar(file.path[node_modules_folder_offset..], std.fs.path.sep)) |package_name_length| { - if ((r.dirInfoCached(file.path[0 .. node_modules_folder_offset + package_name_length]) catch null)) |package_dir_info| { - if (package_dir_info.package_json) |package_json| { - return MatchResult{ - .path_pair = .{ .primary = Path.init(file.path) }, - .diff_case = file.diff_case, - .dirname_fd = file.dirname_fd, - .package_json = package_json, - .file_fd = file.file_fd, - }; - } - } + return r.loadAsIndex(dir_info, path_, extension_order); + } + + pub fn loadAsFileOrDirectory(r: *ThisResolver, path: string, kind: ast.ImportKind) ?MatchResult { + const extension_order = r.extension_order; + + // Is this a file? 
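When the file check below succeeds, the match is attributed to its owning package by slicing the absolute path up to the first separator after the last "node_modules/". A standalone sketch of that slicing (hypothetical `packageRoot`, POSIX separators only):

const std = @import("std");

// Find the directory whose package.json owns a resolved file.
fn packageRoot(file_path: []const u8) ?[]const u8 {
    const needle = "node_modules/";
    const start = (std.mem.lastIndexOf(u8, file_path, needle) orelse return null) + needle.len;
    const name_len = std.mem.indexOfScalar(u8, file_path[start..], '/') orelse return null;
    return file_path[0 .. start + name_len];
}

test "package root of a file deep inside a package" {
    try std.testing.expectEqualStrings(
        "/app/node_modules/react",
        packageRoot("/app/node_modules/react/cjs/react.development.js").?,
    );
}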
+ if (r.loadAsFile(path, extension_order)) |file| { + + // Determine the package folder by looking at the last node_modules/ folder in the path + if (strings.lastIndexOf(file.path, "node_modules" ++ std.fs.path.sep_str)) |last_node_modules_folder| { + const node_modules_folder_offset = last_node_modules_folder + ("node_modules" ++ std.fs.path.sep_str).len; + // Determine the package name by looking at the next separator + if (strings.indexOfChar(file.path[node_modules_folder_offset..], std.fs.path.sep)) |package_name_length| { + if ((r.dirInfoCached(file.path[0 .. node_modules_folder_offset + package_name_length]) catch null)) |package_dir_info| { + if (package_dir_info.package_json) |package_json| { + return MatchResult{ + .path_pair = .{ .primary = Path.init(file.path) }, + .diff_case = file.diff_case, + .dirname_fd = file.dirname_fd, + .package_json = package_json, + .file_fd = file.file_fd, + }; } } } - - return MatchResult{ - .path_pair = .{ .primary = Path.init(file.path) }, - .diff_case = file.diff_case, - .dirname_fd = file.dirname_fd, - .file_fd = file.file_fd, - }; } - // Is this a directory? + return MatchResult{ + .path_pair = .{ .primary = Path.init(file.path) }, + .diff_case = file.diff_case, + .dirname_fd = file.dirname_fd, + .file_fd = file.file_fd, + }; + } + + // Is this a directory? + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path}) catch {}; + debug.increaseIndent() catch {}; + } + + defer { if (r.debug_logs) |*debug| { - debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path}) catch {}; - debug.increaseIndent() catch {}; + debug.decreaseIndent() catch {}; } + } + + const dir_info = (r.dirInfoCached(path) catch |err| { + if (comptime isDebug) Output.prettyErrorln("err: {s} reading {s}", .{ @errorName(err), path }); + return null; + }) orelse return null; + var package_json: ?*PackageJSON = null; + + // Try using the main field(s) from "package.json" + if (dir_info.package_json) |pkg_json| { + package_json = pkg_json; + if (pkg_json.main_fields.count() > 0) { + const main_field_values = pkg_json.main_fields; + const main_field_keys = r.opts.main_fields; + // TODO: check this works right. Not sure this will really work. + const auto_main = r.opts.main_fields.ptr == options.Platform.DefaultMainFields.get(r.opts.platform).ptr; - defer { if (r.debug_logs) |*debug| { - debug.decreaseIndent() catch {}; + debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text}) catch {}; } - } - const dir_info = (r.dirInfoCached(path) catch |err| { - if (comptime isDebug) Output.prettyErrorln("err: {s} reading {s}", .{ @errorName(err), path }); - return null; - }) orelse return null; - var package_json: ?*PackageJSON = null; - - // Try using the main field(s) from "package.json" - if (dir_info.package_json) |pkg_json| { - package_json = pkg_json; - if (pkg_json.main_fields.count() > 0) { - const main_field_values = pkg_json.main_fields; - const main_field_keys = r.opts.main_fields; - // TODO: check this works right. Not sure this will really work. 
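The `auto_main` flag below drives the "module" vs "main" choice spelled out in the comments that follow: prefer "module" for ESM-style imports, fall back to "main" for require(), so both files can coexist in one bundle. A simplified standalone sketch of that rule (hypothetical names, ignoring the index-file fallback):

const std = @import("std");

const ImportKind = enum { stmt, dynamic, require };

fn pickMainField(kind: ImportKind, has_module_field: bool) []const u8 {
    if (has_module_field and kind != .require) return "module";
    return "main";
}

test "import prefers module, require prefers main" {
    try std.testing.expectEqualStrings("module", pickMainField(.stmt, true));
    try std.testing.expectEqualStrings("main", pickMainField(.require, true));
    try std.testing.expectEqualStrings("main", pickMainField(.dynamic, false));
}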
- const auto_main = r.opts.main_fields.ptr == options.Platform.DefaultMainFields.get(r.opts.platform).ptr; + for (main_field_keys) |key| { + const field_rel_path = (main_field_values.get(key)) orelse { + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Did not find main field \"{s}\"", .{key}) catch {}; + } + continue; + }; - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text}) catch {}; - } + var _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue; - for (main_field_keys) |key| { - const field_rel_path = (main_field_values.get(key)) orelse { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Did not find main field \"{s}\"", .{key}) catch {}; - } - continue; - }; + // If the user did not manually configure a "main" field order, then + // use a special per-module automatic algorithm to decide whether to + // use "module" or "main" based on whether the package is imported + // using "import" or "require". + if (auto_main and strings.eqlComptime(key, "module")) { + var absolute_result: ?MatchResult = null; - var _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue; + if (main_field_values.get("main")) |main_rel_path| { + if (main_rel_path.len > 0) { + absolute_result = r.loadFromMainField(path, dir_info, main_rel_path, "main", extension_order); + } + } else { + // Some packages have a "module" field without a "main" field but + // still have an implicit "index.js" file. In that case, treat that + // as the value for "main". + absolute_result = r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order); + } - // If the user did not manually configure a "main" field order, then - // use a special per-module automatic algorithm to decide whether to - // use "module" or "main" based on whether the package is imported - // using "import" or "require". - if (auto_main and strings.eqlComptime(key, "module")) { - var absolute_result: ?MatchResult = null; + if (absolute_result) |auto_main_result| { + // If both the "main" and "module" fields exist, use "main" if the + // path is for "require" and "module" if the path is for "import". + // If we're using "module", return enough information to be able to + // fall back to "main" later if something ended up using "require()" + // with this same path. The goal of this code is to avoid having + // both the "module" file and the "main" file in the bundle at the + // same time. + if (kind != ast.ImportKind.require) { + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text }) catch {}; - if (main_field_values.get("main")) |main_rel_path| { - if (main_rel_path.len > 0) { - absolute_result = r.loadFromMainField(path, dir_info, main_rel_path, "main", extension_order); + debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text}) catch {}; } - } else { - // Some packages have a "module" field without a "main" field but - // still have an implicit "index.js" file. In that case, treat that - // as the value for "main". - absolute_result = r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order); - } - - if (absolute_result) |auto_main_result| { - // If both the "main" and "module" fields exist, use "main" if the - // path is for "require" and "module" if the path is for "import". 
- // If we're using "module", return enough information to be able to - // fall back to "main" later if something ended up using "require()" - // with this same path. The goal of this code is to avoid having - // both the "module" file and the "main" file in the bundle at the - // same time. - if (kind != ast.ImportKind.require) { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text }) catch {}; - - debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text}) catch {}; - } - return MatchResult{ - .path_pair = .{ - .primary = _result.path_pair.primary, - .secondary = auto_main_result.path_pair.primary, - }, - .diff_case = _result.diff_case, - .dirname_fd = _result.dirname_fd, - .package_json = package_json, - .file_fd = auto_main_result.file_fd, - }; - } else { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Resolved to \"{s}\" using the \"{s}\" field in \"{s}\"", .{ - auto_main_result.path_pair.primary.text, - key, - pkg_json.source.key_path.text, - }) catch {}; - } - var _auto_main_result = auto_main_result; - _auto_main_result.package_json = package_json; - return _auto_main_result; + return MatchResult{ + .path_pair = .{ + .primary = _result.path_pair.primary, + .secondary = auto_main_result.path_pair.primary, + }, + .diff_case = _result.diff_case, + .dirname_fd = _result.dirname_fd, + .package_json = package_json, + .file_fd = auto_main_result.file_fd, + }; + } else { + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Resolved to \"{s}\" using the \"{s}\" field in \"{s}\"", .{ + auto_main_result.path_pair.primary.text, + key, + pkg_json.source.key_path.text, + }) catch {}; } + var _auto_main_result = auto_main_result; + _auto_main_result.package_json = package_json; + return _auto_main_result; } } - - _result.package_json = _result.package_json orelse package_json; - return _result; } + + _result.package_json = _result.package_json orelse package_json; + return _result; } } + } + + // Look for an "index" file with known extensions + if (r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order)) |*res| { + res.package_json = res.package_json orelse package_json; + return res.*; + } + + return null; + } + + pub fn loadAsFile(r: *ThisResolver, path: string, extension_order: []const string) ?LoadResult { + var rfs: *Fs.FileSystem.RealFS = &r.fs.fs; - // Look for an "index" file with known extensions - if (r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order)) |*res| { - res.package_json = res.package_json orelse package_json; - return res.*; + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path}) catch {}; + debug.increaseIndent() catch {}; + } + defer { + if (r.debug_logs) |*debug| { + debug.decreaseIndent() catch {}; } + } + const dir_path = Dirname.dirname(path); + + const dir_entry: *Fs.FileSystem.RealFS.EntriesOption = rfs.readDirectory( + dir_path, + null, + ) catch { + return null; + }; + + if (@as(Fs.FileSystem.RealFS.EntriesOption.Tag, dir_entry.*) == .err) { + if (dir_entry.err.original_err != error.ENOENT) { + r.log.addErrorFmt( + null, + logger.Loc.Empty, + r.allocator, + "Cannot read directory \"{s}\": {s}", + .{ + r.prettyPath(Path.init(dir_path)), + @errorName(dir_entry.err.original_err), + }, + ) catch {}; + } return null; } - pub fn loadAsFile(r: *ThisResolver, path: string, extension_order: []const string) ?LoadResult { - var 
rfs: *Fs.FileSystem.RealFS = &r.fs.fs; + const entries = dir_entry.entries; - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path}) catch {}; - debug.increaseIndent() catch {}; - } - defer { + const base = std.fs.path.basename(path); + + // Try the plain path without any extensions + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Checking for file \"{s}\" ", .{base}) catch {}; + } + + if (entries.get(base)) |query| { + if (query.entry.kind(rfs) == .file) { if (r.debug_logs) |*debug| { - debug.decreaseIndent() catch {}; + debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {}; } - } - const dir_path = Dirname.dirname(path); + const abs_path = brk: { + if (query.entry.abs_path.isEmpty()) { + const abs_path_parts = [_]string{ query.entry.dir, query.entry.base() }; + query.entry.abs_path = PathString.init(r.fs.dirname_store.append(string, r.fs.absBuf(&abs_path_parts, &load_as_file_buf)) catch unreachable); + } - const dir_entry: *Fs.FileSystem.RealFS.EntriesOption = rfs.readDirectory( - dir_path, - null, - ) catch { - return null; - }; + break :brk query.entry.abs_path.slice(); + }; - if (@as(Fs.FileSystem.RealFS.EntriesOption.Tag, dir_entry.*) == .err) { - if (dir_entry.err.original_err != error.ENOENT) { - r.log.addErrorFmt( - null, - logger.Loc.Empty, - r.allocator, - "Cannot read directory \"{s}\": {s}", - .{ - r.prettyPath(Path.init(dir_path)), - @errorName(dir_entry.err.original_err), - }, - ) catch {}; - } - return null; + return LoadResult{ + .path = abs_path, + .diff_case = query.diff_case, + .dirname_fd = entries.fd, + .file_fd = query.entry.cache.fd, + }; } + } - const entries = dir_entry.entries; - - const base = std.fs.path.basename(path); + // Try the path with extensions + std.mem.copy(u8, &load_as_file_buf, path); + for (r.extension_order) |ext| { + var buffer = load_as_file_buf[0 .. path.len + ext.len]; + std.mem.copy(u8, buffer[path.len..buffer.len], ext); + const file_name = buffer[path.len - base.len .. buffer.len]; - // Try the plain path without any extensions if (r.debug_logs) |*debug| { - debug.addNoteFmt("Checking for file \"{s}\" ", .{base}) catch {}; + debug.addNoteFmt("Checking for file \"{s}\" ", .{buffer}) catch {}; } - if (entries.get(base)) |query| { + if (entries.get(file_name)) |query| { if (query.entry.kind(rfs) == .file) { if (r.debug_logs) |*debug| { - debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {}; + debug.addNoteFmt("Found file \"{s}\" ", .{buffer}) catch {}; } - const abs_path = brk: { - if (query.entry.abs_path.isEmpty()) { - const abs_path_parts = [_]string{ query.entry.dir, query.entry.base() }; - query.entry.abs_path = PathString.init(r.fs.dirname_store.append(string, r.fs.absBuf(&abs_path_parts, &load_as_file_buf)) catch unreachable); - } - - break :brk query.entry.abs_path.slice(); - }; - + // now that we've found it, we allocate it. return LoadResult{ - .path = abs_path, + .path = brk: { + query.entry.abs_path = if (query.entry.abs_path.isEmpty()) + PathString.init(r.fs.dirname_store.append(@TypeOf(buffer), buffer) catch unreachable) + else + query.entry.abs_path; + + break :brk query.entry.abs_path.slice(); + }, .diff_case = query.diff_case, .dirname_fd = entries.fd, .file_fd = query.entry.cache.fd, }; } } + } - // Try the path with extensions - std.mem.copy(u8, &load_as_file_buf, path); - for (r.extension_order) |ext| { - var buffer = load_as_file_buf[0 .. path.len + ext.len]; - std.mem.copy(u8, buffer[path.len..buffer.len], ext); - const file_name = buffer[path.len - base.len .. 
buffer.len];
+        // TypeScript-specific behavior: if the extension is ".js" or ".jsx", try
+        // replacing it with ".ts" or ".tsx". At the time of writing this specific
+        // behavior comes from the function "loadModuleFromFile()" in the file
+        // "moduleNameResolver.ts" in the TypeScript compiler source code. It
+        // contains this comment:
+        //
+        //   If that didn't work, try stripping a ".js" or ".jsx" extension and
+        //   replacing it with a TypeScript one; e.g. "./foo.js" can be matched
+        //   by "./foo.ts" or "./foo.d.ts"
+        //
+        // We don't care about ".d.ts" files because we can't do anything with
+        // those, so we ignore that part of the behavior.
+        //
+        // See the discussion here for more historical context:
+        // https://github.com/microsoft/TypeScript/issues/4595
+        if (strings.lastIndexOfChar(base, '.')) |last_dot| {
+            const ext = base[last_dot..base.len];
+            if (strings.eqlComptime(ext, ".js") or strings.eqlComptime(ext, ".jsx")) {
+                const segment = base[0..last_dot];
+                var tail = load_as_file_buf[path.len - base.len ..];
+                std.mem.copy(u8, tail, segment);
+
+                const exts = comptime [_]string{ ".ts", ".tsx" };
+
+                inline for (exts) |ext_to_replace| {
+                    var buffer = tail[0 .. segment.len + ext_to_replace.len];
+                    std.mem.copy(u8, buffer[segment.len..buffer.len], ext_to_replace);
+
+                    if (entries.get(buffer)) |query| {
+                        if (query.entry.kind(rfs) == .file) {
+                            if (r.debug_logs) |*debug| {
+                                debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer}) catch {};
+                            }

-            if (r.debug_logs) |*debug| {
-                debug.addNoteFmt("Checking for file \"{s}\" ", .{buffer}) catch {};
-            }
+                            return LoadResult{
+                                .path = brk: {
+                                    if (query.entry.abs_path.isEmpty()) {
+                                        // Should already have a trailing slash so we shouldn't need to worry.
+                                        var parts = [_]string{ query.entry.dir, buffer };
+                                        query.entry.abs_path = PathString.init(r.fs.filename_store.append(@TypeOf(parts), parts) catch unreachable);
+                                    }

-            if (entries.get(file_name)) |query| {
-                if (query.entry.kind(rfs) == .file) {
-                    if (r.debug_logs) |*debug| {
-                        debug.addNoteFmt("Found file \"{s}\" ", .{buffer}) catch {};
+                                    break :brk query.entry.abs_path.slice();
+                                },
+                                .diff_case = query.diff_case,
+                                .dirname_fd = entries.fd,
+                                .file_fd = query.entry.cache.fd,
+                            };
                         }
-
-                    // now that we've found it, we allocate it.
-                    return LoadResult{
-                        .path = brk: {
-                            query.entry.abs_path = if (query.entry.abs_path.isEmpty())
-                                PathString.init(r.fs.dirname_store.append(@TypeOf(buffer), buffer) catch unreachable)
-                            else
-                                query.entry.abs_path;
-
-                            break :brk query.entry.abs_path.slice();
-                        },
-                        .diff_case = query.diff_case,
-                        .dirname_fd = entries.fd,
-                        .file_fd = query.entry.cache.fd,
-                    };
                 }
-            }
-        }
-
-        // TypeScript-specific behavior: if the extension is ".js" or ".jsx", try
-        // replacing it with ".ts" or ".tsx". At the time of writing this specific
-        // behavior comes from the function "loadModuleFromFile()" in the file
-        // "moduleNameResolver.ts" in the TypeScript compiler source code. It
-        // contains this comment:
-        //
-        //   If that didn't work, try stripping a ".js" or ".jsx" extension and
-        //   replacing it with a TypeScript one; e.g. "./foo.js" can be matched
-        //   by "./foo.ts" or "./foo.d.ts"
-        //
-        // We don't care about ".d.ts" files because we can't do anything with
-        // those, so we ignore that part of the behavior.
- // - // See the discussion here for more historical context: - // https://github.com/microsoft/TypeScript/issues/4595 - if (strings.lastIndexOfChar(base, '.')) |last_dot| { - const ext = base[last_dot..base.len]; - if (strings.eqlComptime(ext, ".js") or strings.eqlComptime(ext, ".jsx")) { - const segment = base[0..last_dot]; - var tail = load_as_file_buf[path.len - base.len ..]; - std.mem.copy(u8, tail, segment); - - const exts = comptime [_]string{ ".ts", ".tsx" }; - - inline for (exts) |ext_to_replace| { - var buffer = tail[0 .. segment.len + ext_to_replace.len]; - std.mem.copy(u8, buffer[segment.len..buffer.len], ext_to_replace); - - if (entries.get(buffer)) |query| { - if (query.entry.kind(rfs) == .file) { - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer}) catch {}; - } - - return LoadResult{ - .path = brk: { - if (query.entry.abs_path.isEmpty()) { - // Should already have a trailing slash so we shouldn't need to worry. - var parts = [_]string{ query.entry.dir, buffer }; - query.entry.abs_path = PathString.init(r.fs.filename_store.append(@TypeOf(parts), parts) catch unreachable); - } - - break :brk query.entry.abs_path.slice(); - }, - .diff_case = query.diff_case, - .dirname_fd = entries.fd, - .file_fd = query.entry.cache.fd, - }; - } - } - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base}) catch {}; - } + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base}) catch {}; } } } + } - if (r.debug_logs) |*debug| { - debug.addNoteFmt("Failed to find \"{s}\" ", .{path}) catch {}; - } + if (r.debug_logs) |*debug| { + debug.addNoteFmt("Failed to find \"{s}\" ", .{path}) catch {}; + } - if (comptime FeatureFlags.watch_directories) { - // For existent directories which don't find a match - // Start watching it automatically, - // onStartWatchingDirectory fn decides whether to actually watch. - if (r.onStartWatchingDirectoryCtx) |ctx| { - r.onStartWatchingDirectory.?(ctx, entries.dir, entries.fd); - } + if (comptime FeatureFlags.watch_directories) { + // For existent directories which don't find a match + // Start watching it automatically, + // onStartWatchingDirectory fn decides whether to actually watch. 
+            if (r.onStartWatchingDirectoryCtx) |ctx| {
+                r.onStartWatchingDirectory.?(ctx, entries.dir, entries.fd);
             }
-            return null;
         }
+        return null;
+    }

-        fn dirInfoUncached(
-            r: *ThisResolver,
-            info: *DirInfo,
-            path: string,
-            _entries: *Fs.FileSystem.RealFS.EntriesOption,
-            _result: allocators.Result,
-            dir_entry_index: allocators.IndexType,
-            parent: ?*DirInfo,
-            parent_index: allocators.IndexType,
-            fd: FileDescriptorType,
-        ) anyerror!void {
-            var result = _result;
-
-            var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
-            var entries = _entries.entries;
-
-            info.* = DirInfo{
-                .abs_path = path,
-                // .abs_real_path = path,
-                .parent = parent_index,
-                .entries = dir_entry_index,
-            };
+    fn dirInfoUncached(
+        r: *ThisResolver,
+        info: *DirInfo,
+        path: string,
+        _entries: *Fs.FileSystem.RealFS.EntriesOption,
+        _result: allocators.Result,
+        dir_entry_index: allocators.IndexType,
+        parent: ?*DirInfo,
+        parent_index: allocators.IndexType,
+        fd: FileDescriptorType,
+    ) anyerror!void {
+        var result = _result;
+
+        var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
+        var entries = _entries.entries;
+
+        info.* = DirInfo{
+            .abs_path = path,
+            // .abs_real_path = path,
+            .parent = parent_index,
+            .entries = dir_entry_index,
+        };

-            // A "node_modules" directory isn't allowed to directly contain another "node_modules" directory
-            var base = std.fs.path.basename(path);
+        // A "node_modules" directory isn't allowed to directly contain another "node_modules" directory
+        var base = std.fs.path.basename(path);

-            // base must not end with a trailing path separator
-            if (base.len > 1 and base[base.len - 1] == std.fs.path.sep) base = base[0 .. base.len - 1];
+        // base must not end with a trailing path separator
+        if (base.len > 1 and base[base.len - 1] == std.fs.path.sep) base = base[0 .. base.len - 1];

-            info.is_node_modules = strings.eqlComptime(base, "node_modules");
+        info.is_node_modules = strings.eqlComptime(base, "node_modules");

-            // if (entries != null) {
-            if (!info.is_node_modules) {
-                if (entries.getComptimeQuery("node_modules")) |entry| {
-                    info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
-                }
+        // if (entries != null) {
+        if (!info.is_node_modules) {
+            if (entries.getComptimeQuery("node_modules")) |entry| {
+                info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
             }
-            // }
+        }
+        // }

-            if (parent != null) {
+        if (parent != null) {

-                // Propagate the browser scope into child directories
-                info.enclosing_browser_scope = parent.?.enclosing_browser_scope;
-                info.package_json_for_browser_field = parent.?.package_json_for_browser_field;
-                info.enclosing_tsconfig_json = parent.?.enclosing_tsconfig_json;
-                info.enclosing_package_json = parent.?.package_json orelse parent.?.enclosing_package_json;
+            // Propagate the browser scope into child directories
+            info.enclosing_browser_scope = parent.?.enclosing_browser_scope;
+            info.package_json_for_browser_field = parent.?.package_json_for_browser_field;
+            info.enclosing_tsconfig_json = parent.?.enclosing_tsconfig_json;
+            info.enclosing_package_json = parent.?.package_json orelse parent.?.enclosing_package_json;

-                // Make sure "absRealPath" is the real path of the directory (resolving any symlinks)
-                if (!r.opts.preserve_symlinks) {
-                    if (parent.?.getEntries()) |parent_entries| {
-                        if (parent_entries.get(base)) |lookup| {
+            // Make sure "absRealPath" is the real path of the directory (resolving any symlinks)
+            if (!r.opts.preserve_symlinks) {
+                if (parent.?.getEntries()) |parent_entries| {
+                    if (parent_entries.get(base)) |lookup| {
+                        if (entries.fd != 0 and lookup.entry.cache.fd == 0) lookup.entry.cache.fd = entries.fd;
+                        const entry = lookup.entry;

-                            var symlink = entry.symlink(rfs);
-                            if (symlink.len > 0) {
-                                if (r.debug_logs) |*logs| {
-                                    try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
-                                }
-                                info.abs_real_path = symlink;
-                            } else if (parent.?.abs_real_path.len > 0) {
-                                // this might leak a little; I'm not sure
-                                const parts = [_]string{ parent.?.abs_real_path, base };
-                                symlink = r.fs.dirname_store.append(string, r.fs.absBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
-
-                                if (r.debug_logs) |*logs| {
-                                    try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
-                                }
-                                lookup.entry.cache.symlink = PathString.init(symlink);
-                                info.abs_real_path = symlink;
+                        var symlink = entry.symlink(rfs);
+                        if (symlink.len > 0) {
+                            if (r.debug_logs) |*logs| {
+                                try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+                            }
+                            info.abs_real_path = symlink;
+                        } else if (parent.?.abs_real_path.len > 0) {
+                            // this might leak a little; I'm not sure
+                            const parts = [_]string{ parent.?.abs_real_path, base };
+                            symlink = r.fs.dirname_store.append(string, r.fs.absBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
+
+                            if (r.debug_logs) |*logs| {
+                                try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+                            }
+                            lookup.entry.cache.symlink = PathString.init(symlink);
+                            info.abs_real_path = symlink;
                         }
                     }
                 }
             }
+        }

-            // Record if this directory has a package.json file
-            if (entries.getComptimeQuery("package.json")) |lookup| {
-                const entry = lookup.entry;
-                if (entry.kind(rfs) == .file) {
-                    info.package_json = r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0) catch null;
+        // Record if this directory has a package.json file
+        if (entries.getComptimeQuery("package.json")) |lookup| {
+            const entry = lookup.entry;
+            if (entry.kind(rfs) == .file) {
+                info.package_json = r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0) catch null;

-                    if (info.package_json) |pkg| {
-                        if (pkg.browser_map.count() > 0) {
-                            info.enclosing_browser_scope = result.index;
-                            info.package_json_for_browser_field = pkg;
-                        }
-                        info.enclosing_package_json = pkg;
+                if (info.package_json) |pkg| {
+                    if (pkg.browser_map.count() > 0) {
+                        info.enclosing_browser_scope = result.index;
+                        info.package_json_for_browser_field = pkg;
+                    }
+                    info.enclosing_package_json = pkg;

-                        if (r.debug_logs) |*logs| {
-                            logs.addNoteFmt("Resolved package.json in \"{s}\"", .{
-                                path,
-                            }) catch unreachable;
-                        }
+                    if (r.debug_logs) |*logs| {
+                        logs.addNoteFmt("Resolved package.json in \"{s}\"", .{
+                            path,
+                        }) catch unreachable;
                     }
                 }
             }
+        }

-            // Record if this directory has a tsconfig.json or jsconfig.json file
-            {
-                var tsconfig_path: ?string = null;
-                if (r.opts.tsconfig_override == null) {
-                    if (entries.getComptimeQuery("tsconfig.json")) |lookup| {
+        // Record if this directory has a tsconfig.json or jsconfig.json file
+        {
+            var tsconfig_path: ?string = null;
+            if (r.opts.tsconfig_override == null) {
+                if (entries.getComptimeQuery("tsconfig.json")) |lookup| {
+                    const entry = lookup.entry;
+                    if (entry.kind(rfs) == .file) {
+                        const parts = [_]string{ path, "tsconfig.json" };
+
+                        tsconfig_path = r.fs.absBuf(&parts, &dir_info_uncached_filename_buf);
+                    }
+                }
+
if (tsconfig_path == null) {
+                    if (entries.getComptimeQuery("jsconfig.json")) |lookup| {
                         const entry = lookup.entry;
                         if (entry.kind(rfs) == .file) {
-                            const parts = [_]string{ path, "tsconfig.json" };
-
+                            const parts = [_]string{ path, "jsconfig.json" };
                             tsconfig_path = r.fs.absBuf(&parts, &dir_info_uncached_filename_buf);
                         }
                     }
-                    if (tsconfig_path == null) {
-                        if (entries.getComptimeQuery("jsconfig.json")) |lookup| {
-                            const entry = lookup.entry;
-                            if (entry.kind(rfs) == .file) {
-                                const parts = [_]string{ path, "jsconfig.json" };
-                                tsconfig_path = r.fs.absBuf(&parts, &dir_info_uncached_filename_buf);
-                            }
-                        }
-                    }
-                } else if (parent == null) {
-                    tsconfig_path = r.opts.tsconfig_override.?;
                 }
+            } else if (parent == null) {
+                tsconfig_path = r.opts.tsconfig_override.?;
+            }

-                if (tsconfig_path) |tsconfigpath| {
-                    info.tsconfig_json = r.parseTSConfig(
-                        tsconfigpath,
-                        if (FeatureFlags.store_file_descriptors) fd else 0,
-                    ) catch |err| brk: {
-                        const pretty = r.prettyPath(Path.init(tsconfigpath));
-
-                        if (err == error.ENOENT) {
-                            r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot find tsconfig file \"{s}\"", .{pretty}) catch unreachable;
-                        } else if (err != error.ParseErrorAlreadyLogged and err != error.IsDir) {
-                            r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot read file \"{s}\": {s}", .{ pretty, @errorName(err) }) catch unreachable;
-                        }
-                        break :brk null;
-                    };
-                    info.enclosing_tsconfig_json = info.tsconfig_json;
-                }
+            if (tsconfig_path) |tsconfigpath| {
+                info.tsconfig_json = r.parseTSConfig(
+                    tsconfigpath,
+                    if (FeatureFlags.store_file_descriptors) fd else 0,
+                ) catch |err| brk: {
+                    const pretty = r.prettyPath(Path.init(tsconfigpath));
+
+                    if (err == error.ENOENT) {
+                        r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot find tsconfig file \"{s}\"", .{pretty}) catch unreachable;
+                    } else if (err != error.ParseErrorAlreadyLogged and err != error.IsDir) {
+                        r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot read file \"{s}\": {s}", .{ pretty, @errorName(err) }) catch unreachable;
+                    }
+                    break :brk null;
+                };
+                info.enclosing_tsconfig_json = info.tsconfig_json;
             }
         }
-    };
-}
-
-pub const Resolver = NewResolver(
-    true,
-);
-pub const ResolverUncached = NewResolver(
-    false,
-);
+    }
+};

 pub const Dirname = struct {
     pub fn dirname(path: string) string {
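// The "last node_modules/ folder" scan in loadAsFileOrDirectory above is plain
// slice arithmetic. A minimal standalone sketch of it follows; `packageRoot`
// is a hypothetical name, not the resolver's API, and the test assumes POSIX
// path separators.
const std = @import("std");

fn packageRoot(file_path: []const u8) ?[]const u8 {
    const marker = "node_modules" ++ std.fs.path.sep_str;
    // Skip just past the last "node_modules/" segment...
    const offset = (std.mem.lastIndexOf(u8, file_path, marker) orelse return null) + marker.len;
    // ...then cut at the separator that ends the package name. That prefix is
    // the directory whose package.json the resolver loads via dirInfoCached().
    const name_len = std.mem.indexOfScalar(u8, file_path[offset..], std.fs.path.sep) orelse return null;
    return file_path[0 .. offset + name_len];
}

test "packageRoot finds the innermost package directory" {
    try std.testing.expectEqualStrings(
        "/app/node_modules/react",
        packageRoot("/app/node_modules/react/index.js").?,
    );
}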
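// The "module"/"main" auto-selection above reduces to a small decision table.
// A condensed standalone sketch with simplified, hypothetical types: `Pair`
// mirrors only the primary/secondary part of the real MatchResult, and `stmt`
// stands in for any non-require import kind.
const ImportKind = enum { require, stmt };

const Pair = struct {
    primary: []const u8,
    secondary: ?[]const u8 = null,
};

fn chooseMainField(kind: ImportKind, module_path: []const u8, main_path: []const u8) Pair {
    return switch (kind) {
        // Import statement: prefer "module", but remember "main" as the
        // secondary path so a later require() of the same package can fall
        // back to it instead of pulling a second copy into the bundle.
        .stmt => .{ .primary = module_path, .secondary = main_path },
        // require(): use "main" directly.
        .require => .{ .primary = main_path },
    };
}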
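// Likewise, the TypeScript-style ".js"/".jsx" rewrite above only ever produces
// two candidate names per base name. A standalone sketch (hypothetical helper;
// reuses the `std` import from the first sketch; the patch's buffer reuse and
// directory-entry lookups are elided):
fn printTsCandidates(base: []const u8) void {
    const last_dot = std.mem.lastIndexOfScalar(u8, base, '.') orelse return;
    const ext = base[last_dot..];
    if (!std.mem.eql(u8, ext, ".js") and !std.mem.eql(u8, ext, ".jsx")) return;
    const stem = base[0..last_dot];
    inline for (.{ ".ts", ".tsx" }) |candidate| {
        // ".d.ts" is deliberately not generated, matching the comment in the patch.
        std.debug.print("{s}{s}\n", .{ stem, candidate });
    }
}
// e.g. printTsCandidates("foo.js") prints "foo.ts" then "foo.tsx".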