author Jarred Sumner <jarred@jarredsumner.com> 2021-06-04 16:06:38 -0700
committer Jarred Sumner <jarred@jarredsumner.com> 2021-06-04 16:06:38 -0700
commit 65f4ea1e189aad169efc010052eadf378202e653 (patch)
tree 4c7850c5101de23bb691c21e68769174fcc10db8
parent 3d827342a57f424add2e62bfe5243fadbaa92600 (diff)
Generate different versions of Bundler, Resolver, and Caches at comptime based on whether we're serving over HTTP
Former-commit-id: e1a88527060e187ab21ca8890ea9bce2b999885a
-rw-r--r--  src/bundler.zig                   945
-rw-r--r--  src/cache.zig                     346
-rw-r--r--  src/fs.zig                         35
-rw-r--r--  src/http.zig                        2
-rw-r--r--  src/js_printer.zig                  8
-rw-r--r--  src/linker.zig                    476
-rw-r--r--  src/options.zig                     4
-rw-r--r--  src/resolver/package_json.zig       4
-rw-r--r--  src/resolver/resolver.zig        2716
-rw-r--r--  src/resolver/tsconfig_json.zig      3
10 files changed, 2303 insertions, 2236 deletions
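
The heart of this commit is the pattern below: a function that takes a comptime bool and returns a freshly specialized struct type, so the HTTP server and the CLI bundler each get their own fully compiled variant instead of branching at runtime. A minimal standalone sketch of that pattern; CachedStore, UncachedStore, and describe are illustrative stand-ins, not Bun's actual types:

    const std = @import("std");

    // Illustrative stand-ins for the cached and uncached implementations.
    const CachedStore = struct {
        pub const name = "cached";
    };
    const UncachedStore = struct {
        pub const name = "uncached";
    };

    pub fn NewBundler(comptime cache_files: bool) type {
        return struct {
            // The comptime flag picks the backing implementation, so each
            // variant is a distinct, fully specialized type.
            pub const Store = if (cache_files) CachedStore else UncachedStore;

            pub fn describe() []const u8 {
                return Store.name;
            }
        };
    }

    pub const Bundler = NewBundler(true); // CLI builds: cache file contents
    pub const ServeBundler = NewBundler(false); // HTTP serving: stay stateless

    test "the comptime flag selects the implementation" {
        try std.testing.expectEqualStrings("cached", Bundler.describe());
        try std.testing.expectEqualStrings("uncached", ServeBundler.describe());
    }

Both variants coexist in one binary, which is how src/http.zig below can switch to bundler.ServeBundler while the CLI keeps bundler.Bundler.
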
diff --git a/src/bundler.zig b/src/bundler.zig
index e287e6235..2e8c0133c 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -15,7 +15,7 @@ usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const Fs = @import("fs.zig");
const Api = @import("api/schema.zig").Api;
-const Resolver = @import("./resolver/resolver.zig");
+const _resolver = @import("./resolver/resolver.zig");
const sync = @import("sync.zig");
const ThreadPool = sync.ThreadPool;
const ThreadSafeHashMap = @import("./thread_safe_hash_map.zig");
@@ -24,17 +24,18 @@ const allocators = @import("./allocators.zig");
const MimeType = @import("./http/mime_type.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
const runtime = @import("./runtime.zig");
-const Linker = linker.Linker;
const Timer = @import("./timer.zig");
+const DebugLogs = _resolver.DebugLogs;
+
pub const ServeResult = struct {
file: options.OutputFile,
mime_type: MimeType,
};
// const BundleMap =
-pub const ResolveResults = ThreadSafeHashMap.ThreadSafeStringHashMap(Resolver.Resolver.Result);
-pub const ResolveQueue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic);
+pub const ResolveResults = ThreadSafeHashMap.ThreadSafeStringHashMap(_resolver.Result);
+pub const ResolveQueue = std.fifo.LinearFifo(_resolver.Result, std.fifo.LinearFifoBufferType.Dynamic);
// How it works end-to-end
// 1. Resolve a file path from input using the resolver
@@ -94,543 +95,554 @@ pub const ResolveQueue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.
// 7. IF does match, serve it with that hash as a weak ETag
// 8. This should also just work unprefixed, but that will be served Cache-Control: private, no-store
-pub const Bundler = struct {
- options: options.BundleOptions,
- log: *logger.Log,
- allocator: *std.mem.Allocator,
- result: options.TransformResult = undefined,
- resolver: Resolver.Resolver,
- fs: *Fs.FileSystem,
- // thread_pool: *ThreadPool,
- output_files: std.ArrayList(options.OutputFile),
- resolve_results: *ResolveResults,
- resolve_queue: ResolveQueue,
- elapsed: i128 = 0,
- needs_runtime: bool = false,
- linker: Linker,
- timer: Timer = Timer{},
-
- pub const RuntimeCode = @embedFile("./runtime.js");
+pub const ParseResult = struct {
+ source: logger.Source,
+ loader: options.Loader,
+ ast: js_ast.Ast,
+};
- // to_bundle:
+pub fn NewBundler(cache_files: bool) type {
+ return struct {
+ const Linker = if (cache_files) linker.Linker else linker.ServeLinker;
+ pub const Resolver = if (cache_files) _resolver.Resolver else _resolver.ResolverUncached;
- // thread_pool: *ThreadPool,
+ const ThisBundler = @This();
- pub fn init(
- allocator: *std.mem.Allocator,
+ options: options.BundleOptions,
log: *logger.Log,
- opts: Api.TransformOptions,
- ) !Bundler {
- js_ast.Expr.Data.Store.create(allocator);
- js_ast.Stmt.Data.Store.create(allocator);
- var fs = try Fs.FileSystem.init1(allocator, opts.absolute_working_dir, opts.serve orelse false);
- const bundle_options = try options.BundleOptions.fromApi(allocator, fs, log, opts);
-
- // var pool = try allocator.create(ThreadPool);
- // try pool.init(ThreadPool.InitConfig{
- // .allocator = allocator,
- // });
- return Bundler{
- .options = bundle_options,
- .fs = fs,
- .allocator = allocator,
- .resolver = Resolver.Resolver.init1(allocator, log, fs, bundle_options),
- .log = log,
- // .thread_pool = pool,
- .linker = undefined,
- .result = options.TransformResult{ .outbase = bundle_options.output_dir },
- .resolve_results = try ResolveResults.init(allocator),
- .resolve_queue = ResolveQueue.init(allocator),
- .output_files = std.ArrayList(options.OutputFile).init(allocator),
- };
- }
-
- pub fn configureLinker(bundler: *Bundler) void {
- bundler.linker = Linker.init(
- bundler.allocator,
- bundler.log,
- &bundler.resolve_queue,
- &bundler.options,
- &bundler.resolver,
- bundler.resolve_results,
- bundler.fs,
- );
- }
-
- pub fn resetStore(bundler: *Bundler) void {
- js_ast.Expr.Data.Store.reset();
- js_ast.Stmt.Data.Store.reset();
- }
-
- pub fn buildWithResolveResult(
- bundler: *Bundler,
- resolve_result: Resolver.Resolver.Result,
allocator: *std.mem.Allocator,
- loader: options.Loader,
- comptime Writer: type,
- writer: Writer,
- ) !usize {
- if (resolve_result.is_external) {
- return 0;
+ result: options.TransformResult = undefined,
+ resolver: Resolver,
+ fs: *Fs.FileSystem,
+ // thread_pool: *ThreadPool,
+ output_files: std.ArrayList(options.OutputFile),
+ resolve_results: *ResolveResults,
+ resolve_queue: ResolveQueue,
+ elapsed: i128 = 0,
+ needs_runtime: bool = false,
+ linker: Linker,
+ timer: Timer = Timer{},
+
+ pub const RuntimeCode = @embedFile("./runtime.js");
+
+ // to_bundle:
+
+ // thread_pool: *ThreadPool,
+
+ pub fn init(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ opts: Api.TransformOptions,
+ ) !ThisBundler {
+ js_ast.Expr.Data.Store.create(allocator);
+ js_ast.Stmt.Data.Store.create(allocator);
+ var fs = try Fs.FileSystem.init1(allocator, opts.absolute_working_dir, opts.serve orelse false);
+ const bundle_options = try options.BundleOptions.fromApi(allocator, fs, log, opts);
+
+ // var pool = try allocator.create(ThreadPool);
+ // try pool.init(ThreadPool.InitConfig{
+ // .allocator = allocator,
+ // });
+ return ThisBundler{
+ .options = bundle_options,
+ .fs = fs,
+ .allocator = allocator,
+ .resolver = Resolver.init1(allocator, log, fs, bundle_options),
+ .log = log,
+ // .thread_pool = pool,
+ .linker = undefined,
+ .result = options.TransformResult{ .outbase = bundle_options.output_dir },
+ .resolve_results = try ResolveResults.init(allocator),
+ .resolve_queue = ResolveQueue.init(allocator),
+ .output_files = std.ArrayList(options.OutputFile).init(allocator),
+ };
}
- errdefer bundler.resetStore();
-
- var file_path = resolve_result.path_pair.primary;
- file_path.pretty = allocator.dupe(u8, bundler.fs.relativeTo(file_path.text)) catch unreachable;
-
- var old_bundler_allocator = bundler.allocator;
- bundler.allocator = allocator;
- defer bundler.allocator = old_bundler_allocator;
- var result = bundler.parse(allocator, file_path, loader, resolve_result.dirname_fd) orelse {
- bundler.resetStore();
- return 0;
- };
- var old_linker_allocator = bundler.linker.allocator;
- defer bundler.linker.allocator = old_linker_allocator;
- bundler.linker.allocator = allocator;
- try bundler.linker.link(file_path, &result);
-
- return try bundler.print(
- result,
- Writer,
- writer,
- );
- // output_file.version = if (resolve_result.is_from_node_modules) resolve_result.package_json_version else null;
-
- }
-
- pub fn buildWithResolveResultEager(bundler: *Bundler, resolve_result: Resolver.Resolver.Result) !?options.OutputFile {
- if (resolve_result.is_external) {
- return null;
+ pub fn configureLinker(bundler: *ThisBundler) void {
+ bundler.linker = Linker.init(
+ bundler.allocator,
+ bundler.log,
+ &bundler.resolve_queue,
+ &bundler.options,
+ &bundler.resolver,
+ bundler.resolve_results,
+ bundler.fs,
+ );
}
- errdefer js_ast.Expr.Data.Store.reset();
- errdefer js_ast.Stmt.Data.Store.reset();
-
- // Step 1. Parse & scan
- const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
- var file_path = resolve_result.path_pair.primary;
- file_path.pretty = Linker.relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
+ pub fn resetStore(bundler: *ThisBundler) void {
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
+ }
- switch (loader) {
- .jsx, .tsx, .js, .json => {
- var result = bundler.parse(bundler.allocator, file_path, loader, resolve_result.dirname_fd) orelse {
- js_ast.Expr.Data.Store.reset();
- js_ast.Stmt.Data.Store.reset();
- return null;
- };
+ pub fn buildWithResolveResult(
+ bundler: *ThisBundler,
+ resolve_result: _resolver.Result,
+ allocator: *std.mem.Allocator,
+ loader: options.Loader,
+ comptime Writer: type,
+ writer: Writer,
+ ) !usize {
+ if (resolve_result.is_external) {
+ return 0;
+ }
- try bundler.linker.link(file_path, &result);
- var output_file = options.OutputFile{
- .input = file_path,
- .loader = loader,
- .value = undefined,
- };
+ errdefer bundler.resetStore();
- const output_dir = bundler.options.output_dir_handle.?;
- if (std.fs.path.dirname(file_path.pretty)) |dirname| {
- try output_dir.makePath(dirname);
- }
+ var file_path = resolve_result.path_pair.primary;
+ file_path.pretty = allocator.dupe(u8, bundler.fs.relativeTo(file_path.text)) catch unreachable;
- var file = try output_dir.createFile(file_path.pretty, .{});
- output_file.size = try bundler.print(
- result,
- js_printer.FileWriter,
- js_printer.NewFileWriter(file),
- );
+ var old_bundler_allocator = bundler.allocator;
+ bundler.allocator = allocator;
+ defer bundler.allocator = old_bundler_allocator;
+ var result = bundler.parse(allocator, file_path, loader, resolve_result.dirname_fd) orelse {
+ bundler.resetStore();
+ return 0;
+ };
+ var old_linker_allocator = bundler.linker.allocator;
+ defer bundler.linker.allocator = old_linker_allocator;
+ bundler.linker.allocator = allocator;
+ try bundler.linker.link(file_path, &result);
+
+ return try bundler.print(
+ result,
+ Writer,
+ writer,
+ );
+ // output_file.version = if (resolve_result.is_from_node_modules) resolve_result.package_json_version else null;
- var file_op = options.OutputFile.FileOperation.fromFile(file.handle, file_path.pretty);
- file_op.dir = output_dir.fd;
- file_op.fd = file.handle;
+ }
- if (bundler.fs.fs.needToCloseFiles()) {
- file.close();
- file_op.fd = 0;
- }
- file_op.is_tmpdir = false;
- output_file.value = .{ .move = file_op };
- return output_file;
- },
- // TODO:
- else => {
+ pub fn buildWithResolveResultEager(bundler: *ThisBundler, resolve_result: _resolver.Result) !?options.OutputFile {
+ if (resolve_result.is_external) {
return null;
- },
- }
- }
+ }
- pub fn print(
- bundler: *Bundler,
- result: ParseResult,
- comptime Writer: type,
- writer: Writer,
- ) !usize {
- const ast = result.ast;
- var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
+ errdefer js_ast.Expr.Data.Store.reset();
+ errdefer js_ast.Stmt.Data.Store.reset();
+
+ // Step 1. Parse & scan
+ const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
+ var file_path = resolve_result.path_pair.primary;
+ file_path.pretty = Linker.relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
+
+ switch (loader) {
+ .jsx, .tsx, .js, .json => {
+ var result = bundler.parse(bundler.allocator, file_path, loader, resolve_result.dirname_fd) orelse {
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
+ return null;
+ };
+
+ try bundler.linker.link(file_path, &result);
+ var output_file = options.OutputFile{
+ .input = file_path,
+ .loader = loader,
+ .value = undefined,
+ };
+
+ const output_dir = bundler.options.output_dir_handle.?;
+ if (std.fs.path.dirname(file_path.pretty)) |dirname| {
+ try output_dir.makePath(dirname);
+ }
- return try js_printer.printAst(
- Writer,
- writer,
- ast,
- js_ast.Symbol.Map.initList(symbols),
- &result.source,
- false,
- js_printer.Options{
- .to_module_ref = Ref.RuntimeRef,
- .externals = ast.externals,
- .runtime_imports = ast.runtime_imports,
- },
- &bundler.linker,
- );
- }
+ var file = try output_dir.createFile(file_path.pretty, .{});
+ output_file.size = try bundler.print(
+ result,
+ js_printer.FileWriter,
+ js_printer.NewFileWriter(file),
+ );
- pub const ParseResult = struct {
- source: logger.Source,
- loader: options.Loader,
- ast: js_ast.Ast,
- };
+ var file_op = options.OutputFile.FileOperation.fromFile(file.handle, file_path.pretty);
+ file_op.dir = output_dir.fd;
+ file_op.fd = file.handle;
- pub fn parse(bundler: *Bundler, allocator: *std.mem.Allocator, path: Fs.Path, loader: options.Loader, dirname_fd: StoredFileDescriptorType) ?ParseResult {
- if (FeatureFlags.tracing) {
- bundler.timer.start();
- }
- defer {
- if (FeatureFlags.tracing) {
- bundler.timer.stop();
- bundler.elapsed += bundler.timer.elapsed;
+ if (bundler.fs.fs.needToCloseFiles()) {
+ file.close();
+ file_op.fd = 0;
+ }
+ file_op.is_tmpdir = false;
+ output_file.value = .{ .move = file_op };
+ return output_file;
+ },
+ // TODO:
+ else => {
+ return null;
+ },
}
}
- var result: ParseResult = undefined;
- const entry = bundler.resolver.caches.fs.readFile(bundler.fs, path.text, dirname_fd) catch return null;
-
- const source = logger.Source.initFile(Fs.File{ .path = path, .contents = entry.contents }, bundler.allocator) catch return null;
- switch (loader) {
- .js, .jsx, .ts, .tsx => {
- var jsx = bundler.options.jsx;
- jsx.parse = loader.isJSX();
- var opts = js_parser.Parser.Options.init(jsx, loader);
- const value = (bundler.resolver.caches.js.parse(allocator, opts, bundler.options.define, bundler.log, &source) catch null) orelse return null;
- return ParseResult{
- .ast = value,
- .source = source,
- .loader = loader,
- };
- },
- .json => {
- var expr = json_parser.ParseJSON(&source, bundler.log, allocator) catch return null;
- var stmt = js_ast.Stmt.alloc(allocator, js_ast.S.ExportDefault{
- .value = js_ast.StmtOrExpr{ .expr = expr },
- .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
- }, logger.Loc{ .start = 0 });
- var stmts = allocator.alloc(js_ast.Stmt, 1) catch unreachable;
- stmts[0] = stmt;
- var parts = allocator.alloc(js_ast.Part, 1) catch unreachable;
- parts[0] = js_ast.Part{ .stmts = stmts };
-
- return ParseResult{
- .ast = js_ast.Ast.initTest(parts),
- .source = source,
- .loader = loader,
- };
- },
- .css => {
- return null;
- },
- else => Global.panic("Unsupported loader {s} for path: {s}", .{ loader, source.path.text }),
+ pub fn print(
+ bundler: *ThisBundler,
+ result: ParseResult,
+ comptime Writer: type,
+ writer: Writer,
+ ) !usize {
+ const ast = result.ast;
+ var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
+
+ return try js_printer.printAst(
+ Writer,
+ writer,
+ ast,
+ js_ast.Symbol.Map.initList(symbols),
+ &result.source,
+ false,
+ js_printer.Options{
+ .to_module_ref = Ref.RuntimeRef,
+ .externals = ast.externals,
+ .runtime_imports = ast.runtime_imports,
+ },
+ Linker,
+ &bundler.linker,
+ );
}
- return null;
- }
+ pub fn parse(bundler: *ThisBundler, allocator: *std.mem.Allocator, path: Fs.Path, loader: options.Loader, dirname_fd: StoredFileDescriptorType) ?ParseResult {
+ if (FeatureFlags.tracing) {
+ bundler.timer.start();
+ }
+ defer {
+ if (FeatureFlags.tracing) {
+ bundler.timer.stop();
+ bundler.elapsed += bundler.timer.elapsed;
+ }
+ }
+ var result: ParseResult = undefined;
+ const entry = bundler.resolver.caches.fs.readFile(bundler.fs, path.text, dirname_fd, !cache_files) catch return null;
+
+ const source = logger.Source.initFile(Fs.File{ .path = path, .contents = entry.contents }, bundler.allocator) catch return null;
+
+ switch (loader) {
+ .js, .jsx, .ts, .tsx => {
+ var jsx = bundler.options.jsx;
+ jsx.parse = loader.isJSX();
+ var opts = js_parser.Parser.Options.init(jsx, loader);
+ const value = (bundler.resolver.caches.js.parse(allocator, opts, bundler.options.define, bundler.log, &source) catch null) orelse return null;
+ return ParseResult{
+ .ast = value,
+ .source = source,
+ .loader = loader,
+ };
+ },
+ .json => {
+ var expr = json_parser.ParseJSON(&source, bundler.log, allocator) catch return null;
+ var stmt = js_ast.Stmt.alloc(allocator, js_ast.S.ExportDefault{
+ .value = js_ast.StmtOrExpr{ .expr = expr },
+ .default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
+ }, logger.Loc{ .start = 0 });
+ var stmts = allocator.alloc(js_ast.Stmt, 1) catch unreachable;
+ stmts[0] = stmt;
+ var parts = allocator.alloc(js_ast.Part, 1) catch unreachable;
+ parts[0] = js_ast.Part{ .stmts = stmts };
+
+ return ParseResult{
+ .ast = js_ast.Ast.initTest(parts),
+ .source = source,
+ .loader = loader,
+ };
+ },
+ .css => {
+ return null;
+ },
+ else => Global.panic("Unsupported loader {s} for path: {s}", .{ loader, source.path.text }),
+ }
- pub fn buildServeResultOutput(bundler: *Bundler, resolve: Resolver.Resolver.Result, loader: options.Loader) !ServeResult.Output {
- switch (loader) {
- .js, .jsx, .ts, .tsx, .json => {
- return ServeResult.Output{ .built = bundler.buildWithResolveResult(resolve) orelse error.BuildFailed };
- },
- else => {
- return ServeResult.Output{ .file = ServeResult.Output.File{ .absolute_path = resolve.path_pair.primary.text } };
- },
+ return null;
}
- }
- threadlocal var tmp_buildfile_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ pub fn buildServeResultOutput(bundler: *ThisBundler, resolve: _resolver.Result, loader: options.Loader) !ServeResult.Output {
+ switch (loader) {
+ .js, .jsx, .ts, .tsx, .json => {
+ return ServeResult.Output{ .built = bundler.buildWithResolveResult(resolve) orelse error.BuildFailed };
+ },
+ else => {
+ return ServeResult.Output{ .file = ServeResult.Output.File{ .absolute_path = resolve.path_pair.primary.text } };
+ },
+ }
+ }
- // We try to be mostly stateless when serving
- // This means we need a slightly different resolver setup
- // Essentially:
- pub fn buildFile(
- bundler: *Bundler,
- log: *logger.Log,
- allocator: *std.mem.Allocator,
- relative_path: string,
- _extension: string,
- ) !ServeResult {
- var extension = _extension;
- var original_resolver_logger = bundler.resolver.log;
- var original_bundler_logger = bundler.log;
-
- defer bundler.log = original_bundler_logger;
- defer bundler.resolver.log = original_resolver_logger;
- bundler.log = log;
- bundler.linker.allocator = allocator;
- bundler.resolver.log = log;
-
- // Resolving a public file has special behavior
- if (bundler.options.public_dir_enabled) {
- // On Windows, we don't keep the directory handle open forever because Windows doesn't like that.
- const public_dir: std.fs.Dir = bundler.options.public_dir_handle orelse std.fs.openDirAbsolute(bundler.options.public_dir, .{}) catch |err| {
- log.addErrorFmt(null, logger.Loc.Empty, allocator, "Opening public directory failed: {s}", .{@errorName(err)}) catch unreachable;
- Output.printErrorln("Opening public directory failed: {s}", .{@errorName(err)});
- bundler.options.public_dir_enabled = false;
- return error.PublicDirError;
- };
+ threadlocal var tmp_buildfile_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+
+ // We try to be mostly stateless when serving
+ // This means we need a slightly different resolver setup
+ // Essentially:
+ pub fn buildFile(
+ bundler: *ThisBundler,
+ log: *logger.Log,
+ allocator: *std.mem.Allocator,
+ relative_path: string,
+ _extension: string,
+ ) !ServeResult {
+ var extension = _extension;
+ var original_resolver_logger = bundler.resolver.log;
+ var original_bundler_logger = bundler.log;
+
+ defer bundler.log = original_bundler_logger;
+ defer bundler.resolver.log = original_resolver_logger;
+ bundler.log = log;
+ bundler.linker.allocator = allocator;
+ bundler.resolver.log = log;
+
+ // Resolving a public file has special behavior
+ if (bundler.options.public_dir_enabled) {
+ // On Windows, we don't keep the directory handle open forever because Windows doesn't like that.
+ const public_dir: std.fs.Dir = bundler.options.public_dir_handle orelse std.fs.openDirAbsolute(bundler.options.public_dir, .{}) catch |err| {
+ log.addErrorFmt(null, logger.Loc.Empty, allocator, "Opening public directory failed: {s}", .{@errorName(err)}) catch unreachable;
+ Output.printErrorln("Opening public directory failed: {s}", .{@errorName(err)});
+ bundler.options.public_dir_enabled = false;
+ return error.PublicDirError;
+ };
- var relative_unrooted_path: []u8 = resolve_path.normalizeString(relative_path, false, .auto);
-
- var _file: ?std.fs.File = null;
-
- // Is it the index file?
- if (relative_unrooted_path.len == 0) {
- // std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path);
- // std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/"
- // Search for /index.html
- if (public_dir.openFile("index.html", .{})) |file| {
- var index_path = "index.html".*;
- relative_unrooted_path = &(index_path);
- _file = file;
- extension = "html";
- } else |err| {}
- // Okay is it actually a full path?
- } else {
- if (public_dir.openFile(relative_unrooted_path, .{})) |file| {
- _file = file;
- } else |err| {}
- }
+ var relative_unrooted_path: []u8 = resolve_path.normalizeString(relative_path, false, .auto);
- // Try some weird stuff.
- while (_file == null and relative_unrooted_path.len > 1) {
- // When no extension is provided, it might be html
- if (extension.len == 0) {
- std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]);
- std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], ".html");
+ var _file: ?std.fs.File = null;
- if (public_dir.openFile(tmp_buildfile_buf[0 .. relative_unrooted_path.len + ".html".len], .{})) |file| {
+ // Is it the index file?
+ if (relative_unrooted_path.len == 0) {
+ // std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path);
+ // std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/"
+ // Search for /index.html
+ if (public_dir.openFile("index.html", .{})) |file| {
+ var index_path = "index.html".*;
+ relative_unrooted_path = &(index_path);
_file = file;
extension = "html";
- break;
} else |err| {}
+ // Okay is it actually a full path?
+ } else {
+ if (public_dir.openFile(relative_unrooted_path, .{})) |file| {
+ _file = file;
+ } else |err| {}
+ }
- var _path: []u8 = undefined;
- if (relative_unrooted_path[relative_unrooted_path.len - 1] == '/') {
- std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0 .. relative_unrooted_path.len - 1]);
- std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len - 1 ..], "/index.html");
- _path = tmp_buildfile_buf[0 .. relative_unrooted_path.len - 1 + "/index.html".len];
- } else {
+ // Try some weird stuff.
+ while (_file == null and relative_unrooted_path.len > 1) {
+ // When no extension is provided, it might be html
+ if (extension.len == 0) {
std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]);
- std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/index.html");
-
- _path = tmp_buildfile_buf[0 .. relative_unrooted_path.len + "/index.html".len];
+ std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], ".html");
+
+ if (public_dir.openFile(tmp_buildfile_buf[0 .. relative_unrooted_path.len + ".html".len], .{})) |file| {
+ _file = file;
+ extension = "html";
+ break;
+ } else |err| {}
+
+ var _path: []u8 = undefined;
+ if (relative_unrooted_path[relative_unrooted_path.len - 1] == '/') {
+ std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0 .. relative_unrooted_path.len - 1]);
+ std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len - 1 ..], "/index.html");
+ _path = tmp_buildfile_buf[0 .. relative_unrooted_path.len - 1 + "/index.html".len];
+ } else {
+ std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]);
+ std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/index.html");
+
+ _path = tmp_buildfile_buf[0 .. relative_unrooted_path.len + "/index.html".len];
+ }
+
+ if (public_dir.openFile(_path, .{})) |file| {
+ const __path = _path;
+ relative_unrooted_path = __path;
+ extension = "html";
+ _file = file;
+ break;
+ } else |err| {}
}
- if (public_dir.openFile(_path, .{})) |file| {
- const __path = _path;
- relative_unrooted_path = __path;
- extension = "html";
- _file = file;
- break;
- } else |err| {}
+ break;
}
- break;
- }
+ if (_file) |*file| {
+ var stat = try file.stat();
+ var absolute_path = resolve_path.joinAbs(bundler.options.public_dir, .auto, relative_unrooted_path);
- if (_file) |*file| {
- var stat = try file.stat();
- var absolute_path = resolve_path.joinAbs(bundler.options.public_dir, .auto, relative_unrooted_path);
+ if (stat.kind == .SymLink) {
+ absolute_path = try std.fs.realpath(absolute_path, &tmp_buildfile_buf);
+ file.close();
+ file.* = try std.fs.openFileAbsolute(absolute_path, .{ .read = true });
+ stat = try file.stat();
+ }
- if (stat.kind == .SymLink) {
- absolute_path = try std.fs.realpath(absolute_path, &tmp_buildfile_buf);
- file.close();
- file.* = try std.fs.openFileAbsolute(absolute_path, .{ .read = true });
- stat = try file.stat();
- }
+ if (stat.kind != .File) {
+ file.close();
+ return error.NotFile;
+ }
- if (stat.kind != .File) {
- file.close();
- return error.NotFile;
+ return ServeResult{
+ .file = options.OutputFile.initFile(file.*, absolute_path, stat.size),
+ .mime_type = MimeType.byExtension(std.fs.path.extension(absolute_path)[1..]),
+ };
}
+ }
+ if (strings.eqlComptime(relative_path, "__runtime.js")) {
return ServeResult{
- .file = options.OutputFile.initFile(file.*, absolute_path, stat.size),
- .mime_type = MimeType.byExtension(std.fs.path.extension(absolute_path)[1..]),
+ .file = options.OutputFile.initBuf(runtime.SourceContent, "__runtime.js", .js),
+ .mime_type = MimeType.javascript,
};
}
- }
-
- if (strings.eqlComptime(relative_path, "__runtime.js")) {
- return ServeResult{
- .file = options.OutputFile.initBuf(runtime.SourceContent, "__runtime.js", .js),
- .mime_type = MimeType.javascript,
- };
- }
- // We make some things faster in theory by using absolute paths instead of relative paths
- var absolute_path = resolve_path.joinAbsStringBuf(
- bundler.fs.top_level_dir,
- &tmp_buildfile_buf,
- &([_][]const u8{relative_path}),
- .auto,
- );
+ // We make some things faster in theory by using absolute paths instead of relative paths
+ var absolute_path = resolve_path.joinAbsStringBuf(
+ bundler.fs.top_level_dir,
+ &tmp_buildfile_buf,
+ &([_][]const u8{relative_path}),
+ .auto,
+ );
- defer {
- js_ast.Expr.Data.Store.reset();
- js_ast.Stmt.Data.Store.reset();
- }
+ defer {
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
+ }
- // If the extension is .js, omit it.
- // if (absolute_path.len > ".js".len and strings.eqlComptime(absolute_path[absolute_path.len - ".js".len ..], ".js")) {
- // absolute_path = absolute_path[0 .. absolute_path.len - ".js".len];
- // }
+ // If the extension is .js, omit it.
+ // if (absolute_path.len > ".js".len and strings.eqlComptime(absolute_path[absolute_path.len - ".js".len ..], ".js")) {
+ // absolute_path = absolute_path[0 .. absolute_path.len - ".js".len];
+ // }
- const resolved = (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .entry_point));
+ const resolved = (try bundler.resolver.resolve(bundler.fs.top_level_dir, absolute_path, .entry_point));
- const loader = bundler.options.loaders.get(resolved.path_pair.primary.name.ext) orelse .file;
+ const loader = bundler.options.loaders.get(resolved.path_pair.primary.name.ext) orelse .file;
- switch (loader) {
- .js, .jsx, .ts, .tsx, .json => {
- return ServeResult{
- .file = options.OutputFile.initPending(loader, resolved),
- .mime_type = MimeType.byLoader(
- loader,
- bundler.options.out_extensions.get(resolved.path_pair.primary.name.ext) orelse resolved.path_pair.primary.name.ext,
- ),
- };
- },
- else => {
- var abs_path = resolved.path_pair.primary.text;
- const file = try std.fs.openFileAbsolute(abs_path, .{ .read = true });
- var stat = try file.stat();
- return ServeResult{
- .file = options.OutputFile.initFile(file, abs_path, stat.size),
- .mime_type = MimeType.byLoader(loader, abs_path),
- };
- },
- }
- }
-
- pub fn bundle(
- allocator: *std.mem.Allocator,
- log: *logger.Log,
- opts: Api.TransformOptions,
- ) !options.TransformResult {
- var bundler = try Bundler.init(allocator, log, opts);
- bundler.configureLinker();
-
- if (bundler.options.write and bundler.options.output_dir.len > 0) {}
-
- // 100.00 µs std.fifo.LinearFifo(resolver.resolver.Result,std.fifo.LinearFifoBufferType { .Dynamic = {}}).writeItemAssumeCapacity
- if (bundler.options.resolve_mode != .lazy) {
- try bundler.resolve_queue.ensureUnusedCapacity(24);
+ switch (loader) {
+ .js, .jsx, .ts, .tsx, .json => {
+ return ServeResult{
+ .file = options.OutputFile.initPending(loader, resolved),
+ .mime_type = MimeType.byLoader(
+ loader,
+ bundler.options.out_extensions.get(resolved.path_pair.primary.name.ext) orelse resolved.path_pair.primary.name.ext,
+ ),
+ };
+ },
+ else => {
+ var abs_path = resolved.path_pair.primary.text;
+ const file = try std.fs.openFileAbsolute(abs_path, .{ .read = true });
+ var stat = try file.stat();
+ return ServeResult{
+ .file = options.OutputFile.initFile(file, abs_path, stat.size),
+ .mime_type = MimeType.byLoader(loader, abs_path),
+ };
+ },
+ }
}
- var entry_points = try allocator.alloc(Resolver.Resolver.Result, bundler.options.entry_points.len);
+ pub fn bundle(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ opts: Api.TransformOptions,
+ ) !options.TransformResult {
+ var bundler = try ThisBundler.init(allocator, log, opts);
+ bundler.configureLinker();
- if (isDebug) {
- log.level = .verbose;
- bundler.resolver.debug_logs = try Resolver.Resolver.DebugLogs.init(allocator);
- }
+ if (bundler.options.write and bundler.options.output_dir.len > 0) {}
- var rfs: *Fs.FileSystem.RealFS = &bundler.fs.fs;
-
- var entry_point_i: usize = 0;
- for (bundler.options.entry_points) |_entry| {
- var entry: string = _entry;
-
- if (!strings.startsWith(entry, "./")) {
- // Entry point paths without a leading "./" are interpreted as package
- // paths. This happens because they go through general path resolution
- // like all other import paths so that plugins can run on them. Requiring
- // a leading "./" for a relative path simplifies writing plugins because
- // entry points aren't a special case.
- //
- // However, requiring a leading "./" also breaks backward compatibility
- // and makes working with the CLI more difficult. So attempt to insert
- // "./" automatically when needed. We don't want to unconditionally insert
- // a leading "./" because the path may not be a file system path. For
- // example, it may be a URL. So only insert a leading "./" when the path
- // is an exact match for an existing file.
- var __entry = allocator.alloc(u8, "./".len + entry.len) catch unreachable;
- __entry[0] = '.';
- __entry[1] = '/';
- std.mem.copy(u8, __entry[2..__entry.len], entry);
- entry = __entry;
+ // 100.00 µs std.fifo.LinearFifo(resolver.Result,std.fifo.LinearFifoBufferType { .Dynamic = {}}).writeItemAssumeCapacity
+ if (bundler.options.resolve_mode != .lazy) {
+ try bundler.resolve_queue.ensureUnusedCapacity(24);
}
- defer {
- js_ast.Expr.Data.Store.reset();
- js_ast.Stmt.Data.Store.reset();
- }
-
- const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| {
- Output.printError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) });
- continue;
- };
-
- const key = result.path_pair.primary.text;
- if (bundler.resolve_results.contains(key)) {
- continue;
- }
- try bundler.resolve_results.put(key, result);
- entry_points[entry_point_i] = result;
+ var entry_points = try allocator.alloc(_resolver.Result, bundler.options.entry_points.len);
if (isDebug) {
- Output.print("Resolved {s} => {s}", .{ entry, result.path_pair.primary.text });
+ log.level = .verbose;
+ bundler.resolver.debug_logs = try DebugLogs.init(allocator);
}
- entry_point_i += 1;
- bundler.resolve_queue.writeItem(result) catch unreachable;
- }
+ var rfs: *Fs.FileSystem.RealFS = &bundler.fs.fs;
+
+ var entry_point_i: usize = 0;
+ for (bundler.options.entry_points) |_entry| {
+ var entry: string = _entry;
+
+ if (!strings.startsWith(entry, "./")) {
+ // Entry point paths without a leading "./" are interpreted as package
+ // paths. This happens because they go through general path resolution
+ // like all other import paths so that plugins can run on them. Requiring
+ // a leading "./" for a relative path simplifies writing plugins because
+ // entry points aren't a special case.
+ //
+ // However, requiring a leading "./" also breaks backward compatibility
+ // and makes working with the CLI more difficult. So attempt to insert
+ // "./" automatically when needed. We don't want to unconditionally insert
+ // a leading "./" because the path may not be a file system path. For
+ // example, it may be a URL. So only insert a leading "./" when the path
+ // is an exact match for an existing file.
+ var __entry = allocator.alloc(u8, "./".len + entry.len) catch unreachable;
+ __entry[0] = '.';
+ __entry[1] = '/';
+ std.mem.copy(u8, __entry[2..__entry.len], entry);
+ entry = __entry;
+ }
- switch (bundler.options.resolve_mode) {
- .lazy, .dev, .bundle => {
- while (bundler.resolve_queue.readItem()) |item| {
+ defer {
js_ast.Expr.Data.Store.reset();
js_ast.Stmt.Data.Store.reset();
- const output_file = bundler.buildWithResolveResultEager(item) catch continue orelse continue;
- bundler.output_files.append(output_file) catch unreachable;
}
- },
- else => Global.panic("Unsupported resolve mode: {s}", .{@tagName(bundler.options.resolve_mode)}),
- }
- // if (log.level == .verbose) {
- // for (log.msgs.items) |msg| {
- // try msg.writeFormat(std.io.getStdOut().writer());
- // }
- // }
+ const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| {
+ Output.printError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) });
+ continue;
+ };
- if (bundler.linker.any_needs_runtime) {
- try bundler.output_files.append(
- options.OutputFile.initBuf(runtime.SourceContent, bundler.linker.runtime_source_path, .js),
- );
- }
+ const key = result.path_pair.primary.text;
+ if (bundler.resolve_results.contains(key)) {
+ continue;
+ }
+ try bundler.resolve_results.put(key, result);
+ entry_points[entry_point_i] = result;
- if (FeatureFlags.tracing) {
- Output.printError(
- "\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n",
- .{
- bundler.resolver.elapsed,
- bundler.elapsed,
+ if (isDebug) {
+ Output.print("Resolved {s} => {s}", .{ entry, result.path_pair.primary.text });
+ }
+
+ entry_point_i += 1;
+ bundler.resolve_queue.writeItem(result) catch unreachable;
+ }
+
+ switch (bundler.options.resolve_mode) {
+ .lazy, .dev, .bundle => {
+ while (bundler.resolve_queue.readItem()) |item| {
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
+ const output_file = bundler.buildWithResolveResultEager(item) catch continue orelse continue;
+ bundler.output_files.append(output_file) catch unreachable;
+ }
},
- );
+ else => Global.panic("Unsupported resolve mode: {s}", .{@tagName(bundler.options.resolve_mode)}),
+ }
+
+ // if (log.level == .verbose) {
+ // for (log.msgs.items) |msg| {
+ // try msg.writeFormat(std.io.getStdOut().writer());
+ // }
+ // }
+
+ if (bundler.linker.any_needs_runtime) {
+ try bundler.output_files.append(
+ options.OutputFile.initBuf(runtime.SourceContent, bundler.linker.runtime_source_path, .js),
+ );
+ }
+
+ if (FeatureFlags.tracing) {
+ Output.printError(
+ "\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n",
+ .{
+ bundler.resolver.elapsed,
+ bundler.elapsed,
+ },
+ );
+ }
+
+ var final_result = try options.TransformResult.init(try allocator.dupe(u8, bundler.result.outbase), bundler.output_files.toOwnedSlice(), log, allocator);
+ final_result.root_dir = bundler.options.output_dir_handle;
+ return final_result;
}
+ };
+}
- var final_result = try options.TransformResult.init(try allocator.dupe(u8, bundler.result.outbase), bundler.output_files.toOwnedSlice(), log, allocator);
- final_result.root_dir = bundler.options.output_dir_handle;
- return final_result;
- }
-};
+pub const Bundler = NewBundler(true);
+pub const ServeBundler = NewBundler(false);
pub const Transformer = struct {
opts: Api.TransformOptions,
@@ -854,6 +866,7 @@ pub const Transformer = struct {
.transform_imports = false,
.runtime_imports = ast.runtime_imports,
},
+ u1,
null,
);
},
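
A consequence worth noting: downstream generics never name a concrete resolver; they reach whichever one was selected through the generated type's public decls, which is how NewLinker(comptime BundlerType: type) in the src/linker.zig diff below uses BundlerType.Resolver. A small sketch of that threading, with illustrative names:

    const std = @import("std");

    const CachedResolver = struct {
        pub const cached = true;
    };
    const UncachedResolver = struct {
        pub const cached = false;
    };

    fn NewBundler(comptime cache_files: bool) type {
        return struct {
            pub const Resolver = if (cache_files) CachedResolver else UncachedResolver;
        };
    }

    // Mirrors NewLinker(comptime BundlerType: type) from src/linker.zig: the
    // linker only knows the bundler type and uses whatever resolver it exposes.
    fn NewLinker(comptime BundlerType: type) type {
        return struct {
            pub fn resolverIsCached() bool {
                return BundlerType.Resolver.cached;
            }
        };
    }

    test "the linker sees whichever resolver the bundler picked" {
        const ServeLinker = NewLinker(NewBundler(false));
        try std.testing.expect(!ServeLinker.resolverIsCached());
    }
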
diff --git a/src/cache.zig b/src/cache.zig
index a25969668..668ceba1a 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -11,203 +11,215 @@ const fs = @import("./fs.zig");
const sync = @import("sync.zig");
const Mutex = sync.Mutex;
-pub const Cache = struct {
- pub const Set = struct {
- js: JavaScript,
- fs: Fs,
- json: Json,
-
- pub fn init(allocator: *std.mem.Allocator) Set {
- return Set{
- .js = JavaScript.init(allocator),
- .fs = Fs{
- .mutex = Mutex.init(),
- .entries = std.StringHashMap(Fs.Entry).init(allocator),
- },
- .json = Json{
- .mutex = Mutex.init(),
- .entries = std.StringHashMap(*Json.Entry).init(allocator),
- },
- };
- }
- };
- pub const Fs = struct {
- mutex: Mutex,
- entries: std.StringHashMap(Entry),
-
- pub const Entry = struct {
- contents: string,
- fd: StoredFileDescriptorType = 0,
- // Null means it's not usable
- mod_key: ?fs.FileSystem.Implementation.ModKey = null,
-
- pub fn deinit(entry: *Entry, allocator: *std.mem.Allocator) void {
- if (entry.contents.len > 0) {
- allocator.free(entry.contents);
- entry.contents = "";
- }
+pub fn NewCache(comptime cache_files: bool) type {
+ return struct {
+ pub const Set = struct {
+ js: JavaScript,
+ fs: Fs,
+ json: Json,
+
+ pub fn init(allocator: *std.mem.Allocator) Set {
+ return Set{
+ .js = JavaScript.init(allocator),
+ .fs = Fs{
+ .mutex = Mutex.init(),
+ .entries = std.StringHashMap(Fs.Entry).init(allocator),
+ .shared_buffer = MutableString.init(allocator, 0) catch unreachable,
+ },
+ .json = Json{
+ .mutex = Mutex.init(),
+ .entries = std.StringHashMap(*Json.Entry).init(allocator),
+ },
+ };
}
};
+ pub const Fs = struct {
+ mutex: Mutex,
+ entries: std.StringHashMap(Entry),
+ shared_buffer: MutableString,
+
+ pub const Entry = struct {
+ contents: string,
+ fd: StoredFileDescriptorType = 0,
+ // Null means it's not usable
+ mod_key: ?fs.FileSystem.Implementation.ModKey = null,
+
+ pub fn deinit(entry: *Entry, allocator: *std.mem.Allocator) void {
+ if (entry.contents.len > 0) {
+ allocator.free(entry.contents);
+ entry.contents = "";
+ }
+ }
+ };
- pub fn deinit(c: *Fs) void {
- var iter = c.entries.iterator();
- while (iter.next()) |entry| {
- entry.value.deinit(c.entries.allocator);
+ pub fn deinit(c: *Fs) void {
+ var iter = c.entries.iterator();
+ while (iter.next()) |entry| {
+ entry.value.deinit(c.entries.allocator);
+ }
+ c.entries.deinit();
}
- c.entries.deinit();
- }
- pub fn readFile(c: *Fs, _fs: *fs.FileSystem, path: string, dirname_fd: StoredFileDescriptorType) !Entry {
- var rfs = _fs.fs;
+ pub fn readFile(c: *Fs, _fs: *fs.FileSystem, path: string, dirname_fd: StoredFileDescriptorType, comptime use_shared_buffer: bool) !Entry {
+ var rfs = _fs.fs;
- {
- c.mutex.lock();
- defer c.mutex.unlock();
- if (c.entries.get(path)) |entry| {
- return entry;
+ if (cache_files) {
+ {
+ c.mutex.lock();
+ defer c.mutex.unlock();
+ if (c.entries.get(path)) |entry| {
+ return entry;
+ }
+ }
}
- }
- var file_handle: std.fs.File = undefined;
+ var file_handle: std.fs.File = undefined;
- if (FeatureFlags.store_file_descriptors and dirname_fd > 0) {
- file_handle = try std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .read = true });
- } else {
- file_handle = try std.fs.openFileAbsolute(path, .{ .read = true });
- }
+ if (FeatureFlags.store_file_descriptors and dirname_fd > 0) {
+ file_handle = try std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .read = true });
+ } else {
+ file_handle = try std.fs.openFileAbsolute(path, .{ .read = true });
+ }
- defer {
- if (rfs.needToCloseFiles()) {
- file_handle.close();
+ defer {
+ if (rfs.needToCloseFiles()) {
+ file_handle.close();
+ }
}
- }
- // If the file's modification key hasn't changed since it was cached, assume
- // the contents of the file are also the same and skip reading the file.
- var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: {
- switch (err) {
- error.FileNotFound, error.AccessDenied => {
+ // If the file's modification key hasn't changed since it was cached, assume
+ // the contents of the file are also the same and skip reading the file.
+ var mod_key: ?fs.FileSystem.Implementation.ModKey = rfs.modKeyWithFile(path, file_handle) catch |err| handler: {
+ switch (err) {
+ error.FileNotFound, error.AccessDenied => {
+ return err;
+ },
+ else => {
+ if (isDebug) {
+ Output.printError("modkey error: {s}", .{@errorName(err)});
+ }
+ break :handler null;
+ },
+ }
+ };
+
+ var file: fs.File = undefined;
+ if (mod_key) |modk| {
+ file = rfs.readFileWithHandle(path, modk.size, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| {
+ if (isDebug) {
+ Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
+ }
return err;
- },
- else => {
+ };
+ } else {
+ file = rfs.readFileWithHandle(path, null, file_handle, use_shared_buffer, &c.shared_buffer) catch |err| {
if (isDebug) {
- Output.printError("modkey error: {s}", .{@errorName(err)});
+ Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
}
- break :handler null;
- },
+ return err;
+ };
}
- };
- var file: fs.File = undefined;
- if (mod_key) |modk| {
- file = rfs.readFileWithHandle(path, modk.size, file_handle) catch |err| {
- if (isDebug) {
- Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
- }
- return err;
+ const entry = Entry{
+ .contents = file.contents,
+ .mod_key = mod_key,
+ .fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
};
- } else {
- file = rfs.readFileWithHandle(path, null, file_handle) catch |err| {
- if (isDebug) {
- Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
+
+ if (cache_files) {
+ c.mutex.lock();
+ defer c.mutex.unlock();
+ var res = c.entries.getOrPut(path) catch unreachable;
+
+ if (res.found_existing) {
+ res.entry.value.deinit(c.entries.allocator);
}
- return err;
- };
+ res.entry.value = entry;
+ return res.entry.value;
+ } else {
+ return entry;
+ }
}
+ };
- const entry = Entry{
- .contents = file.contents,
- .mod_key = mod_key,
- .fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
+ pub const Css = struct {
+ pub const Entry = struct {};
+ pub const Result = struct {
+ ok: bool,
+ value: void,
};
+ pub fn parse(cache: *@This(), log: *logger.Log, source: logger.Source) !Result {
+ Global.notimpl();
+ }
+ };
+
+ pub const JavaScript = struct {
+ mutex: Mutex,
+ entries: std.StringHashMap(Result),
- c.mutex.lock();
- defer c.mutex.unlock();
- var res = c.entries.getOrPut(path) catch unreachable;
+ pub const Result = js_ast.Result;
- if (res.found_existing) {
- res.entry.value.deinit(c.entries.allocator);
+ pub fn init(allocator: *std.mem.Allocator) JavaScript {
+ return JavaScript{ .mutex = Mutex.init(), .entries = std.StringHashMap(Result).init(allocator) };
}
+ // For now, we're not going to cache JavaScript ASTs.
+ // It's probably only relevant when bundling for production.
+ pub fn parse(
+ cache: *@This(),
+ allocator: *std.mem.Allocator,
+ opts: js_parser.Parser.Options,
+ defines: *Define,
+ log: *logger.Log,
+ source: *const logger.Source,
+ ) anyerror!?js_ast.Ast {
+ var temp_log = logger.Log.init(allocator);
+ defer temp_log.appendTo(log) catch {};
+
+ var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
+ return null;
+ };
- res.entry.value = entry;
- return res.entry.value;
- }
- };
+ const result = try parser.parse();
- pub const Css = struct {
- pub const Entry = struct {};
- pub const Result = struct {
- ok: bool,
- value: void,
+ return if (result.ok) result.ast else null;
+ }
};
- pub fn parse(cache: *@This(), log: *logger.Log, source: logger.Source) !Result {
- Global.notimpl();
- }
- };
- pub const JavaScript = struct {
- mutex: Mutex,
- entries: std.StringHashMap(Result),
-
- pub const Result = js_ast.Result;
-
- pub fn init(allocator: *std.mem.Allocator) JavaScript {
- return JavaScript{ .mutex = Mutex.init(), .entries = std.StringHashMap(Result).init(allocator) };
- }
- // For now, we're not going to cache JavaScript ASTs.
- // It's probably only relevant when bundling for production.
- pub fn parse(
- cache: *@This(),
- allocator: *std.mem.Allocator,
- opts: js_parser.Parser.Options,
- defines: *Define,
- log: *logger.Log,
- source: *const logger.Source,
- ) anyerror!?js_ast.Ast {
- var temp_log = logger.Log.init(allocator);
- defer temp_log.appendTo(log) catch {};
-
- var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
- return null;
+ pub const Json = struct {
+ pub const Entry = struct {
+ is_tsconfig: bool = false,
+ source: logger.Source,
+ expr: ?js_ast.Expr = null,
+ ok: bool = false,
+ // msgs: []logger.Msg,
};
+ mutex: Mutex,
+ entries: std.StringHashMap(*Entry),
+ pub fn init(allocator: *std.mem.Allocator) Json {
+ return Json{
+ .mutex = Mutex.init(),
+ .entries = std.StringHashMap(Entry).init(allocator),
+ };
+ }
+ fn parse(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator, is_tsconfig: bool, func: anytype) anyerror!?js_ast.Expr {
+ var temp_log = logger.Log.init(allocator);
+ defer {
+ temp_log.appendTo(log) catch {};
+ }
+ return func(&source, &temp_log, allocator) catch handler: {
+ break :handler null;
+ };
+ }
+ pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
+ return try parse(cache, log, source, allocator, false, json_parser.ParseJSON);
+ }
- const result = try parser.parse();
-
- return if (result.ok) result.ast else null;
- }
- };
-
- pub const Json = struct {
- pub const Entry = struct {
- is_tsconfig: bool = false,
- source: logger.Source,
- expr: ?js_ast.Expr = null,
- ok: bool = false,
- // msgs: []logger.Msg,
- };
- mutex: Mutex,
- entries: std.StringHashMap(*Entry),
- pub fn init(allocator: *std.mem.Allocator) Json {
- return Json{
- .mutex = Mutex.init(),
- .entries = std.StringHashMap(Entry).init(allocator),
- };
- }
- fn parse(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator, is_tsconfig: bool, func: anytype) anyerror!?js_ast.Expr {
- var temp_log = logger.Log.init(allocator);
- defer {
- temp_log.appendTo(log) catch {};
+ pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
+ return try parse(cache, log, source, allocator, true, json_parser.ParseTSConfig);
}
- return func(&source, &temp_log, allocator) catch handler: {
- break :handler null;
- };
- }
- pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
- return try parse(cache, log, source, allocator, false, json_parser.ParseJSON);
- }
-
- pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: *std.mem.Allocator) anyerror!?js_ast.Expr {
- return try parse(cache, log, source, allocator, true, json_parser.ParseTSConfig);
- }
+ };
};
-};
+}
+
+pub const Cache = NewCache(true);
+pub const ServeCache = NewCache(false);
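
Because cache_files is comptime-known, the if (cache_files) blocks guarding the mutex and the hash-map lookup are eliminated at compile time in the serve variant rather than checked at runtime. A minimal sketch with a toy cache, not the real one:

    const std = @import("std");

    fn NewCache(comptime cache_files: bool) type {
        return struct {
            hits: usize = 0,

            pub fn read(self: *@This(), key: []const u8) []const u8 {
                if (cache_files) {
                    // Only compiled into the cached variant; the serve variant
                    // contains no lookup code at all.
                    self.hits += 1;
                }
                return key;
            }
        };
    }

    test "the serve variant never touches its cache" {
        var serve_cache = NewCache(false){};
        _ = serve_cache.read("src/app.ts");
        try std.testing.expectEqual(@as(usize, 0), serve_cache.hits);
    }
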
diff --git a/src/fs.zig b/src/fs.zig
index 0cde895bc..77e85dcac 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -729,7 +729,14 @@ pub const FileSystem = struct {
}
}
- pub fn readFileWithHandle(fs: *RealFS, path: string, _size: ?usize, file: std.fs.File) !File {
+ pub fn readFileWithHandle(
+ fs: *RealFS,
+ path: string,
+ _size: ?usize,
+ file: std.fs.File,
+ comptime use_shared_buffer: bool,
+ shared_buffer: *MutableString,
+ ) !File {
FileSystem.setMaxFd(file.handle);
if (FeatureFlags.disable_filesystem_cache) {
@@ -742,10 +749,28 @@ pub const FileSystem = struct {
return err;
});
- const file_contents: []u8 = file.readToEndAllocOptions(fs.allocator, size, size, @alignOf(u8), null) catch |err| {
- fs.readFileError(path, err);
- return err;
- };
+ var file_contents: []u8 = undefined;
+
+ // When we're serving a JavaScript-like file over HTTP, we do not want to cache the contents in memory
+ // This imposes a performance hit, because serving from an in-memory cache is faster than re-reading the file from disk
+ // Part of that hit is allocating a temporary buffer to store the file contents in
+ // As a mitigation, we can just keep one buffer forever and re-use it for the parsed files
+ if (use_shared_buffer) {
+ shared_buffer.reset();
+ try shared_buffer.growBy(size);
+ shared_buffer.list.expandToCapacity();
+ var read_count = file.readAll(shared_buffer.list.items) catch |err| {
+ fs.readFileError(path, err);
+ return err;
+ };
+ shared_buffer.list.items = shared_buffer.list.items[0..read_count];
+ file_contents = shared_buffer.list.items;
+ } else {
+ file_contents = file.readToEndAllocOptions(fs.allocator, size, size, @alignOf(u8), null) catch |err| {
+ fs.readFileError(path, err);
+ return err;
+ };
+ }
if (fs.watcher) |*watcher| {
fs.watcher_mutex.lock();
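
The shared-buffer path above boils down to: reset a long-lived buffer, grow it to the file's size, read into it, and slice to the bytes actually read. A standalone sketch, under the assumption that std.ArrayList(u8) is an adequate stand-in for Bun's MutableString:

    const std = @import("std");

    // Read a whole file into a caller-owned, reusable buffer and return the
    // valid slice. The bytes are only valid until the next call reuses the
    // buffer, which is the same trade the serve path makes above.
    fn readIntoShared(file: std.fs.File, shared: *std.ArrayList(u8)) ![]u8 {
        const size: usize = @intCast(try file.getEndPos());
        shared.clearRetainingCapacity(); // like MutableString.reset()
        try shared.ensureTotalCapacity(size); // like growBy(size)
        shared.expandToCapacity();
        const read_count = try file.readAll(shared.items);
        shared.items.len = read_count; // keep only what was actually read
        return shared.items;
    }

    test "reads land in the shared buffer" {
        var tmp = std.testing.tmpDir(.{});
        defer tmp.cleanup();
        {
            var f = try tmp.dir.createFile("a.txt", .{});
            defer f.close();
            try f.writeAll("hello");
        }
        var file = try tmp.dir.openFile("a.txt", .{});
        defer file.close();

        var shared = std.ArrayList(u8).init(std.testing.allocator);
        defer shared.deinit();
        try std.testing.expectEqualStrings("hello", try readIntoShared(file, &shared));
    }
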
diff --git a/src/http.zig b/src/http.zig
index 24dbca10a..f9a4584b9 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -19,7 +19,7 @@ const Request = picohttp.Request;
const Response = picohttp.Response;
const Headers = picohttp.Headers;
const MimeType = @import("http/mime_type.zig");
-const Bundler = bundler.Bundler;
+const Bundler = bundler.ServeBundler;
const js_printer = @import("js_printer.zig");
const SOCKET_FLAGS = os.SOCK_CLOEXEC;
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 97940ff71..96ea1c9da 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -41,7 +41,6 @@ const last_high_surrogate: u21 = 0xDBFF;
const first_low_surrogate: u21 = 0xDC00;
const last_low_surrogate: u21 = 0xDFFF;
const assert = std.debug.assert;
-const Linker = @import("linker.zig").Linker;
fn notimpl() void {
Global.panic("Not implemented yet!", .{});
@@ -118,7 +117,7 @@ const ExprFlag = packed struct {
}
};
-pub fn NewPrinter(comptime ascii_only: bool, comptime Writer: type) type {
+pub fn NewPrinter(comptime ascii_only: bool, comptime Writer: type, comptime Linker: type) type {
// comptime const comptime_buf_len = 64;
// comptime var comptime_buf = [comptime_buf_len]u8{};
// comptime var comptime_buf_i: usize = 0;
@@ -3135,9 +3134,10 @@ pub fn printAst(
source: *const logger.Source,
ascii_only: bool,
opts: Options,
- linker: ?*Linker,
+ comptime LinkerType: type,
+ linker: ?*LinkerType,
) !usize {
- const PrinterType = NewPrinter(false, Writer);
+ const PrinterType = NewPrinter(false, Writer, LinkerType);
var writer = _writer;
var printer = try PrinterType.init(
writer,
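
With the linker now a comptime type parameter, callers that have no linker, like the Transformer hunk in src/bundler.zig above that passes u1 and null, can hand in any cheap placeholder type. A hedged sketch of why that works; the names are illustrative:

    const std = @import("std");

    fn NewPrinter(comptime LinkerType: type) type {
        return struct {
            linker: ?*LinkerType,

            pub fn hasLinker(self: @This()) bool {
                return self.linker != null;
            }
        };
    }

    fn printAst(comptime LinkerType: type, linker: ?*LinkerType) bool {
        const PrinterType = NewPrinter(LinkerType);
        const printer = PrinterType{ .linker = linker };
        return printer.hasLinker();
    }

    test "u1 works as a placeholder type when there is no linker" {
        // Any type satisfies the comptime slot when the pointer is null.
        try std.testing.expect(!printAst(u1, null));
    }
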
diff --git a/src/linker.zig b/src/linker.zig
index 5490bad4e..9c41bf4be 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -26,277 +26,283 @@ const Bundler = _bundler.Bundler;
const ResolveQueue = _bundler.ResolveQueue;
const Runtime = @import("./runtime.zig").Runtime;
-pub const Linker = struct {
- allocator: *std.mem.Allocator,
- options: *Options.BundleOptions,
- fs: *Fs.FileSystem,
- log: *logger.Log,
- resolve_queue: *ResolveQueue,
- resolver: *Resolver.Resolver,
- resolve_results: *_bundler.ResolveResults,
- any_needs_runtime: bool = false,
- runtime_import_record: ?ImportRecord = null,
- runtime_source_path: string,
-
- pub fn init(
+pub fn NewLinker(comptime BundlerType: type) type {
+ return struct {
+ const ThisLinker = @This();
allocator: *std.mem.Allocator,
+ options: *Options.BundleOptions,
+ fs: *Fs.FileSystem,
log: *logger.Log,
resolve_queue: *ResolveQueue,
- options: *Options.BundleOptions,
- resolver: *Resolver.Resolver,
+ resolver: *BundlerType.Resolver,
resolve_results: *_bundler.ResolveResults,
- fs: *Fs.FileSystem,
- ) Linker {
- relative_paths_list = ImportPathsList.init(allocator);
-
- return Linker{
- .allocator = allocator,
- .options = options,
- .fs = fs,
- .log = log,
- .resolve_queue = resolve_queue,
- .resolver = resolver,
- .resolve_results = resolve_results,
- .runtime_source_path = fs.absAlloc(allocator, &([_]string{"__runtime.js"})) catch unreachable,
- };
- }
-
- // fs: fs.FileSystem,
- // TODO:
- pub fn requireOrImportMetaForSource(c: Linker, source_index: Ref.Int) RequireOrImportMeta {
- return RequireOrImportMeta{};
- }
-
- // pub const Scratch = struct {
- // threadlocal var externals: std.ArrayList(u32) = undefined;
- // threadlocal var has_externals: std.ArrayList(u32) = undefined;
- // pub fn externals() {
-
- // }
- // };
- // This modifies the Ast in-place!
- // But more importantly, this does the following:
- // - Wrap CommonJS files
- pub fn link(linker: *Linker, file_path: Fs.Path, result: *Bundler.ParseResult) !void {
- var needs_runtime = result.ast.uses_exports_ref or result.ast.uses_module_ref or result.ast.runtime_imports.hasAny();
- const source_dir = file_path.name.dir;
- var externals = std.ArrayList(u32).init(linker.allocator);
-
- // Step 1. Resolve imports & requires
- switch (result.loader) {
- .jsx, .js, .ts, .tsx => {
- for (result.ast.import_records) |*import_record, record_index| {
- if (strings.eqlComptime(import_record.path.text, Runtime.Imports.Name)) {
- import_record.path = try linker.generateImportPath(
- source_dir,
- linker.runtime_source_path,
- Runtime.version(),
- );
- result.ast.runtime_import_record_id = @truncate(u32, record_index);
- result.ast.needs_runtime = true;
- continue;
- }
+ any_needs_runtime: bool = false,
+ runtime_import_record: ?ImportRecord = null,
+ runtime_source_path: string,
+
+ pub fn init(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ resolve_queue: *ResolveQueue,
+ options: *Options.BundleOptions,
+ resolver: *BundlerType.Resolver,
+ resolve_results: *_bundler.ResolveResults,
+ fs: *Fs.FileSystem,
+ ) ThisLinker {
+ relative_paths_list = ImportPathsList.init(allocator);
+
+ return ThisLinker{
+ .allocator = allocator,
+ .options = options,
+ .fs = fs,
+ .log = log,
+ .resolve_queue = resolve_queue,
+ .resolver = resolver,
+ .resolve_results = resolve_results,
+ .runtime_source_path = fs.absAlloc(allocator, &([_]string{"__runtime.js"})) catch unreachable,
+ };
+ }
- if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
- if (resolved_import.is_external) {
- externals.append(@truncate(u32, record_index)) catch unreachable;
- continue;
- }
+ // fs: fs.FileSystem,
+ // TODO:
+ pub fn requireOrImportMetaForSource(c: ThisLinker, source_index: Ref.Int) RequireOrImportMeta {
+ return RequireOrImportMeta{};
+ }
- linker.processImportRecord(
- // Include trailing slash
- file_path.text[0 .. source_dir.len + 1],
- resolved_import,
- import_record,
- ) catch continue;
-
- // If we're importing a CommonJS module as ESM
- // We need to do the following transform:
- // import React from 'react';
- // =>
- // import {_require} from 'RUNTIME_IMPORTS';
- // import * as react_module from 'react';
- // var React = _require(react_module).default;
- // UNLESS it's a namespace import
- // If it's a namespace import, assume it's safe.
- // We can do this in the printer instead of creating a bunch of AST nodes here.
- // But we need to at least tell the printer that this needs to happen.
- if (import_record.kind == .stmt and resolved_import.shouldAssumeCommonJS(import_record)) {
- import_record.wrap_with_to_module = true;
+ // pub const Scratch = struct {
+ // threadlocal var externals: std.ArrayList(u32) = undefined;
+ // threadlocal var has_externals: std.ArrayList(u32) = undefined;
+ // pub fn externals() {
+
+ // }
+ // };
+ // This modifies the Ast in-place!
+ // But more importantly, this does the following:
+ // - Wrap CommonJS files
+ pub fn link(linker: *ThisLinker, file_path: Fs.Path, result: *_bundler.ParseResult) !void {
+ var needs_runtime = result.ast.uses_exports_ref or result.ast.uses_module_ref or result.ast.runtime_imports.hasAny();
+ const source_dir = file_path.name.dir;
+ var externals = std.ArrayList(u32).init(linker.allocator);
+
+ // Step 1. Resolve imports & requires
+ switch (result.loader) {
+ .jsx, .js, .ts, .tsx => {
+ for (result.ast.import_records) |*import_record, record_index| {
+ if (strings.eqlComptime(import_record.path.text, Runtime.Imports.Name)) {
+ import_record.path = try linker.generateImportPath(
+ source_dir,
+ linker.runtime_source_path,
+ Runtime.version(),
+ );
+ result.ast.runtime_import_record_id = @truncate(u32, record_index);
result.ast.needs_runtime = true;
+ continue;
}
- } else |err| {
- switch (err) {
- error.ModuleNotFound => {
- if (Resolver.Resolver.isPackagePath(import_record.path.text)) {
- if (linker.options.platform != .node and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
- try linker.log.addRangeErrorFmt(
- &result.source,
- import_record.range,
- linker.allocator,
- "Could not resolve: \"{s}\". Try setting --platform=\"node\"",
- .{import_record.path.text},
- );
+
+ if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
+ if (resolved_import.is_external) {
+ externals.append(@truncate(u32, record_index)) catch unreachable;
+ continue;
+ }
+
+ linker.processImportRecord(
+ // Include trailing slash
+ file_path.text[0 .. source_dir.len + 1],
+ resolved_import,
+ import_record,
+ ) catch continue;
+
+ // If we're importing a CommonJS module as ESM
+ // We need to do the following transform:
+ // import React from 'react';
+ // =>
+ // import {_require} from 'RUNTIME_IMPORTS';
+ // import * as react_module from 'react';
+ // var React = _require(react_module).default;
+ // UNLESS it's a namespace import
+ // If it's a namespace import, assume it's safe.
+ // We can do this in the printer instead of creating a bunch of AST nodes here.
+ // But we need to at least tell the printer that this needs to happen.
+ if (import_record.kind == .stmt and resolved_import.shouldAssumeCommonJS(import_record)) {
+ import_record.wrap_with_to_module = true;
+ result.ast.needs_runtime = true;
+ }
+ } else |err| {
+ switch (err) {
+ error.ModuleNotFound => {
+ if (BundlerType.Resolver.isPackagePath(import_record.path.text)) {
+ if (linker.options.platform != .node and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
+ try linker.log.addRangeErrorFmt(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Could not resolve: \"{s}\". Try setting --platform=\"node\"",
+ .{import_record.path.text},
+ );
+ } else {
+ try linker.log.addRangeErrorFmt(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Could not resolve: \"{s}\". Maybe you need to \"npm install\" (or yarn/pnpm)?",
+ .{import_record.path.text},
+ );
+ }
} else {
try linker.log.addRangeErrorFmt(
&result.source,
import_record.range,
linker.allocator,
- "Could not resolve: \"{s}\". Maybe you need to \"npm install\" (or yarn/pnpm)?",
- .{import_record.path.text},
+ "Could not resolve: \"{s}\"",
+ .{
+ import_record.path.text,
+ },
);
+ continue;
}
- } else {
- try linker.log.addRangeErrorFmt(
- &result.source,
- import_record.range,
- linker.allocator,
- "Could not resolve: \"{s}\"",
- .{
- import_record.path.text,
- },
- );
+ },
+ else => {
continue;
- }
- },
- else => {
- continue;
- },
+ },
+ }
}
}
- }
- },
- else => {},
- }
- result.ast.externals = externals.toOwnedSlice();
-
- if (result.ast.needs_runtime and result.ast.runtime_import_record_id == null) {
- var import_records = try linker.allocator.alloc(ImportRecord, result.ast.import_records.len + 1);
- std.mem.copy(ImportRecord, import_records, result.ast.import_records);
- import_records[import_records.len - 1] = ImportRecord{
- .kind = .stmt,
- .path = try linker.generateImportPath(
- source_dir,
- linker.runtime_source_path,
- Runtime.version(),
- ),
- .range = logger.Range{ .loc = logger.Loc{ .start = 0 }, .len = 0 },
- };
- }
- }
-
- const ImportPathsList = allocators.BSSStringList(512, 128);
- pub var relative_paths_list: *ImportPathsList = undefined;
- threadlocal var relative_path_allocator: std.heap.FixedBufferAllocator = undefined;
- threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
- threadlocal var relative_path_allocator_buf_loaded: bool = false;
-
- pub fn generateImportPath(linker: *Linker, source_dir: string, source_path: string, package_version: ?string) !Fs.Path {
- if (!relative_path_allocator_buf_loaded) {
- relative_path_allocator_buf_loaded = true;
- relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
+ },
+ else => {},
+ }
+ result.ast.externals = externals.toOwnedSlice();
+
+ if (result.ast.needs_runtime and result.ast.runtime_import_record_id == null) {
+ var import_records = try linker.allocator.alloc(ImportRecord, result.ast.import_records.len + 1);
+ std.mem.copy(ImportRecord, import_records, result.ast.import_records);
+ import_records[import_records.len - 1] = ImportRecord{
+ .kind = .stmt,
+ .path = try linker.generateImportPath(
+ source_dir,
+ linker.runtime_source_path,
+ Runtime.version(),
+ ),
+ .range = logger.Range{ .loc = logger.Loc{ .start = 0 }, .len = 0 },
+                };
+                // Attach the extended list so the appended runtime import
+                // record is actually used.
+                result.ast.import_records = import_records;
+                result.ast.runtime_import_record_id = @truncate(u32, import_records.len - 1);
+            }
}
- defer relative_path_allocator.reset();
- var absolute_pathname = Fs.PathName.init(source_path);
+ const ImportPathsList = allocators.BSSStringList(512, 128);
+ pub var relative_paths_list: *ImportPathsList = undefined;
+ threadlocal var relative_path_allocator: std.heap.FixedBufferAllocator = undefined;
+ threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
+ threadlocal var relative_path_allocator_buf_loaded: bool = false;
- if (!linker.options.preserve_extensions) {
- if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
- absolute_pathname.ext = ext;
+ pub fn generateImportPath(linker: *ThisLinker, source_dir: string, source_path: string, package_version: ?string) !Fs.Path {
+ if (!relative_path_allocator_buf_loaded) {
+ relative_path_allocator_buf_loaded = true;
+ relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
}
- }
+ defer relative_path_allocator.reset();
- switch (linker.options.import_path_format) {
- .relative => {
- var pretty = try linker.allocator.dupe(u8, linker.fs.relative(source_dir, source_path));
- var pathname = Fs.PathName.init(pretty);
- return Fs.Path.initWithPretty(pretty, pretty);
- },
- .relative_nodejs => {
- var pretty = try linker.allocator.dupe(u8, linker.fs.relative(source_dir, source_path));
- var pathname = Fs.PathName.init(pretty);
- var path = Fs.Path.initWithPretty(pretty, pretty);
- path.text = path.text[0 .. path.text.len - path.name.ext.len];
- return path;
- },
-
- .absolute_url => {
- var base = linker.fs.relativeTo(source_path);
- if (strings.lastIndexOfChar(base, '.')) |dot| {
- base = base[0..dot];
- }
+ var absolute_pathname = Fs.PathName.init(source_path);
- if (linker.options.append_package_version_in_query_string and package_version != null) {
- const absolute_url =
- try std.fmt.allocPrint(
- linker.allocator,
- "{s}{s}{s}?v={s}",
- .{
- linker.options.public_url,
- base,
- absolute_pathname.ext,
- package_version.?,
- },
- );
-
- return Fs.Path.initWithPretty(absolute_url, absolute_url);
- } else {
- const absolute_url = try std.fmt.allocPrint(
- linker.allocator,
- "{s}{s}{s}",
- .{
- linker.options.public_url,
- base,
- absolute_pathname.ext,
- },
- );
-
- return Fs.Path.initWithPretty(absolute_url, absolute_url);
+ if (!linker.options.preserve_extensions) {
+ if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
+ absolute_pathname.ext = ext;
}
- },
+ }
- else => unreachable,
- }
- }
+ switch (linker.options.import_path_format) {
+ .relative => {
+ var pretty = try linker.allocator.dupe(u8, linker.fs.relative(source_dir, source_path));
+ var pathname = Fs.PathName.init(pretty);
+ return Fs.Path.initWithPretty(pretty, pretty);
+ },
+ .relative_nodejs => {
+ var pretty = try linker.allocator.dupe(u8, linker.fs.relative(source_dir, source_path));
+ var pathname = Fs.PathName.init(pretty);
+ var path = Fs.Path.initWithPretty(pretty, pretty);
+ path.text = path.text[0 .. path.text.len - path.name.ext.len];
+ return path;
+ },
+
+ .absolute_url => {
+ var base = linker.fs.relativeTo(source_path);
+ if (strings.lastIndexOfChar(base, '.')) |dot| {
+ base = base[0..dot];
+ }
+
+ if (linker.options.append_package_version_in_query_string and package_version != null) {
+ const absolute_url =
+ try std.fmt.allocPrint(
+ linker.allocator,
+ "{s}{s}{s}?v={s}",
+ .{
+ linker.options.public_url,
+ base,
+ absolute_pathname.ext,
+ package_version.?,
+ },
+ );
- pub fn processImportRecord(linker: *Linker, source_dir: string, resolve_result: *Resolver.Resolver.Result, import_record: *ImportRecord) !void {
+ return Fs.Path.initWithPretty(absolute_url, absolute_url);
+ } else {
+ const absolute_url = try std.fmt.allocPrint(
+ linker.allocator,
+ "{s}{s}{s}",
+ .{
+ linker.options.public_url,
+ base,
+ absolute_pathname.ext,
+ },
+ );
- // extremely naive.
- resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
+ return Fs.Path.initWithPretty(absolute_url, absolute_url);
+ }
+ },
- // lazy means:
- // Run the resolver
- // Don't parse/print automatically.
- if (linker.options.resolve_mode != .lazy) {
- try linker.enqueueResolveResult(resolve_result);
+ else => unreachable,
+ }
}
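
Aside: the `.absolute_url` branch above reduces to a single format call. A minimal standalone sketch of the URL shape it emits; the public URL, path, extension, and version below are invented values, not anything from this repo:

const std = @import("std");

pub fn main() !void {
    var buf: [128]u8 = undefined;
    // public_url + extension-less path + rewritten extension + "?v=" + package version
    const url = try std.fmt.bufPrint(&buf, "{s}{s}{s}?v={s}", .{
        "http://localhost:3000/",
        "node_modules/react/index",
        ".js",
        "17.0.2",
    });
    std.debug.print("{s}\n", .{url});
}
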
- import_record.path = try linker.generateImportPath(
- source_dir,
- resolve_result.path_pair.primary.text,
- resolve_result.package_json_version,
- );
- }
+ pub fn processImportRecord(linker: *ThisLinker, source_dir: string, resolve_result: *Resolver.Result, import_record: *ImportRecord) !void {
+
+            // Extremely naive: treat any path containing "/node_modules"
+            // as coming from node_modules.
+ resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
- pub fn resolveResultHashKey(linker: *Linker, resolve_result: *const Resolver.Resolver.Result) string {
- var hash_key = resolve_result.path_pair.primary.text;
+            // "Lazy" resolve mode means: run the resolver,
+            // but don't parse/print the result automatically.
+ if (linker.options.resolve_mode != .lazy) {
+ try linker.enqueueResolveResult(resolve_result);
+ }
- // Shorter hash key is faster to hash
- if (strings.startsWith(resolve_result.path_pair.primary.text, linker.fs.top_level_dir)) {
- hash_key = resolve_result.path_pair.primary.text[linker.fs.top_level_dir.len..];
+ import_record.path = try linker.generateImportPath(
+ source_dir,
+ resolve_result.path_pair.primary.text,
+ resolve_result.package_json_version,
+ );
}
- return hash_key;
- }
+ pub fn resolveResultHashKey(linker: *ThisLinker, resolve_result: *const Resolver.Result) string {
+ var hash_key = resolve_result.path_pair.primary.text;
- pub fn enqueueResolveResult(linker: *Linker, resolve_result: *const Resolver.Resolver.Result) !void {
- const hash_key = linker.resolveResultHashKey(resolve_result);
+ // Shorter hash key is faster to hash
+ if (strings.startsWith(resolve_result.path_pair.primary.text, linker.fs.top_level_dir)) {
+ hash_key = resolve_result.path_pair.primary.text[linker.fs.top_level_dir.len..];
+ }
+
+ return hash_key;
+ }
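
Aside: a self-contained sketch of the key-shortening trick in `resolveResultHashKey`; the directory and file paths here are hypothetical:

const std = @import("std");

fn hashKey(top_level_dir: []const u8, abs_path: []const u8) []const u8 {
    // Strip the project root prefix so there are fewer bytes to hash.
    if (std.mem.startsWith(u8, abs_path, top_level_dir)) {
        return abs_path[top_level_dir.len..];
    }
    return abs_path;
}

pub fn main() void {
    std.debug.print("{s}\n", .{hashKey("/home/me/app/", "/home/me/app/src/index.tsx")});
}
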
- const get_or_put_entry = try linker.resolve_results.backing.getOrPut(hash_key);
+ pub fn enqueueResolveResult(linker: *ThisLinker, resolve_result: *const Resolver.Result) !void {
+ const hash_key = linker.resolveResultHashKey(resolve_result);
- if (!get_or_put_entry.found_existing) {
- get_or_put_entry.entry.value = resolve_result.*;
- try linker.resolve_queue.writeItem(resolve_result.*);
+ const get_or_put_entry = try linker.resolve_results.backing.getOrPut(hash_key);
+
+ if (!get_or_put_entry.found_existing) {
+ get_or_put_entry.entry.value = resolve_result.*;
+ try linker.resolve_queue.writeItem(resolve_result.*);
+ }
}
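
Aside: `enqueueResolveResult` deduplicates with a single `getOrPut` probe. A toy version of the pattern with a plain `StringHashMap(void)` and made-up keys; the real code additionally stores the resolve result in the entry and writes it to a FIFO queue:

const std = @import("std");

pub fn main() !void {
    var seen = std.StringHashMap(void).init(std.heap.page_allocator);
    defer seen.deinit();

    const keys = [_][]const u8{ "src/a.ts", "src/b.ts", "src/a.ts" };
    for (keys) |key| {
        const slot = try seen.getOrPut(key);
        if (!slot.found_existing) {
            // First sighting: this is where the real code enqueues work.
            std.debug.print("enqueue {s}\n", .{key});
        }
    }
}
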
- }
-};
+ };
+}
+
+pub const Linker = NewLinker(_bundler.Bundler);
+pub const ServeLinker = NewLinker(_bundler.ServeBundler);
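
Aside: `NewLinker`, like `NewBundler` and `NewResolver` below, uses the `fn (comptime bool) type` pattern this commit introduces: one generic body, two concrete types chosen at compile time. A minimal sketch with invented names:

const std = @import("std");

fn NewGreeter(comptime cache_files: bool) type {
    return struct {
        const Self = @This();

        // A comptime-selected declaration, mirroring
        // `const Linker = if (cache_files) linker.Linker else linker.ServeLinker;`
        pub const mode: []const u8 = if (cache_files) "caching" else "serving";

        pub fn greet(self: Self) void {
            _ = self;
            std.debug.print("built in {s} mode\n", .{mode});
        }
    };
}

pub const Greeter = NewGreeter(true);
pub const ServeGreeter = NewGreeter(false);

pub fn main() void {
    (Greeter{}).greet();
    (ServeGreeter{}).greet();
}
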
diff --git a/src/options.zig b/src/options.zig
index c0973016e..b68f753a3 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -769,12 +769,12 @@ pub const OutputFile = struct {
move: FileOperation,
copy: FileOperation,
noop: u0,
- pending: resolver.Resolver.Result,
+ pending: resolver.Result,
};
pub const Kind = enum { move, copy, noop, buffer, pending };
- pub fn initPending(loader: Loader, pending: resolver.Resolver.Result) OutputFile {
+ pub fn initPending(loader: Loader, pending: resolver.Result) OutputFile {
return .{
.loader = .file,
.input = pending.path_pair.primary,
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 21f4c0906..9a7c12348 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -45,13 +45,13 @@ pub const PackageJSON = struct {
//
browser_map: BrowserMap,
- pub fn parse(r: *resolver.Resolver, input_path: string, dirname_fd: StoredFileDescriptorType) ?PackageJSON {
+ pub fn parse(comptime ResolverType: type, r: *ResolverType, input_path: string, dirname_fd: StoredFileDescriptorType) ?PackageJSON {
const parts = [_]string{ input_path, "package.json" };
const package_json_path_ = r.fs.abs(&parts);
const package_json_path = r.fs.filename_store.append(package_json_path_) catch unreachable;
- const entry = r.caches.fs.readFile(r.fs, package_json_path, dirname_fd) catch |err| {
+ const entry = r.caches.fs.readFile(r.fs, package_json_path, dirname_fd, false) catch |err| {
if (err != error.IsDir) {
r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot read file \"{s}\": {s}", .{ r.prettyPath(fs.Path.init(input_path)), @errorName(err) }) catch unreachable;
}
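
Aside: passing `comptime ResolverType: type` as above gives duck-typed generics: `parse` now accepts any resolver type whose fields and methods match what its body uses. A tiny standalone illustration; the types and functions are invented:

const std = @import("std");

const UpperCaser = struct {
    pub fn apply(self: *UpperCaser, c: u8) u8 {
        _ = self;
        return std.ascii.toUpper(c);
    }
};

fn applyAll(comptime T: type, t: *T, bytes: []u8) void {
    // Compiles for any T with a matching apply() method.
    for (bytes) |*b| b.* = t.apply(b.*);
}

pub fn main() void {
    var caser = UpperCaser{};
    var word = [_]u8{ 'b', 'u', 'n' };
    applyAll(UpperCaser, &caser, &word);
    const out: []const u8 = &word;
    std.debug.print("{s}\n", .{out});
}
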
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 8d26a3dad..8dc753b83 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -91,1688 +91,1698 @@ pub const TemporaryBuffer = struct {
pub threadlocal var TSConfigMatchFullBuf = std.mem.zeroes([512]u8);
};
-// TODO:
-// - Fix "browser" field mapping
-// - Consider removing the string list abstraction?
-pub const Resolver = struct {
- opts: options.BundleOptions,
- fs: *Fs.FileSystem,
- log: *logger.Log,
- allocator: *std.mem.Allocator,
-
- debug_logs: ?DebugLogs = null,
- elapsed: i128 = 0, // tracing
-
- caches: cache.Cache.Set,
-
- // These are sets that represent various conditions for the "exports" field
- // in package.json.
- // esm_conditions_default: std.StringHashMap(bool),
- // esm_conditions_import: std.StringHashMap(bool),
- // esm_conditions_require: std.StringHashMap(bool),
-
- // A special filtered import order for CSS "@import" imports.
- //
- // The "resolve extensions" setting determines the order of implicit
- // extensions to try when resolving imports with the extension omitted.
- // Sometimes people create a JavaScript/TypeScript file and a CSS file with
- // the same name when they create a component. At a high level, users expect
- // implicit extensions to resolve to the JS file when being imported from JS
- // and to resolve to the CSS file when being imported from CSS.
- //
- // Different bundlers handle this in different ways. Parcel handles this by
- // having the resolver prefer the same extension as the importing file in
- // front of the configured "resolve extensions" order. Webpack's "css-loader"
- // plugin just explicitly configures a special "resolve extensions" order
- // consisting of only ".css" for CSS files.
- //
- // It's unclear what behavior is best here. What we currently do is to create
- // a special filtered version of the configured "resolve extensions" order
- // for CSS files that filters out any extension that has been explicitly
- // configured with a non-CSS loader. This still gives users control over the
- // order but avoids the scenario where we match an import in a CSS file to a
- // JavaScript-related file. It's probably not perfect with plugins in the
- // picture but it's better than some alternatives and probably pretty good.
- // atImportExtensionOrder []string
-
- // This mutex serves two purposes. First of all, it guards access to "dirCache"
- // which is potentially mutated during path resolution. But this mutex is also
- // necessary for performance. The "React admin" benchmark mysteriously runs
- // twice as fast when this mutex is locked around the whole resolve operation
- // instead of around individual accesses to "dirCache". For some reason,
- // reducing parallelism in the resolver helps the rest of the bundler go
- // faster. I'm not sure why this is but please don't change this unless you
- // do a lot of testing with various benchmarks and there aren't any regressions.
- mutex: Mutex,
-
- // This cache maps a directory path to information about that directory and
- // all parent directories
- dir_cache: *DirInfo.HashMap,
-
- pub fn init1(
- allocator: *std.mem.Allocator,
- log: *logger.Log,
- _fs: *Fs.FileSystem,
- opts: options.BundleOptions,
- ) Resolver {
- return Resolver{
- .allocator = allocator,
- .dir_cache = DirInfo.HashMap.init(allocator),
- .mutex = Mutex.init(),
- .caches = cache.Cache.Set.init(allocator),
- .opts = opts,
- .fs = _fs,
- .log = log,
- };
- }
+pub const PathPair = struct {
+ primary: Path,
+ secondary: ?Path = null,
- pub const DebugLogs = struct {
- what: string = "",
- indent: MutableString,
- notes: std.ArrayList(logger.Data),
-
- pub const FlushMode = enum { fail, success };
-
- pub fn init(allocator: *std.mem.Allocator) !DebugLogs {
- var mutable = try MutableString.init(allocator, 0);
- return DebugLogs{
- .indent = mutable,
- .notes = std.ArrayList(logger.Data).init(allocator),
- };
- }
-
- pub fn deinit(d: DebugLogs) void {
- var allocator = d.notes.allocator;
- d.notes.deinit();
- // d.indent.deinit();
- }
-
- pub fn increaseIndent(d: *DebugLogs) !void {
- try d.indent.append(" ");
- }
+ pub const Iter = struct {
+ index: u2,
+ ctx: *PathPair,
+ pub fn next(i: *Iter) ?Path {
+ const ind = i.index;
+ i.index += 1;
- pub fn decreaseIndent(d: *DebugLogs) !void {
- d.indent.list.shrinkRetainingCapacity(d.indent.list.items.len - 1);
+ switch (ind) {
+ 0 => return i.ctx.primary,
+ 1 => return i.ctx.secondary,
+ else => return null,
+ }
}
+ };
- pub fn addNote(d: *DebugLogs, _text: string) !void {
- var text = _text;
- const len = d.indent.len();
- if (len > 0) {
- var __text = try d.notes.allocator.alloc(u8, text.len + len);
- std.mem.copy(u8, __text, d.indent.list.items);
- std.mem.copy(u8, __text[len..__text.len], _text);
- d.notes.allocator.free(_text);
- }
+ pub fn iter(p: *PathPair) Iter {
+ return Iter{ .ctx = p, .index = 0 };
+ }
+};
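
Aside: a toy version of `PathPair.iter()` with strings standing in for `Fs.Path`, showing the two-slot iteration order. Note that, as in the original, a `while` loop over it stops early when `secondary` is null:

const std = @import("std");

const Pair = struct {
    primary: []const u8,
    secondary: ?[]const u8 = null,

    const Iter = struct {
        index: u2 = 0,
        ctx: *Pair,

        fn next(i: *Iter) ?[]const u8 {
            const ind = i.index;
            i.index += 1;
            switch (ind) {
                0 => return i.ctx.primary,
                1 => return i.ctx.secondary,
                else => return null,
            }
        }
    };

    fn iter(p: *Pair) Iter {
        return Iter{ .ctx = p };
    }
};

pub fn main() void {
    var pair = Pair{ .primary = "dist/app.js", .secondary = "dist/app.css" };
    var it = pair.iter();
    while (it.next()) |path| std.debug.print("{s}\n", .{path});
}
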
- try d.notes.append(logger.rangeData(null, logger.Range.None, text));
- }
+pub const Result = struct {
+ path_pair: PathPair,
- pub fn addNoteFmt(d: *DebugLogs, comptime fmt: string, args: anytype) !void {
- return try d.addNote(try std.fmt.allocPrint(d.notes.allocator, fmt, args));
- }
- };
+ jsx: options.JSX.Pragma = options.JSX.Pragma{},
- pub const PathPair = struct {
- primary: Path,
- secondary: ?Path = null,
-
- pub const Iter = struct {
- index: u2,
- ctx: *PathPair,
- pub fn next(i: *Iter) ?Path {
- const ind = i.index;
- i.index += 1;
-
- switch (ind) {
- 0 => return i.ctx.primary,
- 1 => return i.ctx.secondary,
- else => return null,
- }
- }
- };
+ package_json_version: ?string = null,
- pub fn iter(p: *PathPair) Iter {
- return Iter{ .ctx = p, .index = 0 };
- }
- };
+ is_external: bool = false,
- pub const Result = struct {
- path_pair: PathPair,
+ // This is true when the package was loaded from within the node_modules directory.
+ is_from_node_modules: bool = false,
- jsx: options.JSX.Pragma = options.JSX.Pragma{},
+ diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
- package_json_version: ?string = null,
+ // If present, any ES6 imports to this file can be considered to have no side
+ // effects. This means they should be removed if unused.
+ primary_side_effects_data: ?SideEffectsData = null,
- is_external: bool = false,
+ // If true, the class field transform should use Object.defineProperty().
+ use_define_for_class_fields_ts: ?bool = null,
- // This is true when the package was loaded from within the node_modules directory.
- is_from_node_modules: bool = false,
+ // If true, unused imports are retained in TypeScript code. This matches the
+ // behavior of the "importsNotUsedAsValues" field in "tsconfig.json" when the
+ // value is not "remove".
+ preserve_unused_imports_ts: bool = false,
- diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
+ // This is the "type" field from "package.json"
+ module_type: options.ModuleType = options.ModuleType.unknown,
- // If present, any ES6 imports to this file can be considered to have no side
- // effects. This means they should be removed if unused.
- primary_side_effects_data: ?SideEffectsData = null,
+ debug_meta: ?DebugMeta = null,
- // If true, the class field transform should use Object.defineProperty().
- use_define_for_class_fields_ts: ?bool = null,
+ dirname_fd: StoredFileDescriptorType = 0,
+ file_fd: StoredFileDescriptorType = 0,
- // If true, unused imports are retained in TypeScript code. This matches the
- // behavior of the "importsNotUsedAsValues" field in "tsconfig.json" when the
- // value is not "remove".
- preserve_unused_imports_ts: bool = false,
+    // Most NPM modules are CommonJS.
+    // If the module type is unspecified, assume CommonJS for packages
+    // and ESM for internal app code.
+ pub fn shouldAssumeCommonJS(r: *const Result, import_record: *const ast.ImportRecord) bool {
+ if (import_record.kind == .require or import_record.kind == .require_resolve or r.module_type == .cjs) {
+ return true;
+ }
- // This is the "type" field from "package.json"
- module_type: options.ModuleType = options.ModuleType.unknown,
+ if (r.module_type == .esm) {
+ return false;
+ }
- debug_meta: ?DebugMeta = null,
+ return r.is_from_node_modules;
+ }
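
Aside: the decision in `shouldAssumeCommonJS`, restated as a standalone predicate. The two enums are reduced to just the cases used here; everything else is invented scaffolding:

const std = @import("std");

const ModuleType = enum { unknown, cjs, esm };
const ImportKind = enum { stmt, require, require_resolve };

fn assumeCommonJS(kind: ImportKind, module_type: ModuleType, from_node_modules: bool) bool {
    if (kind == .require or kind == .require_resolve or module_type == .cjs) return true;
    if (module_type == .esm) return false;
    // "type" unset in package.json: packages default to CJS, app code to ESM.
    return from_node_modules;
}

pub fn main() void {
    std.debug.assert(assumeCommonJS(.require, .unknown, false));
    std.debug.assert(!assumeCommonJS(.stmt, .esm, true));
    std.debug.assert(assumeCommonJS(.stmt, .unknown, true));
    std.debug.print("ok\n", .{});
}
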
- dirname_fd: StoredFileDescriptorType = 0,
- file_fd: StoredFileDescriptorType = 0,
+ pub const DebugMeta = struct {
+ notes: std.ArrayList(logger.Data),
+ suggestion_text: string = "",
+ suggestion_message: string = "",
- // Most NPM modules are CommonJS
- // If unspecified, assume CommonJS.
- // If internal app code, assume ESM.
- pub fn shouldAssumeCommonJS(r: *const Result, import_record: *const ast.ImportRecord) bool {
- if (import_record.kind == .require or import_record.kind == .require_resolve or r.module_type == .cjs) {
- return true;
- }
+ pub fn init(allocator: *std.mem.Allocator) DebugMeta {
+ return DebugMeta{ .notes = std.ArrayList(logger.Data).init(allocator) };
+ }
- if (r.module_type == .esm) {
- return false;
+ pub fn logErrorMsg(m: *DebugMeta, log: *logger.Log, _source: ?*const logger.Source, r: logger.Range, comptime fmt: string, args: anytype) !void {
+ if (_source != null and m.suggestion_message.len > 0) {
+ const data = logger.rangeData(_source.?, r, m.suggestion_message);
+ data.location.?.suggestion = m.suggestion_text;
+ try m.notes.append(data);
}
- return r.is_from_node_modules;
+ try log.addMsg(Msg{
+ .kind = .err,
+ .data = logger.rangeData(_source, r, std.fmt.allocPrint(m.notes.allocator, fmt, args)),
+ .notes = m.toOwnedSlice(),
+ });
}
+ };
+};
- pub const DebugMeta = struct {
- notes: std.ArrayList(logger.Data),
- suggestion_text: string = "",
- suggestion_message: string = "",
+pub const DirEntryResolveQueueItem = struct { result: allocators.Result, unsafe_path: string };
+threadlocal var _dir_entry_paths_to_resolve: [256]DirEntryResolveQueueItem = undefined;
+threadlocal var _open_dirs: [256]std.fs.Dir = undefined;
- pub fn init(allocator: *std.mem.Allocator) DebugMeta {
- return DebugMeta{ .notes = std.ArrayList(logger.Data).init(allocator) };
- }
+threadlocal var tsconfig_base_url_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
- pub fn logErrorMsg(m: *DebugMeta, log: *logger.Log, _source: ?*const logger.Source, r: logger.Range, comptime fmt: string, args: anytype) !void {
- if (_source != null and m.suggestion_message.len > 0) {
- const data = logger.rangeData(_source.?, r, m.suggestion_message);
- data.location.?.suggestion = m.suggestion_text;
- try m.notes.append(data);
- }
+pub const DebugLogs = struct {
+ what: string = "",
+ indent: MutableString,
+ notes: std.ArrayList(logger.Data),
- try log.addMsg(Msg{
- .kind = .err,
- .data = logger.rangeData(_source, r, std.fmt.allocPrint(m.notes.allocator, fmt, args)),
- .notes = m.toOwnedSlice(),
- });
- }
+ pub const FlushMode = enum { fail, success };
+
+ pub fn init(allocator: *std.mem.Allocator) !DebugLogs {
+ var mutable = try MutableString.init(allocator, 0);
+ return DebugLogs{
+ .indent = mutable,
+ .notes = std.ArrayList(logger.Data).init(allocator),
};
- };
+ }
- pub fn isExternalPattern(r: *Resolver, import_path: string) bool {
- for (r.opts.external.patterns) |pattern| {
- if (import_path.len >= pattern.prefix.len + pattern.suffix.len and (strings.startsWith(
- import_path,
- pattern.prefix,
- ) and strings.endsWith(
- import_path,
- pattern.suffix,
- ))) {
- return true;
- }
- }
- return false;
+ pub fn deinit(d: DebugLogs) void {
+ var allocator = d.notes.allocator;
+ d.notes.deinit();
+ // d.indent.deinit();
}
- pub fn flushDebugLogs(r: *Resolver, flush_mode: DebugLogs.FlushMode) !void {
- if (r.debug_logs) |*debug| {
- defer {
- debug.deinit();
- r.debug_logs = null;
- }
+ pub fn increaseIndent(d: *DebugLogs) !void {
+ try d.indent.append(" ");
+ }
- if (flush_mode == DebugLogs.FlushMode.fail) {
- try r.log.addRangeDebugWithNotes(null, logger.Range{ .loc = logger.Loc{} }, debug.what, debug.notes.toOwnedSlice());
- } else if (@enumToInt(r.log.level) <= @enumToInt(logger.Log.Level.verbose)) {
- try r.log.addVerboseWithNotes(null, logger.Loc.Empty, debug.what, debug.notes.toOwnedSlice());
- }
+ pub fn decreaseIndent(d: *DebugLogs) !void {
+ d.indent.list.shrinkRetainingCapacity(d.indent.list.items.len - 1);
+ }
+
+ pub fn addNote(d: *DebugLogs, _text: string) !void {
+ var text = _text;
+ const len = d.indent.len();
+ if (len > 0) {
+ var __text = try d.notes.allocator.alloc(u8, text.len + len);
+ std.mem.copy(u8, __text, d.indent.list.items);
+            std.mem.copy(u8, __text[len..__text.len], _text);
+            // Point `text` at the indented copy before freeing the original,
+            // so the append below doesn't reference freed memory.
+            text = __text;
+            d.notes.allocator.free(_text);
}
+
+ try d.notes.append(logger.rangeData(null, logger.Range.None, text));
}
- var tracing_start: i128 = if (FeatureFlags.tracing) 0 else undefined;
- threadlocal var relative_abs_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ pub fn addNoteFmt(d: *DebugLogs, comptime fmt: string, args: anytype) !void {
+ return try d.addNote(try std.fmt.allocPrint(d.notes.allocator, fmt, args));
+ }
+};
- pub fn resolve(r: *Resolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result {
- if (FeatureFlags.tracing) {
- tracing_start = std.time.nanoTimestamp();
- }
- defer {
- if (FeatureFlags.tracing) {
- r.elapsed += std.time.nanoTimestamp() - tracing_start;
- }
- }
- if (r.log.level == .verbose) {
- if (r.debug_logs != null) {
- r.debug_logs.?.deinit();
- }
+pub const TSConfigExtender = struct {
+ visited: *StringBoolMap,
+ file_dir: string,
+ r: *ThisResolver,
+
+ pub fn extends(ctx: *TSConfigExtender, extends: String, range: logger.Range) ?*TSConfigJSON {
+ Global.notimpl();
+ // if (isPackagePath(extends)) {
+ // // // If this is a package path, try to resolve it to a "node_modules"
+ // // // folder. This doesn't use the normal node module resolution algorithm
+ // // // both because it's different (e.g. we don't want to match a directory)
+ // // // and because it would deadlock since we're currently in the middle of
+ // // // populating the directory info cache.
+ // // var current = ctx.file_dir;
+ // // while (true) {
+ // // // Skip "node_modules" folders
+ // // if (!strings.eql(std.fs.path.basename(current), "node_modules")) {
+ // // var paths1 = [_]string{ current, "node_modules", extends };
+ // // var join1 = r.fs.absAlloc(ctx.r.allocator, &paths1) catch unreachable;
+ // // const res = ctx.r.parseTSConfig(join1, ctx.1) catch |err| {
+ // // if (err == error.ENOENT) {
+ // // continue;
+ // // } else if (err == error.ParseErrorImportCycle) {} else if (err != error.ParseErrorAlreadyLogged) {}
+ // // return null;
+ // // };
+ // // return res;
+
+ // // }
+ // // }
+ // }
+ }
+};
- r.debug_logs = try DebugLogs.init(r.allocator);
- }
+pub const MatchResult = struct {
+ path_pair: PathPair,
+ dirname_fd: StoredFileDescriptorType = 0,
+ file_fd: StoredFileDescriptorType = 0,
+ is_node_module: bool = false,
+ package_json_version: ?string = null,
+ diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
+};
+
+pub const LoadResult = struct {
+ path: string,
+ diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase,
+ dirname_fd: StoredFileDescriptorType = 0,
+};
- // Certain types of URLs default to being external for convenience
- if (r.isExternalPattern(import_path) or
- // "fill: url(#filter);"
- (kind.isFromCSS() and strings.startsWith(import_path, "#")) or
+// TODO:
+// - Fix "browser" field mapping
+// - Consider removing the string list abstraction?
+pub fn NewResolver(cache_files: bool) type {
+ const CacheSet = if (cache_files) cache.Cache.Set else cache.ServeCache.Set;
- // "background: url(http://example.com/images/image.png);"
- strings.startsWith(import_path, "http://") or
+ return struct {
+ const ThisResolver = @This();
+ opts: options.BundleOptions,
+ fs: *Fs.FileSystem,
+ log: *logger.Log,
+ allocator: *std.mem.Allocator,
- // "background: url(https://example.com/images/image.png);"
- strings.startsWith(import_path, "https://") or
+ debug_logs: ?DebugLogs = null,
+ elapsed: i128 = 0, // tracing
- // "background: url(//example.com/images/image.png);"
- strings.startsWith(import_path, "//"))
- {
- if (r.debug_logs) |*debug| {
- try debug.addNote("Marking this path as implicitly external");
- }
- r.flushDebugLogs(.success) catch {};
- return Result{
- .path_pair = PathPair{
- .primary = Path.init(import_path),
- },
- .is_external = true,
- .module_type = .esm,
+ caches: CacheSet,
+
+ // These are sets that represent various conditions for the "exports" field
+ // in package.json.
+ // esm_conditions_default: std.StringHashMap(bool),
+ // esm_conditions_import: std.StringHashMap(bool),
+ // esm_conditions_require: std.StringHashMap(bool),
+
+ // A special filtered import order for CSS "@import" imports.
+ //
+ // The "resolve extensions" setting determines the order of implicit
+ // extensions to try when resolving imports with the extension omitted.
+ // Sometimes people create a JavaScript/TypeScript file and a CSS file with
+ // the same name when they create a component. At a high level, users expect
+ // implicit extensions to resolve to the JS file when being imported from JS
+ // and to resolve to the CSS file when being imported from CSS.
+ //
+ // Different bundlers handle this in different ways. Parcel handles this by
+ // having the resolver prefer the same extension as the importing file in
+ // front of the configured "resolve extensions" order. Webpack's "css-loader"
+ // plugin just explicitly configures a special "resolve extensions" order
+ // consisting of only ".css" for CSS files.
+ //
+ // It's unclear what behavior is best here. What we currently do is to create
+ // a special filtered version of the configured "resolve extensions" order
+ // for CSS files that filters out any extension that has been explicitly
+ // configured with a non-CSS loader. This still gives users control over the
+ // order but avoids the scenario where we match an import in a CSS file to a
+ // JavaScript-related file. It's probably not perfect with plugins in the
+ // picture but it's better than some alternatives and probably pretty good.
+ // atImportExtensionOrder []string
+
+ // This mutex serves two purposes. First of all, it guards access to "dirCache"
+ // which is potentially mutated during path resolution. But this mutex is also
+ // necessary for performance. The "React admin" benchmark mysteriously runs
+ // twice as fast when this mutex is locked around the whole resolve operation
+ // instead of around individual accesses to "dirCache". For some reason,
+ // reducing parallelism in the resolver helps the rest of the bundler go
+ // faster. I'm not sure why this is but please don't change this unless you
+ // do a lot of testing with various benchmarks and there aren't any regressions.
+ mutex: Mutex,
+
+ // This cache maps a directory path to information about that directory and
+ // all parent directories
+ dir_cache: *DirInfo.HashMap,
+
+ pub fn init1(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ _fs: *Fs.FileSystem,
+ opts: options.BundleOptions,
+ ) ThisResolver {
+ return ThisResolver{
+ .allocator = allocator,
+ .dir_cache = DirInfo.HashMap.init(allocator),
+ .mutex = Mutex.init(),
+ .caches = CacheSet.init(allocator),
+ .opts = opts,
+ .fs = _fs,
+ .log = log,
};
}
- if (DataURL.parse(import_path)) |_data_url| {
- const data_url: DataURL = _data_url;
- // "import 'data:text/javascript,console.log(123)';"
- // "@import 'data:text/css,body{background:white}';"
- if (data_url.decode_mime_type() != .Unsupported) {
- if (r.debug_logs) |*debug| {
- debug.addNote("Putting this path in the \"dataurl\" namespace") catch {};
+ pub fn isExternalPattern(r: *ThisResolver, import_path: string) bool {
+ for (r.opts.external.patterns) |pattern| {
+ if (import_path.len >= pattern.prefix.len + pattern.suffix.len and (strings.startsWith(
+ import_path,
+ pattern.prefix,
+ ) and strings.endsWith(
+ import_path,
+ pattern.suffix,
+ ))) {
+ return true;
}
- r.flushDebugLogs(.success) catch {};
- return Resolver.Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } };
}
+ return false;
+ }
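
Aside: each external pattern in `isExternalPattern` is a prefix/suffix pair, and the length guard keeps the two halves from matching overlapping bytes. A minimal sketch; the `node:*`-style pattern is an invented example:

const std = @import("std");

fn matchesPattern(import_path: []const u8, prefix: []const u8, suffix: []const u8) bool {
    return import_path.len >= prefix.len + suffix.len and
        std.mem.startsWith(u8, import_path, prefix) and
        std.mem.endsWith(u8, import_path, suffix);
}

pub fn main() void {
    // e.g. an external pattern "node:*" split into prefix "node:", suffix ""
    std.debug.print("{}\n", .{matchesPattern("node:fs", "node:", "")});
}
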
- // "background: url(data:image/png;base64,iVBORw0KGgo=);"
+ pub fn flushDebugLogs(r: *ThisResolver, flush_mode: DebugLogs.FlushMode) !void {
if (r.debug_logs) |*debug| {
- debug.addNote("Marking this \"dataurl\" as external") catch {};
+ defer {
+ debug.deinit();
+ r.debug_logs = null;
+ }
+
+ if (flush_mode == DebugLogs.FlushMode.fail) {
+ try r.log.addRangeDebugWithNotes(null, logger.Range{ .loc = logger.Loc{} }, debug.what, debug.notes.toOwnedSlice());
+ } else if (@enumToInt(r.log.level) <= @enumToInt(logger.Log.Level.verbose)) {
+ try r.log.addVerboseWithNotes(null, logger.Loc.Empty, debug.what, debug.notes.toOwnedSlice());
+ }
}
- r.flushDebugLogs(.success) catch {};
- return Resolver.Result{
- .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") },
- .is_external = true,
- };
}
+ var tracing_start: i128 = if (FeatureFlags.tracing) 0 else undefined;
- // Fail now if there is no directory to resolve in. This can happen for
- // virtual modules (e.g. stdin) if a resolve directory is not specified.
- if (source_dir.len == 0) {
- if (r.debug_logs) |*debug| {
- debug.addNote("Cannot resolve this path without a directory") catch {};
+ threadlocal var relative_abs_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+
+ pub fn resolve(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result {
+ if (FeatureFlags.tracing) {
+ tracing_start = std.time.nanoTimestamp();
}
- r.flushDebugLogs(.fail) catch {};
- return error.MissingResolveDir;
- }
+ defer {
+ if (FeatureFlags.tracing) {
+ r.elapsed += std.time.nanoTimestamp() - tracing_start;
+ }
+ }
+ if (r.log.level == .verbose) {
+ if (r.debug_logs != null) {
+ r.debug_logs.?.deinit();
+ }
- r.mutex.lock();
- defer r.mutex.unlock();
+ r.debug_logs = try DebugLogs.init(r.allocator);
+ }
- var result = try r.resolveWithoutSymlinks(source_dir, import_path, kind);
+ // Certain types of URLs default to being external for convenience
+ if (r.isExternalPattern(import_path) or
+ // "fill: url(#filter);"
+ (kind.isFromCSS() and strings.startsWith(import_path, "#")) or
- return result orelse error.ModuleNotFound;
- }
+ // "background: url(http://example.com/images/image.png);"
+ strings.startsWith(import_path, "http://") or
- pub fn resolveWithoutSymlinks(r: *Resolver, source_dir: string, import_path: string, kind: ast.ImportKind) !?Result {
- // This implements the module resolution algorithm from node.js, which is
- // described here: https://nodejs.org/api/modules.html#modules_all_together
- var result: Result = Result{ .path_pair = PathPair{ .primary = Path.init("") } };
-
- // Return early if this is already an absolute path. In addition to asking
- // the file system whether this is an absolute path, we also explicitly check
- // whether it starts with a "/" and consider that an absolute path too. This
- // is because relative paths can technically start with a "/" on Windows
- // because it's not an absolute path on Windows. Then people might write code
- // with imports that start with a "/" that works fine on Windows only to
- // experience unexpected build failures later on other operating systems.
- // Treating these paths as absolute paths on all platforms means Windows
- // users will not be able to accidentally make use of these paths.
- if (strings.startsWith(import_path, "/") or std.fs.path.isAbsolutePosix(import_path)) {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path}) catch {};
+ // "background: url(https://example.com/images/image.png);"
+ strings.startsWith(import_path, "https://") or
+
+ // "background: url(//example.com/images/image.png);"
+ strings.startsWith(import_path, "//"))
+ {
+ if (r.debug_logs) |*debug| {
+ try debug.addNote("Marking this path as implicitly external");
+ }
+ r.flushDebugLogs(.success) catch {};
+ return Result{
+ .path_pair = PathPair{
+ .primary = Path.init(import_path),
+ },
+ .is_external = true,
+ .module_type = .esm,
+ };
}
- // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
- if ((r.dirInfoCached(source_dir) catch null)) |_dir_info| {
- const dir_info: *DirInfo = _dir_info;
- if (dir_info.tsconfig_json) |tsconfig| {
- if (tsconfig.paths.count() > 0) {
- if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
- return Result{
- .path_pair = res.path_pair,
- .diff_case = res.diff_case,
- .dirname_fd = dir_info.getFileDescriptor(),
- .is_from_node_modules = res.is_node_module,
- };
- }
+ if (DataURL.parse(import_path)) |_data_url| {
+ const data_url: DataURL = _data_url;
+ // "import 'data:text/javascript,console.log(123)';"
+ // "@import 'data:text/css,body{background:white}';"
+ if (data_url.decode_mime_type() != .Unsupported) {
+ if (r.debug_logs) |*debug| {
+ debug.addNote("Putting this path in the \"dataurl\" namespace") catch {};
}
+ r.flushDebugLogs(.success) catch {};
+ return Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } };
}
- }
- if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.exists(import_path)) {
- // If the string literal in the source text is an absolute path and has
- // been marked as an external module, mark it as *not* an absolute path.
- // That way we preserve the literal text in the output and don't generate
- // a relative path from the output directory to that path.
+ // "background: url(data:image/png;base64,iVBORw0KGgo=);"
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path}) catch {};
+ debug.addNote("Marking this \"dataurl\" as external") catch {};
}
-
+ r.flushDebugLogs(.success) catch {};
return Result{
- .path_pair = .{ .primary = Path.init(import_path) },
+ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") },
.is_external = true,
};
}
- // Run node's resolution rules (e.g. adding ".js")
- if (r.loadAsFileOrDirectory(import_path, kind)) |entry| {
- return Result{
- .dirname_fd = entry.dirname_fd,
- .path_pair = entry.path_pair,
- .diff_case = entry.diff_case,
- .is_from_node_modules = entry.is_node_module,
- };
+ // Fail now if there is no directory to resolve in. This can happen for
+ // virtual modules (e.g. stdin) if a resolve directory is not specified.
+ if (source_dir.len == 0) {
+ if (r.debug_logs) |*debug| {
+ debug.addNote("Cannot resolve this path without a directory") catch {};
+ }
+ r.flushDebugLogs(.fail) catch {};
+ return error.MissingResolveDir;
}
- return null;
+ r.mutex.lock();
+ defer r.mutex.unlock();
+
+ var result = try r.resolveWithoutSymlinks(source_dir, import_path, kind);
+
+ return result orelse error.ModuleNotFound;
}
- // Check both relative and package paths for CSS URL tokens, with relative
- // paths taking precedence over package paths to match Webpack behavior.
- const is_package_path = isPackagePath(import_path);
- var check_relative = !is_package_path or kind == .url;
- var check_package = is_package_path;
-
- if (check_relative) {
- const parts = [_]string{ source_dir, import_path };
- const abs_path = r.fs.absBuf(&parts, &relative_abs_path_buf);
-
- if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.exists(abs_path)) {
- // If the string literal in the source text is an absolute path and has
- // been marked as an external module, mark it as *not* an absolute path.
- // That way we preserve the literal text in the output and don't generate
- // a relative path from the output directory to that path.
+ pub fn resolveWithoutSymlinks(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !?Result {
+ // This implements the module resolution algorithm from node.js, which is
+ // described here: https://nodejs.org/api/modules.html#modules_all_together
+ var result: Result = Result{ .path_pair = PathPair{ .primary = Path.init("") } };
+
+ // Return early if this is already an absolute path. In addition to asking
+ // the file system whether this is an absolute path, we also explicitly check
+ // whether it starts with a "/" and consider that an absolute path too. This
+ // is because relative paths can technically start with a "/" on Windows
+ // because it's not an absolute path on Windows. Then people might write code
+ // with imports that start with a "/" that works fine on Windows only to
+ // experience unexpected build failures later on other operating systems.
+ // Treating these paths as absolute paths on all platforms means Windows
+ // users will not be able to accidentally make use of these paths.
+ if (strings.startsWith(import_path, "/") or std.fs.path.isAbsolutePosix(import_path)) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path}) catch {};
+ debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path}) catch {};
}
- return Result{
- .path_pair = .{ .primary = Path.init(r.fs.filename_store.append(abs_path) catch unreachable) },
- .is_external = true,
- };
- }
-
- // Check the "browser" map for the first time (1 out of 2)
- if (r.dirInfoCached(std.fs.path.dirname(abs_path) orelse unreachable) catch null) |_import_dir_info| {
- if (_import_dir_info.getEnclosingBrowserScope()) |import_dir_info| {
- if (import_dir_info.package_json) |pkg| {
- const pkg_json_dir = std.fs.path.dirname(pkg.source.key_path.text) orelse unreachable;
-
- const rel_path = r.fs.relative(pkg_json_dir, abs_path);
- if (r.checkBrowserMap(pkg, rel_path)) |remap| {
- // Is the path disabled?
- if (remap.len == 0) {
- var _path = Path.init(r.fs.filename_store.append(abs_path) catch unreachable);
- _path.is_disabled = true;
+ // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
+ if ((r.dirInfoCached(source_dir) catch null)) |_dir_info| {
+ const dir_info: *DirInfo = _dir_info;
+ if (dir_info.tsconfig_json) |tsconfig| {
+ if (tsconfig.paths.count() > 0) {
+ if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
return Result{
- .path_pair = PathPair{
- .primary = _path,
- },
- };
- }
-
- if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| {
- result = Result{
- .path_pair = _result.path_pair,
- .diff_case = _result.diff_case,
- .is_from_node_modules = _result.is_node_module,
- .module_type = pkg.module_type,
- .dirname_fd = _result.dirname_fd,
- .package_json_version = pkg.version,
+ .path_pair = res.path_pair,
+ .diff_case = res.diff_case,
+ .dirname_fd = dir_info.getFileDescriptor(),
+ .is_from_node_modules = res.is_node_module,
};
- check_relative = false;
- check_package = false;
}
}
}
}
+
+ if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.exists(import_path)) {
+ // If the string literal in the source text is an absolute path and has
+ // been marked as an external module, mark it as *not* an absolute path.
+ // That way we preserve the literal text in the output and don't generate
+ // a relative path from the output directory to that path.
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path}) catch {};
+ }
+
+ return Result{
+ .path_pair = .{ .primary = Path.init(import_path) },
+ .is_external = true,
+ };
+ }
+
+ // Run node's resolution rules (e.g. adding ".js")
+ if (r.loadAsFileOrDirectory(import_path, kind)) |entry| {
+ return Result{
+ .dirname_fd = entry.dirname_fd,
+ .path_pair = entry.path_pair,
+ .diff_case = entry.diff_case,
+ .is_from_node_modules = entry.is_node_module,
+ };
+ }
+
+ return null;
}
+ // Check both relative and package paths for CSS URL tokens, with relative
+ // paths taking precedence over package paths to match Webpack behavior.
+ const is_package_path = isPackagePath(import_path);
+ var check_relative = !is_package_path or kind == .url;
+ var check_package = is_package_path;
+
if (check_relative) {
- if (r.loadAsFileOrDirectory(abs_path, kind)) |res| {
- check_package = false;
- result = Result{
- .path_pair = res.path_pair,
- .diff_case = res.diff_case,
- .is_from_node_modules = res.is_node_module,
- .dirname_fd = res.dirname_fd,
- .package_json_version = res.package_json_version,
+ const parts = [_]string{ source_dir, import_path };
+ const abs_path = r.fs.absBuf(&parts, &relative_abs_path_buf);
+
+ if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.exists(abs_path)) {
+ // If the string literal in the source text is an absolute path and has
+ // been marked as an external module, mark it as *not* an absolute path.
+ // That way we preserve the literal text in the output and don't generate
+ // a relative path from the output directory to that path.
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path}) catch {};
+ }
+
+ return Result{
+ .path_pair = .{ .primary = Path.init(r.fs.filename_store.append(abs_path) catch unreachable) },
+ .is_external = true,
};
- } else if (!check_package) {
- return null;
}
- }
- }
- if (check_package) {
- // Check for external packages first
- if (r.opts.external.node_modules.count() > 0) {
- var query = import_path;
- while (true) {
- if (r.opts.external.node_modules.exists(query)) {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query}) catch {};
+ // Check the "browser" map for the first time (1 out of 2)
+ if (r.dirInfoCached(std.fs.path.dirname(abs_path) orelse unreachable) catch null) |_import_dir_info| {
+ if (_import_dir_info.getEnclosingBrowserScope()) |import_dir_info| {
+ if (import_dir_info.package_json) |pkg| {
+ const pkg_json_dir = std.fs.path.dirname(pkg.source.key_path.text) orelse unreachable;
+
+ const rel_path = r.fs.relative(pkg_json_dir, abs_path);
+ if (r.checkBrowserMap(pkg, rel_path)) |remap| {
+ // Is the path disabled?
+ if (remap.len == 0) {
+ var _path = Path.init(r.fs.filename_store.append(abs_path) catch unreachable);
+ _path.is_disabled = true;
+ return Result{
+ .path_pair = PathPair{
+ .primary = _path,
+ },
+ };
+ }
+
+ if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| {
+ result = Result{
+ .path_pair = _result.path_pair,
+ .diff_case = _result.diff_case,
+ .is_from_node_modules = _result.is_node_module,
+ .module_type = pkg.module_type,
+ .dirname_fd = _result.dirname_fd,
+ .package_json_version = pkg.version,
+ };
+ check_relative = false;
+ check_package = false;
+ }
+ }
}
- return Result{
- .path_pair = .{ .primary = Path.init(query) },
- .is_external = true,
- };
}
+ }
- // If the module "foo" has been marked as external, we also want to treat
- // paths into that module such as "foo/bar" as external too.
- var slash = strings.lastIndexOfChar(query, '/') orelse break;
- query = query[0..slash];
+ if (check_relative) {
+ if (r.loadAsFileOrDirectory(abs_path, kind)) |res| {
+ check_package = false;
+ result = Result{
+ .path_pair = res.path_pair,
+ .diff_case = res.diff_case,
+ .is_from_node_modules = res.is_node_module,
+ .dirname_fd = res.dirname_fd,
+ .package_json_version = res.package_json_version,
+ };
+ } else if (!check_package) {
+ return null;
+ }
}
}
- const source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return null;
-
- // Support remapping one package path to another via the "browser" field
- if (source_dir_info.getEnclosingBrowserScope()) |browser_scope| {
- if (browser_scope.package_json) |package_json| {
- if (r.checkBrowserMap(package_json, import_path)) |remapped| {
- if (remapped.len == 0) {
- // "browser": {"module": false}
- if (r.loadNodeModules(import_path, kind, source_dir_info)) |node_module| {
- var pair = node_module.path_pair;
- pair.primary.is_disabled = true;
- if (pair.secondary != null) {
- pair.secondary.?.is_disabled = true;
+ if (check_package) {
+ // Check for external packages first
+ if (r.opts.external.node_modules.count() > 0) {
+ var query = import_path;
+ while (true) {
+ if (r.opts.external.node_modules.exists(query)) {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query}) catch {};
+ }
+ return Result{
+ .path_pair = .{ .primary = Path.init(query) },
+ .is_external = true,
+ };
+ }
+
+ // If the module "foo" has been marked as external, we also want to treat
+ // paths into that module such as "foo/bar" as external too.
+ var slash = strings.lastIndexOfChar(query, '/') orelse break;
+ query = query[0..slash];
+ }
+ }
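
Aside: the query-shortening loop above checks ever-shorter prefixes, so marking "foo" as external also covers subpaths like "foo/bar". A sketch with a made-up specifier, using std.mem in place of the repo's strings helpers:

const std = @import("std");

pub fn main() void {
    var query: []const u8 = "foo/bar/baz";
    while (true) {
        std.debug.print("is \"{s}\" external?\n", .{query});
        // Drop the last path segment and try again.
        const slash = std.mem.lastIndexOfScalar(u8, query, '/') orelse break;
        query = query[0..slash];
    }
}
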
+
+ const source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return null;
+
+ // Support remapping one package path to another via the "browser" field
+ if (source_dir_info.getEnclosingBrowserScope()) |browser_scope| {
+ if (browser_scope.package_json) |package_json| {
+ if (r.checkBrowserMap(package_json, import_path)) |remapped| {
+ if (remapped.len == 0) {
+ // "browser": {"module": false}
+ if (r.loadNodeModules(import_path, kind, source_dir_info)) |node_module| {
+ var pair = node_module.path_pair;
+ pair.primary.is_disabled = true;
+ if (pair.secondary != null) {
+ pair.secondary.?.is_disabled = true;
+ }
+ return Result{
+ .path_pair = pair,
+ .dirname_fd = node_module.dirname_fd,
+ .diff_case = node_module.diff_case,
+ .is_from_node_modules = true,
+ .package_json_version = package_json.version,
+ };
}
+ } else {
+ var primary = Path.init(import_path);
+ primary.is_disabled = true;
return Result{
- .path_pair = pair,
- .dirname_fd = node_module.dirname_fd,
- .diff_case = node_module.diff_case,
- .is_from_node_modules = true,
- .package_json_version = package_json.version,
+ .path_pair = PathPair{ .primary = primary },
+                                // diff_case might not always be null here, but we currently assume it is.
+ .diff_case = null,
};
}
- } else {
- var primary = Path.init(import_path);
- primary.is_disabled = true;
- return Result{
- .path_pair = PathPair{ .primary = primary },
- // this might not be null? i think it is
- .diff_case = null,
- };
}
}
}
- }
- if (r.resolveWithoutRemapping(source_dir_info, import_path, kind)) |res| {
- result = Result{
- .path_pair = res.path_pair,
- .diff_case = res.diff_case,
- .is_from_node_modules = res.is_node_module,
- .dirname_fd = res.dirname_fd,
- .package_json_version = res.package_json_version,
- };
- } else {
- // Note: node's "self references" are not currently supported
- return null;
+ if (r.resolveWithoutRemapping(source_dir_info, import_path, kind)) |res| {
+ result = Result{
+ .path_pair = res.path_pair,
+ .diff_case = res.diff_case,
+ .is_from_node_modules = res.is_node_module,
+ .dirname_fd = res.dirname_fd,
+ .package_json_version = res.package_json_version,
+ };
+ } else {
+ // Note: node's "self references" are not currently supported
+ return null;
+ }
}
- }
- var iter = result.path_pair.iter();
- while (iter.next()) |*path| {
- const dirname = std.fs.path.dirname(path.text) orelse continue;
- const base_dir_info = ((r.dirInfoCached(dirname) catch null)) orelse continue;
- const dir_info = base_dir_info.getEnclosingBrowserScope() orelse continue;
- const pkg_json = dir_info.package_json orelse continue;
- const rel_path = r.fs.relative(pkg_json.source.key_path.text, path.text);
- result.module_type = pkg_json.module_type;
- result.package_json_version = if (result.package_json_version == null) pkg_json.version else result.package_json_version;
- if (r.checkBrowserMap(pkg_json, rel_path)) |remapped| {
- if (remapped.len == 0) {
- path.is_disabled = true;
- } else if (r.resolveWithoutRemapping(dir_info, remapped, kind)) |remapped_result| {
- result.is_from_node_modules = remapped_result.is_node_module;
-
- switch (iter.index) {
- 0 => {
- result.path_pair.primary = remapped_result.path_pair.primary;
- result.dirname_fd = remapped_result.dirname_fd;
- },
- else => {
- result.path_pair.secondary = remapped_result.path_pair.primary;
- },
+ var iter = result.path_pair.iter();
+ while (iter.next()) |*path| {
+ const dirname = std.fs.path.dirname(path.text) orelse continue;
+ const base_dir_info = ((r.dirInfoCached(dirname) catch null)) orelse continue;
+ const dir_info = base_dir_info.getEnclosingBrowserScope() orelse continue;
+ const pkg_json = dir_info.package_json orelse continue;
+ const rel_path = r.fs.relative(pkg_json.source.key_path.text, path.text);
+ result.module_type = pkg_json.module_type;
+ result.package_json_version = if (result.package_json_version == null) pkg_json.version else result.package_json_version;
+ if (r.checkBrowserMap(pkg_json, rel_path)) |remapped| {
+ if (remapped.len == 0) {
+ path.is_disabled = true;
+ } else if (r.resolveWithoutRemapping(dir_info, remapped, kind)) |remapped_result| {
+ result.is_from_node_modules = remapped_result.is_node_module;
+
+ switch (iter.index) {
+ 0 => {
+ result.path_pair.primary = remapped_result.path_pair.primary;
+ result.dirname_fd = remapped_result.dirname_fd;
+ },
+ else => {
+ result.path_pair.secondary = remapped_result.path_pair.primary;
+ },
+ }
}
}
}
- }
-
- return result;
- }
- pub fn loadNodeModules(r: *Resolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult {
- var res = _loadNodeModules(r, import_path, kind, _dir_info) orelse return null;
- res.is_node_module = true;
- return res;
- }
-
- threadlocal var load_as_file_or_directory_via_tsconfig_base_path: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ return result;
+ }
- pub fn _loadNodeModules(r: *Resolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult {
- var dir_info = _dir_info;
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path }) catch {};
- debug.increaseIndent() catch {};
+ pub fn loadNodeModules(r: *ThisResolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult {
+ var res = _loadNodeModules(r, import_path, kind, _dir_info) orelse return null;
+ res.is_node_module = true;
+ return res;
}
- defer {
+ threadlocal var load_as_file_or_directory_via_tsconfig_base_path: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+
+ pub fn _loadNodeModules(r: *ThisResolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult {
+ var dir_info = _dir_info;
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path }) catch {};
+ debug.increaseIndent() catch {};
}
- }
- // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
-
- if (dir_info.tsconfig_json) |tsconfig| {
- // Try path substitutions first
- if (tsconfig.paths.count() > 0) {
- if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
- return res;
+ defer {
+ if (r.debug_logs) |*debug| {
+ debug.decreaseIndent() catch {};
}
}
- // Try looking up the path relative to the base URL
- if (tsconfig.hasBaseURL()) {
- const base = tsconfig.base_url;
- const paths = [_]string{ base, import_path };
- const abs = r.fs.absBuf(&paths, &load_as_file_or_directory_via_tsconfig_base_path);
+ // First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
- if (r.loadAsFileOrDirectory(abs, kind)) |res| {
- return res;
+ if (dir_info.tsconfig_json) |tsconfig| {
+ // Try path substitutions first
+ if (tsconfig.paths.count() > 0) {
+ if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
+ return res;
+ }
}
- // r.allocator.free(abs);
- }
- }
- // Then check for the package in any enclosing "node_modules" directories
- while (true) {
- // Skip directories that are themselves called "node_modules", since we
- // don't ever want to search for "node_modules/node_modules"
- if (dir_info.has_node_modules) {
- var _paths = [_]string{ dir_info.abs_path, "node_modules", import_path };
- const abs_path = r.fs.abs(&_paths);
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path}) catch {};
+ // Try looking up the path relative to the base URL
+ if (tsconfig.hasBaseURL()) {
+ const base = tsconfig.base_url;
+ const paths = [_]string{ base, import_path };
+ const abs = r.fs.absBuf(&paths, &load_as_file_or_directory_via_tsconfig_base_path);
+
+ if (r.loadAsFileOrDirectory(abs, kind)) |res| {
+ return res;
+ }
+ // r.allocator.free(abs);
}
+ }
- // TODO: esm "exports" field goes here!!! Here!!
+ // Then check for the package in any enclosing "node_modules" directories
+ while (true) {
+ // Skip directories that are themselves called "node_modules", since we
+ // don't ever want to search for "node_modules/node_modules"
+ if (dir_info.has_node_modules) {
+ var _paths = [_]string{ dir_info.abs_path, "node_modules", import_path };
+ const abs_path = r.fs.abs(&_paths);
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path}) catch {};
+ }
+
+ // TODO: esm "exports" field goes here!!! Here!!
- if (r.loadAsFileOrDirectory(abs_path, kind)) |res| {
- return res;
+ if (r.loadAsFileOrDirectory(abs_path, kind)) |res| {
+ return res;
+ }
+ // r.allocator.free(abs_path);
}
- // r.allocator.free(abs_path);
+
+ dir_info = dir_info.getParent() orelse break;
}
- dir_info = dir_info.getParent() orelse break;
+ // Mostly to cut scope, we don't resolve the `NODE_PATH` environment variable.
+ // But also: https://github.com/nodejs/node/issues/38128#issuecomment-814969356
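+ //
+ // Illustratively (hypothetical paths), resolving "react" from "/app/src/pages" probes
+ // "node_modules/react" in each enclosing directory that actually contains a
+ // "node_modules" folder: "/app/src/pages", then "/app/src", "/app", and "/".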
+
+ return null;
}
- // Mostly to cut scope, we don't resolve `NODE_PATH` environment variable.
- // But also: https://github.com/nodejs/node/issues/38128#issuecomment-814969356
+ threadlocal var resolve_without_remapping_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ pub fn resolveWithoutRemapping(r: *ThisResolver, source_dir_info: *DirInfo, import_path: string, kind: ast.ImportKind) ?MatchResult {
+ if (isPackagePath(import_path)) {
+ return r.loadNodeModules(import_path, kind, source_dir_info);
+ } else {
+ const paths = [_]string{ source_dir_info.abs_path, import_path };
+ var resolved = r.fs.absBuf(&paths, &resolve_without_remapping_buf);
+ return r.loadAsFileOrDirectory(resolved, kind);
+ }
+ }
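+
+ // Illustrative sketch (hypothetical paths): importing "react" from "/app/src" is a
+ // package path, so it goes through loadNodeModules; importing "./util" is joined
+ // to "/app/src/util" and resolved directly as a file or directory.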
+
+ pub fn parseTSConfig(
+ r: *ThisResolver,
+ file: string,
+ dirname_fd: StoredFileDescriptorType,
+ ) !?*TSConfigJSON {
+ const entry = try r.caches.fs.readFile(
+ r.fs,
+ file,
+ dirname_fd,
+ false,
+ );
+ const key_path = Path.init(file);
- return null;
- }
+ const source = logger.Source.initPathString(key_path.text, entry.contents);
+ const file_dir = std.fs.path.dirname(file) orelse return null;
- threadlocal var resolve_without_remapping_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
- pub fn resolveWithoutRemapping(r: *Resolver, source_dir_info: *DirInfo, import_path: string, kind: ast.ImportKind) ?MatchResult {
- if (isPackagePath(import_path)) {
- return r.loadNodeModules(import_path, kind, source_dir_info);
- } else {
- const paths = [_]string{ source_dir_info.abs_path, import_path };
- var resolved = r.fs.absBuf(&paths, &resolve_without_remapping_buf);
- return r.loadAsFileOrDirectory(resolved, kind);
- }
- }
+ var result = (try TSConfigJSON.parse(r.allocator, r.log, source, @TypeOf(r.caches.json), &r.caches.json)) orelse return null;
- pub const TSConfigExtender = struct {
- visited: *StringBoolMap,
- file_dir: string,
- r: *Resolver,
-
- pub fn extends(ctx: *TSConfigExtender, extends: String, range: logger.Range) ?*TSConfigJSON {
- Global.notimpl();
- // if (isPackagePath(extends)) {
- // // // If this is a package path, try to resolve it to a "node_modules"
- // // // folder. This doesn't use the normal node module resolution algorithm
- // // // both because it's different (e.g. we don't want to match a directory)
- // // // and because it would deadlock since we're currently in the middle of
- // // // populating the directory info cache.
- // // var current = ctx.file_dir;
- // // while (true) {
- // // // Skip "node_modules" folders
- // // if (!strings.eql(std.fs.path.basename(current), "node_modules")) {
- // // var paths1 = [_]string{ current, "node_modules", extends };
- // // var join1 = r.fs.absAlloc(ctx.r.allocator, &paths1) catch unreachable;
- // // const res = ctx.r.parseTSConfig(join1, ctx.1) catch |err| {
- // // if (err == error.ENOENT) {
- // // continue;
- // // } else if (err == error.ParseErrorImportCycle) {} else if (err != error.ParseErrorAlreadyLogged) {}
- // // return null;
- // // };
- // // return res;
-
- // // }
- // // }
- // }
- }
- };
+ if (result.hasBaseURL()) {
+ // this might leak
+ if (!std.fs.path.isAbsolute(result.base_url)) {
+ const paths = [_]string{ file_dir, result.base_url };
+ result.base_url = r.fs.filename_store.append(r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
+ }
+ }
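+ // e.g. a relative "baseUrl" of "./src" in "/app/tsconfig.json" (hypothetical)
+ // becomes the absolute "/app/src" here.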
- threadlocal var tsconfig_base_url_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
- pub fn parseTSConfig(
- r: *Resolver,
- file: string,
- dirname_fd: StoredFileDescriptorType,
- ) !?*TSConfigJSON {
- const entry = try r.caches.fs.readFile(
- r.fs,
- file,
- dirname_fd,
- );
- const key_path = Path.init(file);
-
- const source = logger.Source.initPathString(key_path.text, entry.contents);
- const file_dir = std.fs.path.dirname(file) orelse return null;
-
- var result = (try TSConfigJSON.parse(r.allocator, r.log, source, &r.caches.json)) orelse return null;
-
- if (result.hasBaseURL()) {
- // this might leak
- if (!std.fs.path.isAbsolute(result.base_url)) {
+ if (result.paths.count() > 0 and (result.base_url_for_paths.len == 0 or !std.fs.path.isAbsolute(result.base_url_for_paths))) {
+ // this might leak
const paths = [_]string{ file_dir, result.base_url };
- result.base_url = r.fs.filename_store.append(r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
+ result.base_url_for_paths = r.fs.filename_store.append(r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
}
- }
- if (result.paths.count() > 0 and (result.base_url_for_paths.len == 0 or !std.fs.path.isAbsolute(result.base_url_for_paths))) {
- // this might leak
- const paths = [_]string{ file_dir, result.base_url };
- result.base_url_for_paths = r.fs.filename_store.append(r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
+ return result;
}
- return result;
- }
-
- // TODO:
- pub fn prettyPath(r: *Resolver, path: Path) string {
- return path.text;
- }
-
- pub fn parsePackageJSON(r: *Resolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON {
- const pkg = PackageJSON.parse(r, file, dirname_fd) orelse return null;
- var _pkg = try r.allocator.create(PackageJSON);
- _pkg.* = pkg;
- return _pkg;
- }
-
- pub fn isPackagePath(path: string) bool {
- // this could probably be flattened into something more optimized
- return path[0] != '/' and !strings.startsWith(path, "./") and !strings.startsWith(path, "../") and !strings.eql(path, ".") and !strings.eql(path, "..");
- }
-
- pub const DirEntryResolveQueueItem = struct { result: allocators.Result, unsafe_path: string };
- threadlocal var _dir_entry_paths_to_resolve: [256]DirEntryResolveQueueItem = undefined;
- threadlocal var _open_dirs: [256]std.fs.Dir = undefined;
+ // TODO:
+ pub fn prettyPath(r: *ThisResolver, path: Path) string {
+ return path.text;
+ }
- fn dirInfoCached(r: *Resolver, path: string) !?*DirInfo {
- const top_result = try r.dir_cache.getOrPut(path);
- if (top_result.status != .unknown) {
- return r.dir_cache.atIndex(top_result.index);
+ pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON {
+ const pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd) orelse return null;
+ var _pkg = try r.allocator.create(PackageJSON);
+ _pkg.* = pkg;
+ return _pkg;
}
- var i: i32 = 1;
- _dir_entry_paths_to_resolve[0] = (DirEntryResolveQueueItem{ .result = top_result, .unsafe_path = path });
- var top = path;
- var top_parent: allocators.Result = allocators.Result{
- .index = allocators.NotFound,
- .hash = 0,
- .status = .not_found,
- };
- const root_path = if (isWindows) std.fs.path.diskDesignator(path) else "/";
+ pub fn isPackagePath(path: string) bool {
+ // this could probably be flattened into something more optimized
+ return path[0] != '/' and !strings.startsWith(path, "./") and !strings.startsWith(path, "../") and !strings.eql(path, ".") and !strings.eql(path, "..");
+ }
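+
+ // e.g. "react" and "react-dom/server" are package paths; "/abs/file",
+ // "./local", "../up", ".", and ".." are not.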
- while (std.fs.path.dirname(top)) |_top| {
- var result = try r.dir_cache.getOrPut(_top);
- if (result.status != .unknown) {
- top_parent = result;
- break;
+ fn dirInfoCached(r: *ThisResolver, path: string) !?*DirInfo {
+ const top_result = try r.dir_cache.getOrPut(path);
+ if (top_result.status != .unknown) {
+ return r.dir_cache.atIndex(top_result.index);
}
- _dir_entry_paths_to_resolve[@intCast(usize, i)] = DirEntryResolveQueueItem{
- .unsafe_path = _top,
- .result = result,
+
+ var i: i32 = 1;
+ _dir_entry_paths_to_resolve[0] = (DirEntryResolveQueueItem{ .result = top_result, .unsafe_path = path });
+ var top = path;
+ var top_parent: allocators.Result = allocators.Result{
+ .index = allocators.NotFound,
+ .hash = 0,
+ .status = .not_found,
};
- i += 1;
- top = _top;
- }
+ const root_path = if (isWindows) std.fs.path.diskDesignator(path) else "/";
- if (std.fs.path.dirname(top) == null and !strings.eql(top, root_path)) {
- var result = try r.dir_cache.getOrPut(root_path);
- if (result.status != .unknown) {
- top_parent = result;
- } else {
+ while (std.fs.path.dirname(top)) |_top| {
+ var result = try r.dir_cache.getOrPut(_top);
+ if (result.status != .unknown) {
+ top_parent = result;
+ break;
+ }
_dir_entry_paths_to_resolve[@intCast(usize, i)] = DirEntryResolveQueueItem{
- .unsafe_path = root_path,
+ .unsafe_path = _top,
.result = result,
};
i += 1;
- top = root_path;
+ top = _top;
}
- }
-
- var queue_slice: []DirEntryResolveQueueItem = _dir_entry_paths_to_resolve[0..@intCast(usize, i)];
- std.debug.assert(queue_slice.len > 0);
- var open_dir_count: usize = 0;
-
- // When this function halts, any item not processed means it's not found.
- defer {
- // Anything
- if (open_dir_count > 0 and r.fs.fs.needToCloseFiles()) {
- var open_dirs: []std.fs.Dir = _open_dirs[0..open_dir_count];
- for (open_dirs) |*open_dir| {
- open_dir.close();
+ if (std.fs.path.dirname(top) == null and !strings.eql(top, root_path)) {
+ var result = try r.dir_cache.getOrPut(root_path);
+ if (result.status != .unknown) {
+ top_parent = result;
+ } else {
+ _dir_entry_paths_to_resolve[@intCast(usize, i)] = DirEntryResolveQueueItem{
+ .unsafe_path = root_path,
+ .result = result,
+ };
+ i += 1;
+ top = root_path;
}
}
- }
- var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
-
- rfs.entries_mutex.lock();
- defer rfs.entries_mutex.unlock();
-
- // We want to walk in a straight line from the topmost directory to the desired directory
- // For each directory we visit, we get the entries, but not traverse into child directories
- // (unless those child directores are in the queue)
- // Going top-down rather than bottom-up should have best performance because we can use
- // the file handle from the parent directory to open the child directory
- // It's important that we walk in precisely a straight line
- // For example
- // "/home/jarred/Code/node_modules/react/cjs/react.development.js"
- // ^
- // If we start there, we will traverse all of /home/jarred, including e.g. /home/jarred/Downloads
- // which is completely irrelevant.
-
- // After much experimentation, fts_open is not the fastest way. fts actually just uses readdir!!
- var _safe_path: ?string = null;
-
- // Start at the top.
- while (queue_slice.len > 0) {
- var queue_top = queue_slice[queue_slice.len - 1];
- defer top_parent = queue_top.result;
- queue_slice.len -= 1;
-
- var _open_dir: anyerror!std.fs.Dir = undefined;
- if (open_dir_count > 0) {
- _open_dir = _open_dirs[open_dir_count - 1].openDir(std.fs.path.basename(queue_top.unsafe_path), .{ .iterate = true });
- } else {
- _open_dir = std.fs.openDirAbsolute(queue_top.unsafe_path, .{ .iterate = true });
- }
+ var queue_slice: []DirEntryResolveQueueItem = _dir_entry_paths_to_resolve[0..@intCast(usize, i)];
+ std.debug.assert(queue_slice.len > 0);
+ var open_dir_count: usize = 0;
- const open_dir = _open_dir catch |err| {
- switch (err) {
- error.EACCESS => {},
-
- // Ignore "ENOTDIR" here so that calling "ReadDirectory" on a file behaves
- // as if there is nothing there at all instead of causing an error due to
- // the directory actually being a file. This is a workaround for situations
- // where people try to import from a path containing a file as a parent
- // directory. The "pnpm" package manager generates a faulty "NODE_PATH"
- // list which contains such paths and treating them as missing means we just
- // ignore them during path resolution.
- error.ENOENT,
- error.ENOTDIR,
- error.IsDir,
- error.NotDir,
- error.FileNotFound,
- => {
- return null;
- },
+ // When this function halts, any item not processed means it's not found.
+ defer {
- else => {
- var cached_dir_entry_result = rfs.entries.getOrPut(queue_top.unsafe_path) catch unreachable;
- r.dir_cache.markNotFound(queue_top.result);
- rfs.entries.markNotFound(cached_dir_entry_result);
- const pretty = r.prettyPath(Path.init(queue_top.unsafe_path));
-
- r.log.addErrorFmt(
- null,
- logger.Loc{},
- r.allocator,
- "Cannot read directory \"{s}\": {s}",
- .{
- pretty,
- @errorName(err),
- },
- ) catch {};
- },
+ // Close any directories we opened, if the filesystem requires closing them.
+ if (open_dir_count > 0 and r.fs.fs.needToCloseFiles()) {
+ var open_dirs: []std.fs.Dir = _open_dirs[0..open_dir_count];
+ for (open_dirs) |*open_dir| {
+ open_dir.close();
+ }
}
-
- return null;
- };
- Fs.FileSystem.setMaxFd(open_dir.fd);
- // these objects mostly just wrap the file descriptor, so it's fine to keep it.
- _open_dirs[open_dir_count] = open_dir;
- open_dir_count += 1;
-
- if (_safe_path == null) {
- // Now that we've opened the topmost directory successfully, it's reasonable to store the slice.
- _safe_path = try r.fs.dirname_store.append(path);
}
- const safe_path = _safe_path.?;
-
- var dir_path_i = std.mem.indexOf(u8, safe_path, queue_top.unsafe_path) orelse unreachable;
- const dir_path = safe_path[dir_path_i .. dir_path_i + queue_top.unsafe_path.len];
- var dir_iterator = open_dir.iterate();
+ var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
- var cached_dir_entry_result = rfs.entries.getOrPut(dir_path) catch unreachable;
+ rfs.entries_mutex.lock();
+ defer rfs.entries_mutex.unlock();
- var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined;
- var has_dir_entry_result: bool = false;
+ // We want to walk in a straight line from the topmost directory to the desired directory.
+ // For each directory we visit, we get the entries, but we do not traverse into child directories
+ // (unless those child directories are in the queue).
+ // Going top-down rather than bottom-up should have the best performance because we can use
+ // the file handle from the parent directory to open the child directory.
+ // It's important that we walk in precisely a straight line.
+ // For example:
+ // "/home/jarred/Code/node_modules/react/cjs/react.development.js"
+ // ^
+ // If we start there, we will traverse all of /home/jarred, including e.g. /home/jarred/Downloads,
+ // which is completely irrelevant.
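+ // Concretely, the queue for that path is drained from "/" down through "/home",
+ // "/home/jarred", and so on, reusing each parent's file descriptor to openDir
+ // the next child.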
- if (rfs.entries.atIndex(cached_dir_entry_result.index)) |cached_entry| {
- if (std.meta.activeTag(cached_entry.*) == .entries) {
- dir_entries_option = cached_entry;
- }
- }
+ // After much experimentation, fts_open is not the fastest way. fts actually just uses readdir!!
+ var _safe_path: ?string = null;
- if (!has_dir_entry_result) {
- dir_entries_option = try rfs.entries.put(&cached_dir_entry_result, .{
- .entries = Fs.FileSystem.DirEntry.init(dir_path, r.fs.allocator),
- });
+ // Start at the top.
+ while (queue_slice.len > 0) {
+ var queue_top = queue_slice[queue_slice.len - 1];
+ defer top_parent = queue_top.result;
+ queue_slice.len -= 1;
- if (FeatureFlags.store_file_descriptors) {
- Fs.FileSystem.setMaxFd(open_dir.fd);
- dir_entries_option.entries.fd = open_dir.fd;
+ var _open_dir: anyerror!std.fs.Dir = undefined;
+ if (open_dir_count > 0) {
+ _open_dir = _open_dirs[open_dir_count - 1].openDir(std.fs.path.basename(queue_top.unsafe_path), .{ .iterate = true });
+ } else {
+ _open_dir = std.fs.openDirAbsolute(queue_top.unsafe_path, .{ .iterate = true });
}
- has_dir_entry_result = true;
- }
+ const open_dir = _open_dir catch |err| {
+ switch (err) {
+ error.EACCESS => {},
+
+ // Ignore "ENOTDIR" here so that calling "ReadDirectory" on a file behaves
+ // as if there is nothing there at all instead of causing an error due to
+ // the directory actually being a file. This is a workaround for situations
+ // where people try to import from a path containing a file as a parent
+ // directory. The "pnpm" package manager generates a faulty "NODE_PATH"
+ // list which contains such paths and treating them as missing means we just
+ // ignore them during path resolution.
+ error.ENOENT,
+ error.ENOTDIR,
+ error.IsDir,
+ error.NotDir,
+ error.FileNotFound,
+ => {
+ return null;
+ },
- while (try dir_iterator.next()) |_value| {
- const value: std.fs.Dir.Entry = _value;
- dir_entries_option.entries.addEntry(value) catch unreachable;
- }
+ else => {
+ var cached_dir_entry_result = rfs.entries.getOrPut(queue_top.unsafe_path) catch unreachable;
+ r.dir_cache.markNotFound(queue_top.result);
+ rfs.entries.markNotFound(cached_dir_entry_result);
+ const pretty = r.prettyPath(Path.init(queue_top.unsafe_path));
+
+ r.log.addErrorFmt(
+ null,
+ logger.Loc{},
+ r.allocator,
+ "Cannot read directory \"{s}\": {s}",
+ .{
+ pretty,
+ @errorName(err),
+ },
+ ) catch {};
+ },
+ }
- const dir_info = try r.dirInfoUncached(
- dir_path,
- dir_entries_option,
- queue_top.result,
- cached_dir_entry_result.index,
- r.dir_cache.atIndex(top_parent.index),
- top_parent.index,
- open_dir.fd,
- );
+ return null;
+ };
+ Fs.FileSystem.setMaxFd(open_dir.fd);
+ // these objects mostly just wrap the file descriptor, so it's fine to keep them.
+ _open_dirs[open_dir_count] = open_dir;
+ open_dir_count += 1;
+
+ if (_safe_path == null) {
+ // Now that we've opened the topmost directory successfully, it's reasonable to store the slice.
+ _safe_path = try r.fs.dirname_store.append(path);
+ }
+ const safe_path = _safe_path.?;
- var dir_info_ptr = try r.dir_cache.put(&queue_top.result, dir_info);
+ var dir_path_i = std.mem.indexOf(u8, safe_path, queue_top.unsafe_path) orelse unreachable;
+ const dir_path = safe_path[dir_path_i .. dir_path_i + queue_top.unsafe_path.len];
- if (queue_slice.len == 0) {
- return dir_info_ptr;
+ var dir_iterator = open_dir.iterate();
- // Is the directory we're searching for actually a file?
- } else if (queue_slice.len == 1) {
- // const next_in_queue = queue_slice[0];
- // const next_basename = std.fs.path.basename(next_in_queue.unsafe_path);
- // if (dir_info_ptr.getEntries()) |entries| {
- // if (entries.get(next_basename) != null) {
- // return null;
- // }
- // }
- }
- }
+ var cached_dir_entry_result = rfs.entries.getOrPut(dir_path) catch unreachable;
- unreachable;
- }
+ var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined;
+ var has_dir_entry_result: bool = false;
- pub const MatchResult = struct {
- path_pair: PathPair,
- dirname_fd: StoredFileDescriptorType = 0,
- file_fd: StoredFileDescriptorType = 0,
- is_node_module: bool = false,
- package_json_version: ?string = null,
- diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
- };
+ if (rfs.entries.atIndex(cached_dir_entry_result.index)) |cached_entry| {
+ if (std.meta.activeTag(cached_entry.*) == .entries) {
+ dir_entries_option = cached_entry;
+ // Mark the cached entry as found so it isn't replaced with a fresh one below.
+ has_dir_entry_result = true;
+ }
+ }
- // This closely follows the behavior of "tryLoadModuleUsingPaths()" in the
- // official TypeScript compiler
- pub fn matchTSConfigPaths(r: *Resolver, tsconfig: *TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path }) catch unreachable;
- }
+ if (!has_dir_entry_result) {
+ dir_entries_option = try rfs.entries.put(&cached_dir_entry_result, .{
+ .entries = Fs.FileSystem.DirEntry.init(dir_path, r.fs.allocator),
+ });
- var abs_base_url = tsconfig.base_url_for_paths;
+ if (FeatureFlags.store_file_descriptors) {
+ Fs.FileSystem.setMaxFd(open_dir.fd);
+ dir_entries_option.entries.fd = open_dir.fd;
+ }
- // The explicit base URL should take precedence over the implicit base URL
- // if present. This matters when a tsconfig.json file overrides "baseUrl"
- // from another extended tsconfig.json file but doesn't override "paths".
- if (tsconfig.hasBaseURL()) {
- abs_base_url = tsconfig.base_url;
- }
+ has_dir_entry_result = true;
+ }
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url}) catch unreachable;
- }
+ while (try dir_iterator.next()) |_value| {
+ const value: std.fs.Dir.Entry = _value;
+ dir_entries_option.entries.addEntry(value) catch unreachable;
+ }
- // Check for exact matches first
- {
- var iter = tsconfig.paths.iterator();
- while (iter.next()) |entry| {
- const key = entry.key;
+ const dir_info = try r.dirInfoUncached(
+ dir_path,
+ dir_entries_option,
+ queue_top.result,
+ cached_dir_entry_result.index,
+ r.dir_cache.atIndex(top_parent.index),
+ top_parent.index,
+ open_dir.fd,
+ );
- if (strings.eql(key, path)) {
- for (entry.value) |original_path| {
- var absolute_original_path = original_path;
- var was_alloc = false;
+ var dir_info_ptr = try r.dir_cache.put(&queue_top.result, dir_info);
- if (!std.fs.path.isAbsolute(absolute_original_path)) {
- const parts = [_]string{ abs_base_url, original_path };
- absolute_original_path = r.fs.absAlloc(r.allocator, &parts) catch unreachable;
- was_alloc = true;
- }
+ if (queue_slice.len == 0) {
+ return dir_info_ptr;
- if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| {
- return res;
- } else if (was_alloc) {
- r.allocator.free(absolute_original_path);
- }
- }
-
- return null;
+ // Is the directory we're searching for actually a file?
+ } else if (queue_slice.len == 1) {
+ // const next_in_queue = queue_slice[0];
+ // const next_basename = std.fs.path.basename(next_in_queue.unsafe_path);
+ // if (dir_info_ptr.getEntries()) |entries| {
+ // if (entries.get(next_basename) != null) {
+ // return null;
+ // }
+ // }
}
}
- }
- const TSConfigMatch = struct {
- prefix: string,
- suffix: string,
- original_paths: []string,
- };
-
- var longest_match: TSConfigMatch = undefined;
- var longest_match_prefix_length: i32 = -1;
- var longest_match_suffix_length: i32 = -1;
-
- var iter = tsconfig.paths.iterator();
- while (iter.next()) |entry| {
- const key = entry.key;
- const original_paths = entry.value;
-
- if (strings.indexOfChar(key, '*')) |star_index| {
- const prefix = key[0..star_index];
- const suffix = key[star_index..key.len];
-
- // Find the match with the longest prefix. If two matches have the same
- // prefix length, pick the one with the longest suffix. This second edge
- // case isn't handled by the TypeScript compiler, but we handle it
- // because we want the output to always be deterministic and Go map
- // iteration order is deliberately non-deterministic.
- if (strings.startsWith(path, prefix) and strings.endsWith(path, suffix) and (prefix.len > longest_match_prefix_length or (prefix.len == longest_match_prefix_length and suffix.len > longest_match_suffix_length))) {
- longest_match_prefix_length = @intCast(i32, prefix.len);
- longest_match_suffix_length = @intCast(i32, suffix.len);
- longest_match = TSConfigMatch{ .prefix = prefix, .suffix = suffix, .original_paths = original_paths };
- }
- }
+ unreachable;
}
- // If there is at least one match, only consider the one with the longest
- // prefix. This matches the behavior of the TypeScript compiler.
- if (longest_match_prefix_length > -1) {
+ // This closely follows the behavior of "tryLoadModuleUsingPaths()" in the
+ // official TypeScript compiler
+ pub fn matchTSConfigPaths(r: *ThisResolver, tsconfig: *TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix }) catch unreachable;
+ debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path }) catch unreachable;
}
- for (longest_match.original_paths) |original_path| {
- // Swap out the "*" in the original path for whatever the "*" matched
- const matched_text = path[longest_match.prefix.len .. path.len - longest_match.suffix.len];
+ var abs_base_url = tsconfig.base_url_for_paths;
- std.mem.copy(
- u8,
- &TemporaryBuffer.TSConfigMatchPathBuf,
- original_path,
- );
- var start: usize = 0;
- var total_length: usize = 0;
- const star = std.mem.indexOfScalar(u8, original_path, '*') orelse unreachable;
- total_length = star;
- std.mem.copy(u8, &TemporaryBuffer.TSConfigMatchPathBuf, original_path[0..total_length]);
- start = total_length;
- total_length += matched_text.len;
- std.mem.copy(u8, TemporaryBuffer.TSConfigMatchPathBuf[start..total_length], matched_text);
- start = total_length;
-
- total_length += original_path.len - star + 1; // this might be an off by one.
- std.mem.copy(u8, TemporaryBuffer.TSConfigMatchPathBuf[start..TemporaryBuffer.TSConfigMatchPathBuf.len], original_path[star..original_path.len]);
- const region = TemporaryBuffer.TSConfigMatchPathBuf[0..total_length];
-
- // Load the original path relative to the "baseUrl" from tsconfig.json
- var absolute_original_path: string = region;
-
- var did_allocate = false;
- if (!std.fs.path.isAbsolute(region)) {
- var paths = [_]string{ abs_base_url, original_path };
- absolute_original_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable;
- did_allocate = true;
- } else {
- absolute_original_path = std.mem.dupe(r.allocator, u8, region) catch unreachable;
- }
-
- if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| {
- return res;
- }
+ // The explicit base URL should take precedence over the implicit base URL
+ // if present. This matters when a tsconfig.json file overrides "baseUrl"
+ // from another extended tsconfig.json file but doesn't override "paths".
+ if (tsconfig.hasBaseURL()) {
+ abs_base_url = tsconfig.base_url;
}
- }
- return null;
- }
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url}) catch unreachable;
+ }
- pub const LoadResult = struct {
- path: string,
- diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase,
- dirname_fd: StoredFileDescriptorType = 0,
- };
+ // Check for exact matches first
+ {
+ var iter = tsconfig.paths.iterator();
+ while (iter.next()) |entry| {
+ const key = entry.key;
- pub fn checkBrowserMap(r: *Resolver, pkg: *PackageJSON, input_path: string) ?string {
- // Normalize the path so we can compare against it without getting confused by "./"
- var cleaned = r.fs.normalize(input_path);
- const original_cleaned = cleaned;
+ if (strings.eql(key, path)) {
+ for (entry.value) |original_path| {
+ var absolute_original_path = original_path;
+ var was_alloc = false;
- if (cleaned.len == 1 and cleaned[0] == '.') {
- // No bundler supports remapping ".", so we don't either
- return null;
- }
+ if (!std.fs.path.isAbsolute(absolute_original_path)) {
+ const parts = [_]string{ abs_base_url, original_path };
+ absolute_original_path = r.fs.absAlloc(r.allocator, &parts) catch unreachable;
+ was_alloc = true;
+ }
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for \"{s}\" in the \"browser\" map in \"{s}\"", .{ input_path, pkg.source.path.text }) catch {};
- }
+ if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| {
+ return res;
+ } else if (was_alloc) {
+ r.allocator.free(absolute_original_path);
+ }
+ }
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for \"{s}\" ", .{cleaned}) catch {};
- }
- var remapped = pkg.browser_map.get(cleaned);
- if (remapped == null) {
- for (r.opts.extension_order) |ext| {
- std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned);
- std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. cleaned.len + ext.len], ext);
- const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. cleaned.len + ext.len];
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
+ return null;
+ }
}
- if (pkg.browser_map.get(new_path)) |_remapped| {
- remapped = _remapped;
- cleaned = new_path;
- break;
+ }
+
+ const TSConfigMatch = struct {
+ prefix: string,
+ suffix: string,
+ original_paths: []string,
+ };
+
+ var longest_match: TSConfigMatch = undefined;
+ var longest_match_prefix_length: i32 = -1;
+ var longest_match_suffix_length: i32 = -1;
+
+ var iter = tsconfig.paths.iterator();
+ while (iter.next()) |entry| {
+ const key = entry.key;
+ const original_paths = entry.value;
+
+ if (strings.indexOfChar(key, '*')) |star_index| {
+ const prefix = key[0..star_index];
+ const suffix = key[star_index..key.len];
+
+ // Find the match with the longest prefix. If two matches have the same
+ // prefix length, pick the one with the longest suffix. This second edge
+ // case isn't handled by the TypeScript compiler, but we handle it
+ // because we want the output to always be deterministic and Go map
+ // iteration order is deliberately non-deterministic.
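+ //
+ // As a worked example (hypothetical tsconfig): with paths
+ // { "@app/*": ["./src/*"] }, the import "@app/utils/math" matches
+ // prefix "@app/" and suffix "", so matched_text becomes "utils/math"
+ // and the candidate path is "./src/utils/math".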
+ if (strings.startsWith(path, prefix) and strings.endsWith(path, suffix) and (prefix.len > longest_match_prefix_length or (prefix.len == longest_match_prefix_length and suffix.len > longest_match_suffix_length))) {
+ longest_match_prefix_length = @intCast(i32, prefix.len);
+ longest_match_suffix_length = @intCast(i32, suffix.len);
+ longest_match = TSConfigMatch{ .prefix = prefix, .suffix = suffix, .original_paths = original_paths };
+ }
}
}
- }
- if (remapped) |remap| {
- // "" == disabled, {"browser": { "file.js": false }}
- if (remap.len == 0 or (remap.len == 1 and remap[0] == '.')) {
+ // If there is at least one match, only consider the one with the longest
+ // prefix. This matches the behavior of the TypeScript compiler.
+ if (longest_match_prefix_length > -1) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found \"{s}\" marked as disabled", .{remap}) catch {};
+ debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix }) catch unreachable;
}
- return remap;
- }
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found \"{s}\" remapped to \"{s}\"", .{ original_cleaned, remap }) catch {};
+ for (longest_match.original_paths) |original_path| {
+ // Swap out the "*" in the original path for whatever the "*" matched
+ const matched_text = path[longest_match.prefix.len .. path.len - longest_match.suffix.len];
+
+ std.mem.copy(
+ u8,
+ &TemporaryBuffer.TSConfigMatchPathBuf,
+ original_path,
+ );
+ var start: usize = 0;
+ var total_length: usize = 0;
+ const star = std.mem.indexOfScalar(u8, original_path, '*') orelse unreachable;
+ total_length = star;
+ std.mem.copy(u8, &TemporaryBuffer.TSConfigMatchPathBuf, original_path[0..total_length]);
+ start = total_length;
+ total_length += matched_text.len;
+ std.mem.copy(u8, TemporaryBuffer.TSConfigMatchPathBuf[start..total_length], matched_text);
+ start = total_length;
+
+ // Copy the suffix that follows the "*", excluding the "*" itself.
+ total_length += original_path.len - star - 1;
+ std.mem.copy(u8, TemporaryBuffer.TSConfigMatchPathBuf[start..TemporaryBuffer.TSConfigMatchPathBuf.len], original_path[star + 1 .. original_path.len]);
+ const region = TemporaryBuffer.TSConfigMatchPathBuf[0..total_length];
+
+ // Load the original path relative to the "baseUrl" from tsconfig.json
+ var absolute_original_path: string = region;
+
+ var did_allocate = false;
+ if (!std.fs.path.isAbsolute(region)) {
+ var paths = [_]string{ abs_base_url, region };
+ absolute_original_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable;
+ did_allocate = true;
+ } else {
+ absolute_original_path = std.mem.dupe(r.allocator, u8, region) catch unreachable;
+ }
+
+ if (r.loadAsFileOrDirectory(absolute_original_path, kind)) |res| {
+ return res;
+ }
+ }
}
- // Only allocate on successful remapping.
- return r.allocator.dupe(u8, remap) catch unreachable;
+ return null;
}
- return null;
- }
+ pub fn checkBrowserMap(r: *ThisResolver, pkg: *PackageJSON, input_path: string) ?string {
+ // Normalize the path so we can compare against it without getting confused by "./"
+ var cleaned = r.fs.normalize(input_path);
+ const original_cleaned = cleaned;
- pub fn loadFromMainField(r: *Resolver, path: string, dir_info: *DirInfo, _field_rel_path: string, field: string, extension_order: []const string) ?MatchResult {
- var field_rel_path = _field_rel_path;
- // Is this a directory?
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path }) catch {};
- debug.increaseIndent() catch {};
- }
+ if (cleaned.len == 1 and cleaned[0] == '.') {
+ // No bundler supports remapping ".", so we don't either
+ return null;
+ }
- defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.addNoteFmt("Checking for \"{s}\" in the \"browser\" map in \"{s}\"", .{ input_path, pkg.source.path.text }) catch {};
}
- }
- // Potentially remap using the "browser" field
- if (dir_info.getEnclosingBrowserScope()) |browser_scope| {
- if (browser_scope.package_json) |browser_json| {
- if (r.checkBrowserMap(browser_json, field_rel_path)) |remap| {
- // Is the path disabled?
- if (remap.len == 0) {
- const paths = [_]string{ path, field_rel_path };
- const new_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable;
- var _path = Path.init(new_path);
- _path.is_disabled = true;
- return MatchResult{
- .path_pair = PathPair{
- .primary = _path,
- },
- .package_json_version = browser_json.version,
- };
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Checking for \"{s}\" ", .{cleaned}) catch {};
+ }
+ var remapped = pkg.browser_map.get(cleaned);
+ if (remapped == null) {
+ for (r.opts.extension_order) |ext| {
+ std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned);
+ std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. cleaned.len + ext.len], ext);
+ const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. cleaned.len + ext.len];
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
}
+ if (pkg.browser_map.get(new_path)) |_remapped| {
+ remapped = _remapped;
+ cleaned = new_path;
+ break;
+ }
+ }
+ }
+
+ if (remapped) |remap| {
+ // "" == disabled, {"browser": { "file.js": false }}
+ if (remap.len == 0 or (remap.len == 1 and remap[0] == '.')) {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Found \"{s}\" marked as disabled", .{remap}) catch {};
+ }
+ return remap;
+ }
- field_rel_path = remap;
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Found \"{s}\" remapped to \"{s}\"", .{ original_cleaned, remap }) catch {};
}
+
+ // Only allocate on successful remapping.
+ return r.allocator.dupe(u8, remap) catch unreachable;
}
- }
- const _paths = [_]string{ field_rel_path, path };
- const field_abs_path = r.fs.absAlloc(r.allocator, &_paths) catch unreachable;
- const field_dir_info = (r.dirInfoCached(field_abs_path) catch null) orelse {
- r.allocator.free(field_abs_path);
return null;
- };
+ }
- return r.loadAsIndexWithBrowserRemapping(field_dir_info, field_abs_path, extension_order) orelse {
- r.allocator.free(field_abs_path);
- return null;
- };
- }
+ pub fn loadFromMainField(r: *ThisResolver, path: string, dir_info: *DirInfo, _field_rel_path: string, field: string, extension_order: []const string) ?MatchResult {
+ var field_rel_path = _field_rel_path;
+ // Is this a directory?
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path }) catch {};
+ debug.increaseIndent() catch {};
+ }
- pub fn loadAsIndex(r: *Resolver, dir_info: *DirInfo, path: string, extension_order: []const string) ?MatchResult {
- var rfs = &r.fs.fs;
- // Try the "index" file with extensions
- for (extension_order) |ext| {
- var base = TemporaryBuffer.ExtensionPathBuf[0 .. "index".len + ext.len];
- base[0.."index".len].* = "index".*;
- std.mem.copy(u8, base["index".len..base.len], ext);
-
- if (dir_info.getEntries()) |entries| {
- if (entries.get(base)) |lookup| {
- if (lookup.entry.kind(rfs) == .file) {
- const parts = [_]string{ path, base };
- const out_buf = r.fs.absAlloc(r.allocator, &parts) catch unreachable;
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable;
+ defer {
+ if (r.debug_logs) |*debug| {
+ debug.decreaseIndent() catch {};
+ }
+ }
+
+ // Potentially remap using the "browser" field
+ if (dir_info.getEnclosingBrowserScope()) |browser_scope| {
+ if (browser_scope.package_json) |browser_json| {
+ if (r.checkBrowserMap(browser_json, field_rel_path)) |remap| {
+ // Is the path disabled?
+ if (remap.len == 0) {
+ const paths = [_]string{ path, field_rel_path };
+ const new_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable;
+ var _path = Path.init(new_path);
+ _path.is_disabled = true;
+ return MatchResult{
+ .path_pair = PathPair{
+ .primary = _path,
+ },
+ .package_json_version = browser_json.version,
+ };
}
- return MatchResult{
- .path_pair = .{ .primary = Path.init(out_buf) },
- .diff_case = lookup.diff_case,
- .dirname_fd = dir_info.getFileDescriptor(),
- };
+ field_rel_path = remap;
}
}
}
+ const _paths = [_]string{ field_rel_path, path };
+ const field_abs_path = r.fs.absAlloc(r.allocator, &_paths) catch unreachable;
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base }) catch unreachable;
- }
- }
-
- return null;
- }
+ const field_dir_info = (r.dirInfoCached(field_abs_path) catch null) orelse {
+ r.allocator.free(field_abs_path);
+ return null;
+ };
- pub fn loadAsIndexWithBrowserRemapping(r: *Resolver, dir_info: *DirInfo, path: string, extension_order: []const string) ?MatchResult {
- if (dir_info.getEnclosingBrowserScope()) |browser_scope| {
- const field_rel_path = comptime "index";
- if (browser_scope.package_json) |browser_json| {
- if (r.checkBrowserMap(browser_json, field_rel_path)) |remap| {
- // Is the path disabled?
- // This doesn't really make sense to me.
- if (remap.len == 0) {
- const paths = [_]string{ path, field_rel_path };
- const new_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable;
- var _path = Path.init(new_path);
- _path.is_disabled = true;
- return MatchResult{
- .path_pair = PathPair{
- .primary = _path,
- },
- .package_json_version = browser_json.version,
- };
- }
+ return r.loadAsIndexWithBrowserRemapping(field_dir_info, field_abs_path, extension_order) orelse {
+ r.allocator.free(field_abs_path);
+ return null;
+ };
+ }
- const new_paths = [_]string{ path, remap };
- const remapped_abs = r.fs.absAlloc(r.allocator, &new_paths) catch unreachable;
+ pub fn loadAsIndex(r: *ThisResolver, dir_info: *DirInfo, path: string, extension_order: []const string) ?MatchResult {
+ var rfs = &r.fs.fs;
+ // Try the "index" file with extensions
+ for (extension_order) |ext| {
+ var base = TemporaryBuffer.ExtensionPathBuf[0 .. "index".len + ext.len];
+ base[0.."index".len].* = "index".*;
+ std.mem.copy(u8, base["index".len..base.len], ext);
- // Is this a file
- if (r.loadAsFile(remapped_abs, extension_order)) |file_result| {
- return MatchResult{ .dirname_fd = file_result.dirname_fd, .path_pair = .{ .primary = Path.init(file_result.path) }, .diff_case = file_result.diff_case };
- }
+ if (dir_info.getEntries()) |entries| {
+ if (entries.get(base)) |lookup| {
+ if (lookup.entry.kind(rfs) == .file) {
+ const parts = [_]string{ path, base };
+ const out_buf = r.fs.absAlloc(r.allocator, &parts) catch unreachable;
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable;
+ }
- // Is it a directory with an index?
- if (r.dirInfoCached(remapped_abs) catch null) |new_dir| {
- if (r.loadAsIndex(new_dir, remapped_abs, extension_order)) |absolute| {
- return absolute;
+ return MatchResult{
+ .path_pair = .{ .primary = Path.init(out_buf) },
+ .diff_case = lookup.diff_case,
+ .dirname_fd = dir_info.getFileDescriptor(),
+ };
}
}
+ }
- return null;
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base }) catch unreachable;
}
}
+
+ return null;
}
- return r.loadAsIndex(dir_info, path, extension_order);
- }
+ pub fn loadAsIndexWithBrowserRemapping(r: *ThisResolver, dir_info: *DirInfo, path: string, extension_order: []const string) ?MatchResult {
+ if (dir_info.getEnclosingBrowserScope()) |browser_scope| {
+ const field_rel_path = comptime "index";
+ if (browser_scope.package_json) |browser_json| {
+ if (r.checkBrowserMap(browser_json, field_rel_path)) |remap| {
+ // Is the path disabled?
+ // This doesn't really make sense to me.
+ if (remap.len == 0) {
+ const paths = [_]string{ path, field_rel_path };
+ const new_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable;
+ var _path = Path.init(new_path);
+ _path.is_disabled = true;
+ return MatchResult{
+ .path_pair = PathPair{
+ .primary = _path,
+ },
+ .package_json_version = browser_json.version,
+ };
+ }
- pub fn loadAsFileOrDirectory(r: *Resolver, path: string, kind: ast.ImportKind) ?MatchResult {
- const extension_order = r.opts.extension_order;
+ const new_paths = [_]string{ path, remap };
+ const remapped_abs = r.fs.absAlloc(r.allocator, &new_paths) catch unreachable;
- // Is this a file?
- if (r.loadAsFile(path, extension_order)) |file| {
- return MatchResult{
- .path_pair = .{ .primary = Path.init(file.path) },
- .diff_case = file.diff_case,
- .dirname_fd = file.dirname_fd,
- };
- }
+ // Is this a file?
+ if (r.loadAsFile(remapped_abs, extension_order)) |file_result| {
+ return MatchResult{ .dirname_fd = file_result.dirname_fd, .path_pair = .{ .primary = Path.init(file_result.path) }, .diff_case = file_result.diff_case };
+ }
- // Is this a directory?
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path}) catch {};
- debug.increaseIndent() catch {};
- }
- defer {
- if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ // Is it a directory with an index?
+ if (r.dirInfoCached(remapped_abs) catch null) |new_dir| {
+ if (r.loadAsIndex(new_dir, remapped_abs, extension_order)) |absolute| {
+ return absolute;
+ }
+ }
+
+ return null;
+ }
+ }
}
+
+ return r.loadAsIndex(dir_info, path, extension_order);
}
- const dir_info = (r.dirInfoCached(path) catch null) orelse return null;
- var package_json_version: ?string = null;
+ pub fn loadAsFileOrDirectory(r: *ThisResolver, path: string, kind: ast.ImportKind) ?MatchResult {
+ const extension_order = r.opts.extension_order;
- // Try using the main field(s) from "package.json"
- if (dir_info.package_json) |pkg_json| {
- package_json_version = pkg_json.version;
- if (pkg_json.main_fields.count() > 0) {
- const main_field_values = pkg_json.main_fields;
- const main_field_keys = r.opts.main_fields;
- // TODO: check this works right. Not sure this will really work.
- const auto_main = r.opts.main_fields.ptr == options.Platform.DefaultMainFields.get(r.opts.platform).ptr;
+ // Is this a file?
+ if (r.loadAsFile(path, extension_order)) |file| {
+ return MatchResult{
+ .path_pair = .{ .primary = Path.init(file.path) },
+ .diff_case = file.diff_case,
+ .dirname_fd = file.dirname_fd,
+ };
+ }
+ // Is this a directory?
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path}) catch {};
+ debug.increaseIndent() catch {};
+ }
+ defer {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text}) catch {};
+ debug.decreaseIndent() catch {};
}
+ }
- for (main_field_keys) |key| {
- const field_rel_path = (main_field_values.get(key)) orelse {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Did not find main field \"{s}\"", .{key}) catch {};
- }
- continue;
- };
+ const dir_info = (r.dirInfoCached(path) catch null) orelse return null;
+ var package_json_version: ?string = null;
- var _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue;
+ // Try using the main field(s) from "package.json"
+ if (dir_info.package_json) |pkg_json| {
+ package_json_version = pkg_json.version;
+ if (pkg_json.main_fields.count() > 0) {
+ const main_field_values = pkg_json.main_fields;
+ const main_field_keys = r.opts.main_fields;
+ // TODO: check this works right. Not sure this will really work.
+ const auto_main = r.opts.main_fields.ptr == options.Platform.DefaultMainFields.get(r.opts.platform).ptr;
- // If the user did not manually configure a "main" field order, then
- // use a special per-module automatic algorithm to decide whether to
- // use "module" or "main" based on whether the package is imported
- // using "import" or "require".
- if (auto_main and strings.eqlComptime(key, "module")) {
- var absolute_result: ?MatchResult = null;
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text}) catch {};
+ }
- if (main_field_values.get("main")) |main_rel_path| {
- if (main_rel_path.len > 0) {
- absolute_result = r.loadFromMainField(path, dir_info, main_rel_path, "main", extension_order);
+ for (main_field_keys) |key| {
+ const field_rel_path = (main_field_values.get(key)) orelse {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Did not find main field \"{s}\"", .{key}) catch {};
}
- } else {
- // Some packages have a "module" field without a "main" field but
- // still have an implicit "index.js" file. In that case, treat that
- // as the value for "main".
- absolute_result = r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order);
- }
+ continue;
+ };
- if (absolute_result) |auto_main_result| {
- // If both the "main" and "module" fields exist, use "main" if the
- // path is for "require" and "module" if the path is for "import".
- // If we're using "module", return enough information to be able to
- // fall back to "main" later if something ended up using "require()"
- // with this same path. The goal of this code is to avoid having
- // both the "module" file and the "main" file in the bundle at the
- // same time.
- if (kind != ast.ImportKind.require) {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text }) catch {};
+ var _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue;
- debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text}) catch {};
- }
+ // If the user did not manually configure a "main" field order, then
+ // use a special per-module automatic algorithm to decide whether to
+ // use "module" or "main" based on whether the package is imported
+ // using "import" or "require".
+ if (auto_main and strings.eqlComptime(key, "module")) {
+ var absolute_result: ?MatchResult = null;
- return MatchResult{
- .path_pair = .{
- .primary = auto_main_result.path_pair.primary,
- .secondary = _result.path_pair.primary,
- },
- .diff_case = auto_main_result.diff_case,
- .dirname_fd = auto_main_result.dirname_fd,
- .package_json_version = pkg_json.version,
- };
+ if (main_field_values.get("main")) |main_rel_path| {
+ if (main_rel_path.len > 0) {
+ absolute_result = r.loadFromMainField(path, dir_info, main_rel_path, "main", extension_order);
+ }
} else {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved to \"{s}\" using the \"{s}\" field in \"{s}\"", .{
- auto_main_result.path_pair.primary.text,
- key,
- pkg_json.source.key_path.text,
- }) catch {};
+ // Some packages have a "module" field without a "main" field but
+ // still have an implicit "index.js" file. In that case, treat that
+ // as the value for "main".
+ absolute_result = r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order);
+ }
+
+ if (absolute_result) |auto_main_result| {
+ // If both the "main" and "module" fields exist, use "main" if the
+ // path is for "require" and "module" if the path is for "import".
+ // If we're using "module", return enough information to be able to
+ // fall back to "main" later if something ended up using "require()"
+ // with this same path. The goal of this code is to avoid having
+ // both the "module" file and the "main" file in the bundle at the
+ // same time.
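+ // e.g. a package with { "main": "./index.cjs", "module": "./index.mjs" }
+ // (hypothetical) resolves to "./index.mjs" for import, keeping
+ // "./index.cjs" as the secondary path to fall back to for require().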
+ if (kind != ast.ImportKind.require) {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text }) catch {};
+
+ debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text}) catch {};
+ }
+
+ return MatchResult{
+ .path_pair = .{
+ .primary = auto_main_result.path_pair.primary,
+ .secondary = _result.path_pair.primary,
+ },
+ .diff_case = auto_main_result.diff_case,
+ .dirname_fd = auto_main_result.dirname_fd,
+ .package_json_version = pkg_json.version,
+ };
+ } else {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Resolved to \"{s}\" using the \"{s}\" field in \"{s}\"", .{
+ auto_main_result.path_pair.primary.text,
+ key,
+ pkg_json.source.key_path.text,
+ }) catch {};
+ }
+ var _auto_main_result = auto_main_result;
+ _auto_main_result.package_json_version = pkg_json.version;
+ return _auto_main_result;
}
- var _auto_main_result = auto_main_result;
- _auto_main_result.package_json_version = pkg_json.version;
- return _auto_main_result;
}
}
}
}
}
- }
- // Look for an "index" file with known extensions
- if (r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order)) |*res| {
- if (res.package_json_version == null and package_json_version != null) {
- res.package_json_version = package_json_version;
+ // Look for an "index" file with known extensions
+ if (r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order)) |*res| {
+ if (res.package_json_version == null and package_json_version != null) {
+ res.package_json_version = package_json_version;
+ }
+ return res.*;
}
- return res.*;
- }
- return null;
- }
+ return null;
+ }
- pub fn loadAsFile(r: *Resolver, path: string, extension_order: []const string) ?LoadResult {
- var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
+ pub fn loadAsFile(r: *ThisResolver, path: string, extension_order: []const string) ?LoadResult {
+ var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path}) catch {};
- debug.increaseIndent() catch {};
- }
- defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path}) catch {};
+ debug.increaseIndent() catch {};
}
- }
-
- const dir_path = std.fs.path.dirname(path) orelse "/";
-
- const dir_entry: *Fs.FileSystem.RealFS.EntriesOption = rfs.readDirectory(dir_path, null, false) catch {
- return null;
- };
-
- if (@as(Fs.FileSystem.RealFS.EntriesOption.Tag, dir_entry.*) == .err) {
- if (dir_entry.err.original_err != error.ENOENT) {
- r.log.addErrorFmt(
- null,
- logger.Loc.Empty,
- r.allocator,
- "Cannot read directory \"{s}\": {s}",
- .{
- r.prettyPath(Path.init(dir_path)),
- @errorName(dir_entry.err.original_err),
- },
- ) catch {};
+ defer {
+ if (r.debug_logs) |*debug| {
+ debug.decreaseIndent() catch {};
+ }
}
- return null;
- }
-
- const entries = dir_entry.entries;
- const base = std.fs.path.basename(path);
+ const dir_path = std.fs.path.dirname(path) orelse "/";
- // Try the plain path without any extensions
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for file \"{s}\" ", .{base}) catch {};
- }
+ const dir_entry: *Fs.FileSystem.RealFS.EntriesOption = rfs.readDirectory(dir_path, null, false) catch {
+ return null;
+ };
- if (entries.get(base)) |query| {
- if (query.entry.kind(rfs) == .file) {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {};
+ if (@as(Fs.FileSystem.RealFS.EntriesOption.Tag, dir_entry.*) == .err) {
+ if (dir_entry.err.original_err != error.ENOENT) {
+ r.log.addErrorFmt(
+ null,
+ logger.Loc.Empty,
+ r.allocator,
+ "Cannot read directory \"{s}\": {s}",
+ .{
+ r.prettyPath(Path.init(dir_path)),
+ @errorName(dir_entry.err.original_err),
+ },
+ ) catch {};
}
- const abs_path_parts = [_]string{ query.entry.dir, query.entry.base };
- const abs_path = r.fs.filename_store.append(r.fs.joinBuf(&abs_path_parts, &TemporaryBuffer.ExtensionPathBuf)) catch unreachable;
-
- return LoadResult{
- .path = abs_path,
- .diff_case = query.diff_case,
- .dirname_fd = entries.fd,
- };
+ return null;
}
- }
- // Try the path with extensions
+ const entries = dir_entry.entries;
- std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, path);
- for (r.opts.extension_order) |ext| {
- var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. path.len + ext.len];
- std.mem.copy(u8, buffer[path.len..buffer.len], ext);
- const file_name = buffer[path.len - base.len .. buffer.len];
+ const base = std.fs.path.basename(path);
+ // Try the plain path without any extensions
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for file \"{s}{s}\" ", .{ base, ext }) catch {};
+ debug.addNoteFmt("Checking for file \"{s}\" ", .{base}) catch {};
}
- if (entries.get(file_name)) |query| {
+ if (entries.get(base)) |query| {
if (query.entry.kind(rfs) == .file) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file \"{s}\" ", .{buffer}) catch {};
+ debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {};
}
+ const abs_path_parts = [_]string{ query.entry.dir, query.entry.base };
+ const abs_path = r.fs.filename_store.append(r.fs.joinBuf(&abs_path_parts, &TemporaryBuffer.ExtensionPathBuf)) catch unreachable;
- // now that we've found it, we allocate it.
return LoadResult{
- .path = r.fs.filename_store.append(buffer) catch unreachable,
+ .path = abs_path,
.diff_case = query.diff_case,
.dirname_fd = entries.fd,
};
}
}
- }
- // TypeScript-specific behavior: if the extension is ".js" or ".jsx", try
- // replacing it with ".ts" or ".tsx". At the time of writing this specific
- // behavior comes from the function "loadModuleFromFile()" in the file
- // "moduleNameResolver.ts" in the TypeScript compiler source code. It
- // contains this comment:
- //
- // If that didn't work, try stripping a ".js" or ".jsx" extension and
- // replacing it with a TypeScript one; e.g. "./foo.js" can be matched
- // by "./foo.ts" or "./foo.d.ts"
- //
- // We don't care about ".d.ts" files because we can't do anything with
- // those, so we ignore that part of the behavior.
- //
- // See the discussion here for more historical context:
- // https://github.com/microsoft/TypeScript/issues/4595
- if (strings.lastIndexOfChar(base, '.')) |last_dot| {
- const ext = base[last_dot..base.len];
- if (strings.eql(ext, ".js") or strings.eql(ext, ".jsx")) {
- const segment = base[0..last_dot];
- std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, segment);
-
- const exts = comptime [_]string{ ".ts", ".tsx" };
-
- for (exts) |ext_to_replace| {
- var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. segment.len + ext_to_replace.len];
- std.mem.copy(u8, buffer[segment.len..buffer.len], ext_to_replace);
-
- if (entries.get(buffer)) |query| {
- if (query.entry.kind(rfs) == .file) {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer}) catch {};
- }
+ // Try the path with extensions
- return LoadResult{
- .path = r.fs.filename_store.append(buffer) catch unreachable,
- .diff_case = query.diff_case,
- .dirname_fd = entries.fd,
- };
+ std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, path);
+ for (extension_order) |ext| {
+ var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. path.len + ext.len];
+ std.mem.copy(u8, buffer[path.len..buffer.len], ext);
+ const file_name = buffer[path.len - base.len .. buffer.len];
+
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Checking for file \"{s}{s}\" ", .{ base, ext }) catch {};
+ }
+
+ if (entries.get(file_name)) |query| {
+ if (query.entry.kind(rfs) == .file) {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Found file \"{s}\" ", .{buffer}) catch {};
}
+
+ // now that we've found it, we allocate it.
+ return LoadResult{
+ .path = r.fs.filename_store.append(buffer) catch unreachable,
+ .diff_case = query.diff_case,
+ .dirname_fd = entries.fd,
+ };
}
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base}) catch {};
+ }
+ }
+
+ // TypeScript-specific behavior: if the extension is ".js" or ".jsx", try
+ // replacing it with ".ts" or ".tsx". At the time of writing this specific
+ // behavior comes from the function "loadModuleFromFile()" in the file
+ // "moduleNameThisResolver.ts" in the TypeScript compiler source code. It
+ // contains this comment:
+ //
+ // If that didn't work, try stripping a ".js" or ".jsx" extension and
+ // replacing it with a TypeScript one; e.g. "./foo.js" can be matched
+ // by "./foo.ts" or "./foo.d.ts"
+ //
+ // We don't care about ".d.ts" files because we can't do anything with
+ // those, so we ignore that part of the behavior.
+ //
+ // See the discussion here for more historical context:
+ // https://github.com/microsoft/TypeScript/issues/4595
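+            // For example, an import of "./foo.js" is also checked against
+            // "./foo.ts" and "./foo.tsx" on disk before giving up.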
+ if (strings.lastIndexOfChar(base, '.')) |last_dot| {
+ const ext = base[last_dot..base.len];
+ if (strings.eql(ext, ".js") or strings.eql(ext, ".jsx")) {
+ const segment = base[0..last_dot];
+ std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, segment);
+
+ const exts = comptime [_]string{ ".ts", ".tsx" };
+
+ for (exts) |ext_to_replace| {
+ var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. segment.len + ext_to_replace.len];
+ std.mem.copy(u8, buffer[segment.len..buffer.len], ext_to_replace);
+
+ if (entries.get(buffer)) |query| {
+ if (query.entry.kind(rfs) == .file) {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer}) catch {};
+ }
+
+ return LoadResult{
+ .path = r.fs.filename_store.append(buffer) catch unreachable,
+ .diff_case = query.diff_case,
+ .dirname_fd = entries.fd,
+ };
+ }
+ }
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base}) catch {};
+ }
}
}
}
- }
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to find \"{s}\" ", .{path}) catch {};
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Failed to find \"{s}\" ", .{path}) catch {};
+ }
+ return null;
}
- return null;
- }
- threadlocal var dir_info_uncached_filename_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
- fn dirInfoUncached(
- r: *Resolver,
- path: string,
- _entries: *Fs.FileSystem.RealFS.EntriesOption,
- _result: allocators.Result,
- dir_entry_index: allocators.IndexType,
- parent: ?*DirInfo,
- parent_index: allocators.IndexType,
- fd: FileDescriptorType,
- ) anyerror!DirInfo {
- var result = _result;
-
- var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
- var entries = _entries.entries;
-
- var info = DirInfo{
- .abs_path = path,
- .parent = parent_index,
- .entries = dir_entry_index,
- };
+ threadlocal var dir_info_uncached_filename_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
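+        /// Construct a DirInfo for a directory that is not in the cache yet:
+        /// link it to its parent, resolve symlinks, and discover package.json
+        /// and tsconfig.json/jsconfig.json along the way.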
+ fn dirInfoUncached(
+ r: *ThisResolver,
+ path: string,
+ _entries: *Fs.FileSystem.RealFS.EntriesOption,
+ _result: allocators.Result,
+ dir_entry_index: allocators.IndexType,
+ parent: ?*DirInfo,
+ parent_index: allocators.IndexType,
+ fd: FileDescriptorType,
+ ) anyerror!DirInfo {
+ var result = _result;
+
+ var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
+ var entries = _entries.entries;
+
+ var info = DirInfo{
+ .abs_path = path,
+ .parent = parent_index,
+ .entries = dir_entry_index,
+ };
- // A "node_modules" directory isn't allowed to directly contain another "node_modules" directory
- var base = std.fs.path.basename(path);
- // if (entries != null) {
- if (!strings.eqlComptime(base, "node_modules")) {
- if (entries.getComptimeQuery("node_modules")) |entry| {
- // the catch might be wrong!
- info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
+ // A "node_modules" directory isn't allowed to directly contain another "node_modules" directory
+ var base = std.fs.path.basename(path);
+ // if (entries != null) {
+ if (!strings.eqlComptime(base, "node_modules")) {
+ if (entries.getComptimeQuery("node_modules")) |entry| {
+ // the catch might be wrong!
+ info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
+ }
}
- }
- // }
+ // }
- if (parent != null) {
+ if (parent != null) {
- // Propagate the browser scope into child directories
- info.enclosing_browser_scope = parent.?.enclosing_browser_scope;
+ // Propagate the browser scope into child directories
+ info.enclosing_browser_scope = parent.?.enclosing_browser_scope;
- // Make sure "absRealPath" is the real path of the directory (resolving any symlinks)
- if (!r.opts.preserve_symlinks) {
- if (parent.?.getEntries()) |parent_entries| {
- if (parent_entries.get(base)) |lookup| {
- const entry = lookup.entry;
+ // Make sure "absRealPath" is the real path of the directory (resolving any symlinks)
+ if (!r.opts.preserve_symlinks) {
+ if (parent.?.getEntries()) |parent_entries| {
+ if (parent_entries.get(base)) |lookup| {
+ const entry = lookup.entry;
- var symlink = entry.symlink(rfs);
- if (symlink.len > 0) {
- if (r.debug_logs) |*logs| {
- try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
- }
- info.abs_real_path = symlink;
- } else if (parent.?.abs_real_path.len > 0) {
- // this might leak a little i'm not sure
- const parts = [_]string{ parent.?.abs_real_path, base };
- symlink = r.fs.filename_store.append(r.fs.joinBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
-
- if (r.debug_logs) |*logs| {
- try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+ var symlink = entry.symlink(rfs);
+ if (symlink.len > 0) {
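+                            // The entry itself is a symlink: record its resolved target as the real path.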
+ if (r.debug_logs) |*logs| {
+ try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+ }
+ info.abs_real_path = symlink;
+ } else if (parent.?.abs_real_path.len > 0) {
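+                            // Not a symlink here, but an ancestor was resolved: derive the real
+                            // path from the parent's resolved path plus this directory's basename.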
+ // this might leak a little i'm not sure
+ const parts = [_]string{ parent.?.abs_real_path, base };
+ symlink = r.fs.filename_store.append(r.fs.joinBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
+
+ if (r.debug_logs) |*logs| {
+ try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+ }
+ info.abs_real_path = symlink;
}
- info.abs_real_path = symlink;
}
}
}
}
- }
- // Record if this directory has a package.json file
- if (entries.getComptimeQuery("package.json")) |lookup| {
- const entry = lookup.entry;
- if (entry.kind(rfs) == .file) {
- info.package_json = r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0) catch null;
+ // Record if this directory has a package.json file
+ if (entries.getComptimeQuery("package.json")) |lookup| {
+ const entry = lookup.entry;
+ if (entry.kind(rfs) == .file) {
+ info.package_json = r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0) catch null;
- if (info.package_json) |pkg| {
- if (pkg.browser_map.count() > 0) {
- info.enclosing_browser_scope = result.index;
- }
+ if (info.package_json) |pkg| {
+ if (pkg.browser_map.count() > 0) {
+ info.enclosing_browser_scope = result.index;
+ }
- if (r.debug_logs) |*logs| {
- logs.addNoteFmt("Resolved package.json in \"{s}\"", .{
- path,
- }) catch unreachable;
+ if (r.debug_logs) |*logs| {
+ logs.addNoteFmt("Resolved package.json in \"{s}\"", .{
+ path,
+ }) catch unreachable;
+ }
}
}
}
- }
-
- // Record if this directory has a tsconfig.json or jsconfig.json file
- {
- var tsconfig_path: ?string = null;
- if (r.opts.tsconfig_override == null) {
- if (entries.getComptimeQuery("tsconfig.json")) |lookup| {
- const entry = lookup.entry;
- if (entry.kind(rfs) == .file) {
- const parts = [_]string{ path, "tsconfig.json" };
- tsconfig_path = r.fs.absBuf(&parts, &dir_info_uncached_filename_buf);
- }
- }
- if (tsconfig_path == null) {
- if (entries.getComptimeQuery("jsconfig.json")) |lookup| {
+ // Record if this directory has a tsconfig.json or jsconfig.json file
+ {
+ var tsconfig_path: ?string = null;
+ if (r.opts.tsconfig_override == null) {
+ if (entries.getComptimeQuery("tsconfig.json")) |lookup| {
const entry = lookup.entry;
if (entry.kind(rfs) == .file) {
- const parts = [_]string{ path, "jsconfig.json" };
+ const parts = [_]string{ path, "tsconfig.json" };
+
tsconfig_path = r.fs.absBuf(&parts, &dir_info_uncached_filename_buf);
}
}
+ if (tsconfig_path == null) {
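+                    // Fall back to jsconfig.json when no tsconfig.json is present.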
+ if (entries.getComptimeQuery("jsconfig.json")) |lookup| {
+ const entry = lookup.entry;
+ if (entry.kind(rfs) == .file) {
+ const parts = [_]string{ path, "jsconfig.json" };
+ tsconfig_path = r.fs.absBuf(&parts, &dir_info_uncached_filename_buf);
+ }
+ }
+ }
+ } else if (parent == null) {
+ tsconfig_path = r.opts.tsconfig_override.?;
+ }
+
+ if (tsconfig_path) |tsconfigpath| {
+ info.tsconfig_json = r.parseTSConfig(
+ tsconfigpath,
+ if (FeatureFlags.store_file_descriptors) fd else 0,
+ ) catch |err| brk: {
+ const pretty = r.prettyPath(Path.init(tsconfigpath));
+
+ if (err == error.ENOENT) {
+ r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot find tsconfig file \"{s}\"", .{pretty}) catch unreachable;
+ } else if (err != error.ParseErrorAlreadyLogged and err != error.IsDir) {
+ r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot read file \"{s}\": {s}", .{ pretty, @errorName(err) }) catch unreachable;
+ }
+ break :brk null;
+ };
}
- } else if (parent == null) {
- tsconfig_path = r.opts.tsconfig_override.?;
}
- if (tsconfig_path) |tsconfigpath| {
- info.tsconfig_json = r.parseTSConfig(
- tsconfigpath,
- if (FeatureFlags.store_file_descriptors) fd else 0,
- ) catch |err| brk: {
- const pretty = r.prettyPath(Path.init(tsconfigpath));
-
- if (err == error.ENOENT) {
- r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot find tsconfig file \"{s}\"", .{pretty}) catch unreachable;
- } else if (err != error.ParseErrorAlreadyLogged and err != error.IsDir) {
- r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Cannot read file \"{s}\": {s}", .{ pretty, @errorName(err) }) catch unreachable;
- }
- break :brk null;
- };
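+            // Inherit the enclosing directory's tsconfig when this one defines none.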
+ if (info.tsconfig_json == null and parent != null) {
+ info.tsconfig_json = parent.?.tsconfig_json;
}
- }
- if (info.tsconfig_json == null and parent != null) {
- info.tsconfig_json = parent.?.tsconfig_json;
+ return info;
}
+ };
+}
- return info;
- }
-};
+pub const Resolver = NewResolver(true);
+pub const ResolverUncached = NewResolver(false);
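
Note: the comptime-bool-to-type pattern used by NewResolver above boils down to the following minimal sketch. The names here (NewThing, CachedThing, UncachedThing) are illustrative only and not part of this patch:

const std = @import("std");

fn NewThing(comptime cache_files: bool) type {
    return struct {
        // `cache_files` is comptime-known, so it is baked into each
        // generated type rather than branched on at runtime.
        pub fn cachesFiles() bool {
            return cache_files;
        }
    };
}

const CachedThing = NewThing(true); // plays the role of Resolver
const UncachedThing = NewThing(false); // plays the role of ResolverUncached

comptime {
    std.debug.assert(CachedThing.cachesFiles());
    std.debug.assert(!UncachedThing.cachesFiles());
}

Because each instantiation is a distinct, fully specialized struct type, the cached and uncached variants pay no runtime dispatch cost for the difference.
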
diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig
index 7b6977bba..8b380d89f 100644
--- a/src/resolver/tsconfig_json.zig
+++ b/src/resolver/tsconfig_json.zig
@@ -60,7 +60,8 @@ pub const TSConfigJSON = struct {
allocator: *std.mem.Allocator,
log: *logger.Log,
source: logger.Source,
- json_cache: *cache.Cache.Json,
+ comptime JSONCache: type,
+ json_cache: *JSONCache,
) anyerror!?*TSConfigJSON {
// Unfortunately "tsconfig.json" isn't actually JSON. It's some other
// format that appears to be defined by the implementation details of the