author     Jarred Sumner <jarred@jarredsumner.com>  2021-05-31 20:30:40 -0700
committer  Jarred Sumner <jarred@jarredsumner.com>  2021-05-31 20:30:40 -0700
commit     38f1eb07012eaf0e09400ba06d06fe15007a9022 (patch)
tree       65f2168ac3058a82d0a093f94a8e9d5a8321a6d8 /src
parent     87d01c9f4a315341c1c5f57e09e29df88ce9c996 (diff)
linker things
Former-commit-id: 73452660fd144d071cccbccfdcfa7058d9dd91a3
Diffstat (limited to 'src')
-rw-r--r--  src/bundler.zig            212
-rw-r--r--  src/fs.zig                   6
-rw-r--r--  src/hash_map.zig            10
-rw-r--r--  src/import_record.zig        2
-rw-r--r--  src/js_ast.zig              10
-rw-r--r--  src/js_parser/imports.zig    1
-rw-r--r--  src/linker.zig             238
-rw-r--r--  src/resolver/resolver.zig   13
-rw-r--r--  src/runtime.js              13
9 files changed, 322 insertions, 183 deletions
diff --git a/src/bundler.zig b/src/bundler.zig
index 0dfc59ae4..ed5b7625d 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -23,6 +23,8 @@ const ImportRecord = @import("./import_record.zig").ImportRecord;
const allocators = @import("./allocators.zig");
const MimeType = @import("./http/mime_type.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
+const runtime = @import("./runtime.zig");
+const Linker = linker.Linker;
pub const ServeResult = struct {
value: Value,
@@ -51,8 +53,8 @@ pub const ServeResult = struct {
};
// const BundleMap =
-const ResolveResults = ThreadSafeHashMap.ThreadSafeStringHashMap(Resolver.Resolver.Result);
-const ResolveQueue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic);
+pub const ResolveResults = ThreadSafeHashMap.ThreadSafeStringHashMap(Resolver.Resolver.Result);
+pub const ResolveQueue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic);
pub const Bundler = struct {
options: options.BundleOptions,
log: *logger.Log,
@@ -66,8 +68,7 @@ pub const Bundler = struct {
resolve_queue: ResolveQueue,
elapsed: i128 = 0,
needs_runtime: bool = false,
-
- runtime_output_path: Fs.Path = undefined,
+ linker: Linker,
pub const RuntimeCode = @embedFile("./runtime.js");
@@ -85,7 +86,6 @@ pub const Bundler = struct {
var fs = try Fs.FileSystem.init1(allocator, opts.absolute_working_dir, opts.serve orelse false);
const bundle_options = try options.BundleOptions.fromApi(allocator, fs, log, opts);
- relative_paths_list = ImportPathsList.init(allocator);
// var pool = try allocator.create(ThreadPool);
// try pool.init(ThreadPool.InitConfig{
// .allocator = allocator,
@@ -97,6 +97,7 @@ pub const Bundler = struct {
.resolver = Resolver.Resolver.init1(allocator, log, fs, bundle_options),
.log = log,
// .thread_pool = pool,
+ .linker = undefined,
.result = options.TransformResult{ .outbase = bundle_options.output_dir },
.resolve_results = try ResolveResults.init(allocator),
.resolve_queue = ResolveQueue.init(allocator),
@@ -104,101 +105,16 @@ pub const Bundler = struct {
};
}
- const ImportPathsList = allocators.BSSStringList(2048, 256);
- var relative_paths_list: *ImportPathsList = undefined;
- threadlocal var relative_path_allocator: std.heap.FixedBufferAllocator = undefined;
- threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
- threadlocal var relative_path_allocator_buf_loaded: bool = false;
-
- pub fn generateImportPath(bundler: *Bundler, source_dir: string, source_path: string) !Fs.Path {
- if (!relative_path_allocator_buf_loaded) {
- relative_path_allocator_buf_loaded = true;
- relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
- }
- defer relative_path_allocator.reset();
-
- var pretty = try relative_paths_list.append(bundler.fs.relative(source_dir, source_path));
- var pathname = Fs.PathName.init(pretty);
- var absolute_pathname = Fs.PathName.init(source_path);
-
- if (bundler.options.out_extensions.get(absolute_pathname.ext)) |ext| {
- absolute_pathname.ext = ext;
- }
-
- switch (bundler.options.import_path_format) {
- .relative => {
- return Fs.Path.initWithPretty(pretty, pretty);
- },
- .relative_nodejs => {
- var path = Fs.Path.initWithPretty(pretty, pretty);
- path.text = path.text[0 .. path.text.len - path.name.ext.len];
- return path;
- },
-
- .absolute_url => {
- const absolute_url = try relative_paths_list.append(
- try std.fmt.allocPrint(
- &relative_path_allocator.allocator,
- "{s}{s}{s}{s}",
- .{
- bundler.options.public_url,
- pathname.dir,
- pathname.base,
- absolute_pathname.ext,
- },
- ),
- );
-
- return Fs.Path.initWithPretty(absolute_url, pretty);
- },
-
- else => unreachable,
- }
- }
-
- pub fn processImportRecord(bundler: *Bundler, source_dir: string, resolve_result: *Resolver.Resolver.Result, import_record: *ImportRecord) !void {
-
- // extremely naive.
- resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
-
- if (resolve_result.shouldAssumeCommonJS()) {
- import_record.wrap_with_to_module = true;
- if (!bundler.needs_runtime) {
- bundler.runtime_output_path = Fs.Path.init(try std.fmt.allocPrint(bundler.allocator, "{s}/__runtime.js", .{bundler.fs.top_level_dir}));
- }
- bundler.needs_runtime = true;
- }
-
- // lazy means:
- // Run the resolver
- // Don't parse/print automatically.
- if (bundler.options.resolve_mode != .lazy) {
- try bundler.enqueueResolveResult(resolve_result);
- }
-
- import_record.path = try bundler.generateImportPath(source_dir, resolve_result.path_pair.primary.text);
- }
-
- pub fn resolveResultHashKey(bundler: *Bundler, resolve_result: *const Resolver.Resolver.Result) string {
- var hash_key = resolve_result.path_pair.primary.text;
-
- // Shorter hash key is faster to hash
- if (strings.startsWith(resolve_result.path_pair.primary.text, bundler.fs.top_level_dir)) {
- hash_key = resolve_result.path_pair.primary.text[bundler.fs.top_level_dir.len..];
- }
-
- return hash_key;
- }
-
- pub fn enqueueResolveResult(bundler: *Bundler, resolve_result: *const Resolver.Resolver.Result) !void {
- const hash_key = bundler.resolveResultHashKey(resolve_result);
-
- const get_or_put_entry = try bundler.resolve_results.backing.getOrPut(hash_key);
-
- if (!get_or_put_entry.found_existing) {
- get_or_put_entry.entry.value = resolve_result.*;
- try bundler.resolve_queue.writeItem(resolve_result.*);
- }
+ pub fn configureLinker(bundler: *Bundler) void {
+ bundler.linker = Linker.init(
+ bundler.allocator,
+ bundler.log,
+ &bundler.resolve_queue,
+ &bundler.options,
+ &bundler.resolver,
+ bundler.resolve_results,
+ bundler.fs,
+ );
}
pub fn buildWithResolveResult(bundler: *Bundler, resolve_result: Resolver.Resolver.Result) !?options.OutputFile {
@@ -212,74 +128,14 @@ pub const Bundler = struct {
const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
var file_path = resolve_result.path_pair.primary;
- file_path.pretty = relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
+ file_path.pretty = Linker.relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
var result = bundler.parse(file_path, loader, resolve_result.dirname_fd) orelse {
js_ast.Expr.Data.Store.reset();
js_ast.Stmt.Data.Store.reset();
return null;
};
- switch (result.loader) {
- .jsx, .js, .ts, .tsx => {
- const ast = result.ast;
-
- for (ast.import_records) |*import_record| {
- const source_dir = file_path.name.dir;
-
- if (bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
- bundler.processImportRecord(
- // Include trailing slash
- file_path.text[0 .. source_dir.len + 1],
- resolved_import,
- import_record,
- ) catch continue;
-
- // "Linking"
- // 1. Associate an ImportRecord with NamedImports
- // 2. If there is a default import, import the runtime wrapper
- } else |err| {
- switch (err) {
- error.ModuleNotFound => {
- if (Resolver.Resolver.isPackagePath(import_record.path.text)) {
- if (bundler.options.platform != .node and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
- try bundler.log.addRangeErrorFmt(
- &result.source,
- import_record.range,
- bundler.allocator,
- "Could not resolve: \"{s}\". Try setting --platform=\"node\"",
- .{import_record.path.text},
- );
- } else {
- try bundler.log.addRangeErrorFmt(
- &result.source,
- import_record.range,
- bundler.allocator,
- "Could not resolve: \"{s}\". Maybe you need to \"npm install\" (or yarn/pnpm)?",
- .{import_record.path.text},
- );
- }
- } else {
- try bundler.log.addRangeErrorFmt(
- &result.source,
- import_record.range,
- bundler.allocator,
- "Could not resolve: \"{s}\"",
- .{
- import_record.path.text,
- },
- );
- continue;
- }
- },
- else => {
- continue;
- },
- }
- }
- }
- },
- else => {},
- }
+ try bundler.linker.link(file_path, &result);
const output_file = try bundler.print(
result,
@@ -311,7 +167,6 @@ pub const Bundler = struct {
const ast = result.ast;
- var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const print_result = try js_printer.printAst(
@@ -320,8 +175,11 @@ pub const Bundler = struct {
js_ast.Symbol.Map.initList(symbols),
&result.source,
false,
- js_printer.Options{ .to_module_ref = Ref.RuntimeRef },
- &_linker,
+ js_printer.Options{
+ .to_module_ref = Ref.RuntimeRef,
+ .externals = ast.externals,
+ },
+ &bundler.linker,
);
// allocator.free(result.source.contents);
@@ -531,6 +389,7 @@ pub const Bundler = struct {
opts: Api.TransformOptions,
) !options.TransformResult {
var bundler = try Bundler.init(allocator, log, opts);
+ bundler.configureLinker();
// 100.00 µs std.fifo.LinearFifo(resolver.resolver.Result,std.fifo.LinearFifoBufferType { .Dynamic = {}}).writeItemAssumeCapacity
if (bundler.options.resolve_mode != .lazy) {
@@ -620,8 +479,8 @@ pub const Bundler = struct {
switch (bundler.options.resolve_mode) {
.lazy, .dev, .bundle => {
while (bundler.resolve_queue.readItem()) |item| {
- defer js_ast.Expr.Data.Store.reset();
- defer js_ast.Stmt.Data.Store.reset();
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
const output_file = bundler.buildWithResolveResult(item) catch continue orelse continue;
bundler.output_files.append(output_file) catch unreachable;
}
@@ -635,11 +494,12 @@ pub const Bundler = struct {
// }
// }
- // if (bundler.needs_runtime) {
- // try bundler.output_files.append(options.OutputFile{
-
- // });
- // }
+ if (bundler.linker.any_needs_runtime) {
+ try bundler.output_files.append(options.OutputFile{
+ .path = bundler.linker.runtime_source_path,
+ .contents = runtime.SourceContent,
+ });
+ }
if (enableTracing) {
Output.printError(
@@ -814,7 +674,6 @@ pub const Transformer = struct {
},
}
- var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
return try js_printer.printAst(
@@ -823,8 +682,11 @@ pub const Transformer = struct {
js_ast.Symbol.Map.initList(symbols),
source,
false,
- js_printer.Options{ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 } },
- &_linker,
+ js_printer.Options{
+ .to_module_ref = ast.module_ref orelse js_ast.Ref{ .inner_index = 0 },
+ .transform_imports = false,
+ },
+ null,
);
}
};
diff --git a/src/fs.zig b/src/fs.zig
index 5ac8983db..d22f6a7bd 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -17,8 +17,8 @@ threadlocal var scratch_lookup_buffer: [256]u8 = undefined;
pub const Preallocate = struct {
pub const Counts = struct {
- pub const dir_entry: usize = 1024;
- pub const files: usize = 2048;
+ pub const dir_entry: usize = 512;
+ pub const files: usize = 1024;
};
};
@@ -42,7 +42,7 @@ pub const FileSystem = struct {
pub var instance: FileSystem = undefined;
- pub const DirnameStore = allocators.BSSStringList(Preallocate.Counts.dir_entry, 256);
+ pub const DirnameStore = allocators.BSSStringList(Preallocate.Counts.dir_entry, 128);
pub const FilenameStore = allocators.BSSStringList(Preallocate.Counts.files, 64);
pub const Error = error{
diff --git a/src/hash_map.zig b/src/hash_map.zig
index 69c56997c..e625ff83c 100644
--- a/src/hash_map.zig
+++ b/src/hash_map.zig
@@ -165,6 +165,16 @@ pub fn HashMap(
return self.unmanaged.getOrPut(self.allocator, key);
}
+ /// If key exists this function cannot fail.
+ /// If there is an existing item with `key`, then the result
+ /// `Entry` pointer points to it, and found_existing is true.
+ /// Otherwise, puts a new item with undefined value, and
+ /// the `Entry` pointer points to it. Caller should then initialize
+ /// the value (but not the key).
+ pub fn getOrPutWithHash(self: *Self, key: K, hash: u64) !GetOrPutResult {
+ return self.unmanaged.getOrPutWithHash(self.allocator, key, hash);
+ }
+
/// If there is an existing item with `key`, then the result
/// `Entry` pointer points to it, and found_existing is true.
/// Otherwise, puts a new item with undefined value, and
diff --git a/src/import_record.zig b/src/import_record.zig
index 7b1acfd1b..cce23af16 100644
--- a/src/import_record.zig
+++ b/src/import_record.zig
@@ -29,6 +29,8 @@ pub const ImportKind = enum(u8) {
// A CSS "url(...)" token
url,
+ internal,
+
pub fn isFromCSS(k: ImportKind) bool {
return k == .at_conditional or k == .at or k == .url;
}
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 00e76d27e..9fef93679 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -2760,6 +2760,13 @@ pub const Expr = struct {
return maybeSimplifyNot(expr, allocator) orelse expr.*;
}
+ pub fn hasValueForThisInCall(expr: *const Expr) bool {
+ return switch (expr.data) {
+ .e_dot, .e_index => true,
+ else => false,
+ };
+ }
+
// The given "expr" argument should be the operand of a "!" prefix operator
// (i.e. the "x" in "!x"). This returns a simplified expression for the
// whole operator (i.e. the "!x") if it can be simplified, or false if not.
@@ -3403,6 +3410,9 @@ pub const Ast = struct {
approximate_line_count: i32 = 0,
has_lazy_export: bool = false,
+ runtime_import_record: ImportRecord = undefined,
+ needs_runtime: bool = false,
+ externals: []u32 = &[_]u32{},
// This is a list of CommonJS features. When a file uses CommonJS features,
// it's not a candidate for "flat bundling" and must be wrapped in its own
// closure.
diff --git a/src/js_parser/imports.zig b/src/js_parser/imports.zig
index b58c09680..9ab6a9cb2 100644
--- a/src/js_parser/imports.zig
+++ b/src/js_parser/imports.zig
@@ -7,6 +7,7 @@ pub const options = @import("../options.zig");
pub const alloc = @import("../alloc.zig");
pub const js_printer = @import("../js_printer.zig");
pub const renamer = @import("../renamer.zig");
+pub const RuntimeImports = @import("../runtime.zig").Runtime.Imports;
pub const fs = @import("../fs.zig");
const _hash_map = @import("../hash_map.zig");
pub usingnamespace @import("../global.zig");
diff --git a/src/linker.zig b/src/linker.zig
index db9db78e0..26eed096d 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -1,8 +1,65 @@
+usingnamespace @import("global.zig");
+usingnamespace @import("./ast/base.zig");
+
const std = @import("std");
-const fs = @import("fs.zig");
-usingnamespace @import("ast/base.zig");
+const lex = @import("js_lexer.zig");
+const logger = @import("logger.zig");
+const alloc = @import("alloc.zig");
+const Options = @import("options.zig");
+const js_parser = @import("js_parser.zig");
+const json_parser = @import("json_parser.zig");
+const js_printer = @import("js_printer.zig");
+const js_ast = @import("js_ast.zig");
+const panicky = @import("panic_handler.zig");
+const Fs = @import("fs.zig");
+const Api = @import("api/schema.zig").Api;
+const Resolver = @import("./resolver/resolver.zig");
+const sync = @import("sync.zig");
+const ThreadPool = sync.ThreadPool;
+const ThreadSafeHashMap = @import("./thread_safe_hash_map.zig");
+const ImportRecord = @import("./import_record.zig").ImportRecord;
+const allocators = @import("./allocators.zig");
+const MimeType = @import("./http/mime_type.zig");
+const resolve_path = @import("./resolver/resolve_path.zig");
+const _bundler = @import("./bundler.zig");
+const Bundler = _bundler.Bundler;
+const ResolveQueue = _bundler.ResolveQueue;
pub const Linker = struct {
+ allocator: *std.mem.Allocator,
+ options: *Options.BundleOptions,
+ fs: *Fs.FileSystem,
+ log: *logger.Log,
+ resolve_queue: *ResolveQueue,
+ resolver: *Resolver.Resolver,
+ resolve_results: *_bundler.ResolveResults,
+ any_needs_runtime: bool = false,
+ runtime_import_record: ?ImportRecord = null,
+ runtime_source_path: string,
+
+ pub fn init(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ resolve_queue: *ResolveQueue,
+ options: *Options.BundleOptions,
+ resolver: *Resolver.Resolver,
+ resolve_results: *_bundler.ResolveResults,
+ fs: *Fs.FileSystem,
+ ) Linker {
+ relative_paths_list = ImportPathsList.init(allocator);
+
+ return Linker{
+ .allocator = allocator,
+ .options = options,
+ .fs = fs,
+ .log = log,
+ .resolve_queue = resolve_queue,
+ .resolver = resolver,
+ .resolve_results = resolve_results,
+ .runtime_source_path = fs.joinAlloc(allocator, &[_]string{"__runtime.js"}) catch unreachable,
+ };
+ }
+
// fs: fs.FileSystem,
// TODO:
pub fn requireOrImportMetaForSource(c: Linker, source_index: Ref.Int) RequireOrImportMeta {
@@ -12,5 +69,180 @@ pub const Linker = struct {
// This modifies the Ast in-place!
// But more importantly, this does the following:
// - Wrap CommonJS files
- pub fn link(allocator: *std.mem.Allocator, ast: *js_ast.Ast) !void {}
+ pub fn link(linker: *Linker, file_path: Fs.Path, result: *Bundler.ParseResult) !void {
+ var needs_runtime = false;
+ const source_dir = file_path.name.dir;
+ var externals = std.ArrayList(u32).init(linker.allocator);
+
+ // Step 1. Resolve imports & requires
+ switch (result.loader) {
+ .jsx, .js, .ts, .tsx => {
+ for (result.ast.import_records) |*import_record, record_index| {
+ if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
+ if (resolved_import.is_external) {
+ externals.append(@truncate(u32, record_index)) catch unreachable;
+ }
+ linker.processImportRecord(
+ // Include trailing slash
+ file_path.text[0 .. source_dir.len + 1],
+ resolved_import,
+ import_record,
+ ) catch continue;
+ import_record.wrap_with_to_module = resolved_import.shouldAssumeCommonJS(import_record);
+ if (import_record.wrap_with_to_module) {
+ if (!linker.any_needs_runtime) {
+ linker.any_needs_runtime = true;
+ }
+ needs_runtime = true;
+ }
+ } else |err| {
+ switch (err) {
+ error.ModuleNotFound => {
+ if (Resolver.Resolver.isPackagePath(import_record.path.text)) {
+ if (linker.options.platform != .node and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
+ try linker.log.addRangeErrorFmt(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Could not resolve: \"{s}\". Try setting --platform=\"node\"",
+ .{import_record.path.text},
+ );
+ } else {
+ try linker.log.addRangeErrorFmt(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Could not resolve: \"{s}\". Maybe you need to \"npm install\" (or yarn/pnpm)?",
+ .{import_record.path.text},
+ );
+ }
+ } else {
+ try linker.log.addRangeErrorFmt(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Could not resolve: \"{s}\"",
+ .{
+ import_record.path.text,
+ },
+ );
+ continue;
+ }
+ },
+ else => {
+ continue;
+ },
+ }
+ }
+ }
+ },
+ else => {},
+ }
+
+ // Step 2.
+
+ result.ast.externals = externals.toOwnedSlice();
+
+ if (needs_runtime) {
+ std.debug.assert(!result.ast.needs_runtime);
+ result.ast.runtime_import_record = ImportRecord{
+ .path = try linker.generateImportPath(
+ source_dir,
+ linker.runtime_source_path,
+ ),
+ .range = logger.Range.None,
+ .kind = .internal,
+ };
+ result.ast.needs_runtime = true;
+ }
+ }
+
+ const ImportPathsList = allocators.BSSStringList(512, 128);
+ pub var relative_paths_list: *ImportPathsList = undefined;
+ threadlocal var relative_path_allocator: std.heap.FixedBufferAllocator = undefined;
+ threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
+ threadlocal var relative_path_allocator_buf_loaded: bool = false;
+
+ pub fn generateImportPath(linker: *Linker, source_dir: string, source_path: string) !Fs.Path {
+ if (!relative_path_allocator_buf_loaded) {
+ relative_path_allocator_buf_loaded = true;
+ relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
+ }
+ defer relative_path_allocator.reset();
+
+ var pretty = try relative_paths_list.append(linker.fs.relative(source_dir, source_path));
+ var pathname = Fs.PathName.init(pretty);
+ var absolute_pathname = Fs.PathName.init(source_path);
+
+ if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
+ absolute_pathname.ext = ext;
+ }
+
+ switch (linker.options.import_path_format) {
+ .relative => {
+ return Fs.Path.initWithPretty(pretty, pretty);
+ },
+ .relative_nodejs => {
+ var path = Fs.Path.initWithPretty(pretty, pretty);
+ path.text = path.text[0 .. path.text.len - path.name.ext.len];
+ return path;
+ },
+
+ .absolute_url => {
+ const absolute_url = try relative_paths_list.append(
+ try std.fmt.allocPrint(
+ &relative_path_allocator.allocator,
+ "{s}{s}{s}{s}",
+ .{
+ linker.options.public_url,
+ pathname.dir,
+ pathname.base,
+ absolute_pathname.ext,
+ },
+ ),
+ );
+
+ return Fs.Path.initWithPretty(absolute_url, pretty);
+ },
+
+ else => unreachable,
+ }
+ }
+
+ pub fn processImportRecord(linker: *Linker, source_dir: string, resolve_result: *Resolver.Resolver.Result, import_record: *ImportRecord) !void {
+
+ // extremely naive.
+ resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
+
+ // lazy means:
+ // Run the resolver
+ // Don't parse/print automatically.
+ if (linker.options.resolve_mode != .lazy) {
+ try linker.enqueueResolveResult(resolve_result);
+ }
+
+ import_record.path = try linker.generateImportPath(source_dir, resolve_result.path_pair.primary.text);
+ }
+
+ pub fn resolveResultHashKey(linker: *Linker, resolve_result: *const Resolver.Resolver.Result) string {
+ var hash_key = resolve_result.path_pair.primary.text;
+
+ // Shorter hash key is faster to hash
+ if (strings.startsWith(resolve_result.path_pair.primary.text, linker.fs.top_level_dir)) {
+ hash_key = resolve_result.path_pair.primary.text[linker.fs.top_level_dir.len..];
+ }
+
+ return hash_key;
+ }
+
+ pub fn enqueueResolveResult(linker: *Linker, resolve_result: *const Resolver.Resolver.Result) !void {
+ const hash_key = linker.resolveResultHashKey(resolve_result);
+
+ const get_or_put_entry = try linker.resolve_results.backing.getOrPut(hash_key);
+
+ if (!get_or_put_entry.found_existing) {
+ get_or_put_entry.entry.value = resolve_result.*;
+ try linker.resolve_queue.writeItem(resolve_result.*);
+ }
+ }
};
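
For context on the "Wrap CommonJS files" step in the linker diff above: the sketch below is plain JavaScript, not part of this commit, and shows roughly what the wrap_with_to_module flag plus the injected __runtime.js import are driving at. The __toModule shape and the require_left_pad wrapper name are assumptions modeled on the helper in src/runtime.js, not literal bundler output.

// Minimal stand-in for the runtime helper (assumed shape; see src/runtime.js).
var __toModule = (mod) => (mod && mod.__esModule ? mod : { default: mod, ...mod });

// Stand-in for a wrapped CommonJS module body; the name is illustrative.
var require_left_pad = () => {
  var module = { exports: {} };
  module.exports = (str, len) => String(str).padStart(len);
  return module.exports;
};

// What `import leftPad from "left-pad"` conceptually becomes once the linker
// marks the import record wrap_with_to_module: the default import reads from
// the __toModule wrapper instead of the raw CommonJS exports object.
var left_pad_ns = __toModule(require_left_pad());
var leftPad = left_pad_ns.default;
console.log(leftPad("7", 3)); // "  7"
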
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 15d411596..809b85a32 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -11,6 +11,7 @@ const PackageJSON = @import("./package_json.zig").PackageJSON;
usingnamespace @import("./data_url.zig");
const Wyhash = std.hash.Wyhash;
+
const hash_map_v2 = @import("../hash_map_v2.zig");
const Mutex = sync.Mutex;
const StringBoolMap = std.StringHashMap(bool);
@@ -268,8 +269,16 @@ pub const Resolver = struct {
// Most NPM modules are CommonJS
// If unspecified, assume CommonJS.
// If internal app code, assume ESM. Since this is designed for ESM.`
- pub fn shouldAssumeCommonJS(r: *Result) bool {
- return r.is_from_node_modules and r.module_type != .esm;
+ pub fn shouldAssumeCommonJS(r: *const Result, import_record: *const ast.ImportRecord) bool {
+ if (import_record.kind == .require or import_record.kind == .require_resolve or r.module_type == .cjs) {
+ return true;
+ }
+
+ if (r.module_type == .esm) {
+ return false;
+ }
+
+ return r.is_from_node_modules and import_record.contains_default_alias;
}
pub const DebugMeta = struct {
diff --git a/src/runtime.js b/src/runtime.js
index 11d5be083..045e7d8b6 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -38,3 +38,16 @@ export var __toModule = (module) => {
module
);
};
+
+export var __name = (target, name) => {
+ Object.defineProperty(target, "name", {
+ get() {
+ return name;
+ },
+ set(v) {
+ name = v;
+ },
+ });
+
+ return target;
+};
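
The __name helper added above mirrors the esbuild-style pattern of keeping a function's observable .name stable even when the printer renames its binding. A small self-contained usage sketch follows; the greet/greet2 renaming is illustrative, not output generated by this commit.

// Copy of the helper above so the sketch runs on its own.
var __name = (target, name) => {
  Object.defineProperty(target, "name", {
    get() {
      return name;
    },
    set(v) {
      name = v;
    },
  });
  return target;
};

// If a bundler renamed `greet` to `greet2` to avoid a collision, wrapping the
// function with __name keeps the original name visible to callers.
var greet2 = __name(function greet2(who) {
  return "hi " + who;
}, "greet");

console.log(greet2.name); // "greet"
greet2.name = "welcome";  // the setter keeps the property assignable
console.log(greet2.name); // "welcome"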