Diffstat (limited to 'src')
-rw-r--r--  src/allocators.zig                  13
-rw-r--r--  src/bundler.zig                     48
-rw-r--r--  src/cache.zig                        8
-rw-r--r--  src/feature_flags.zig                2
-rw-r--r--  src/fs.zig                         112
-rw-r--r--  src/http.zig                         9
-rw-r--r--  src/javascript/jsc/javascript.zig    4
-rw-r--r--  src/js_parser/js_parser.zig          2
-rw-r--r--  src/linker.zig                      19
-rw-r--r--  src/resolver/resolver.zig           89
-rw-r--r--  src/runtime.footer.js                1
-rw-r--r--  src/runtime.js                      29
-rw-r--r--  src/runtime.version                  2
-rw-r--r--  src/runtime.zig                      4
14 files changed, 268 insertions, 74 deletions
diff --git a/src/allocators.zig b/src/allocators.zig
index 4b7bde391..b2b8244f4 100644
--- a/src/allocators.zig
+++ b/src/allocators.zig
@@ -65,6 +65,17 @@ pub fn isSliceInBuffer(slice: anytype, buffer: anytype) bool {
return (@ptrToInt(buffer) <= @ptrToInt(slice.ptr) and (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer) + buffer.len));
}
+pub fn sliceRange(slice: []const u8, buffer: []const u8) ?[2]u32 {
+ return if (@ptrToInt(buffer.ptr) <= @ptrToInt(slice.ptr) and
+ (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer.ptr) + buffer.len))
+ [2]u32{
+ @truncate(u32, @ptrToInt(slice.ptr) - @ptrToInt(buffer.ptr)),
+ @truncate(u32, slice.len),
+ }
+ else
+ null;
+}
+
pub const IndexType = packed struct {
index: u31,
is_overflow: bool = false,
@@ -306,7 +317,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
}
pub fn exists(self: *Self, value: ValueType) bool {
- return isSliceInBuffer(value, slice_buf);
+ return isSliceInBuffer(value, &backing_buf);
}
pub fn editableSlice(slice: []const u8) []u8 {
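
For reference, a minimal standalone sketch of what the new sliceRange helper returns: the (offset, length) of a slice inside its backing buffer, or null when the slice is backed by different memory. The function body mirrors the hunk above; the main driver and its strings are hypothetical.

```zig
const std = @import("std");

// Copy of sliceRange from the hunk above (pre-0.10 builtin syntax, as in the diff).
fn sliceRange(slice: []const u8, buffer: []const u8) ?[2]u32 {
    return if (@ptrToInt(buffer.ptr) <= @ptrToInt(slice.ptr) and
        (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer.ptr) + buffer.len))
        [2]u32{
            @truncate(u32, @ptrToInt(slice.ptr) - @ptrToInt(buffer.ptr)),
            @truncate(u32, slice.len),
        }
    else
        null;
}

pub fn main() void {
    const buffer = "src/fs.zig:dupeAlloc";
    const inner = buffer[4..10]; // "fs.zig" starts at offset 4, length 6
    const range = sliceRange(inner, buffer).?;
    std.debug.assert(range[0] == 4 and range[1] == 6);
    // A slice backed by a different buffer is not contained.
    std.debug.assert(sliceRange("unrelated", buffer) == null);
}
```

dupeAlloc in fs.zig (further down in this diff) uses this to recover the pretty-path substring after duplicating text.
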
diff --git a/src/bundler.zig b/src/bundler.zig
index 53b3a2f04..26d59ca93 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -570,15 +570,14 @@ pub fn NewBundler(cache_files: bool) type {
pub fn enqueueItem(this: *GenerateNodeModuleBundle, resolve: _resolver.Result) !void {
var result = resolve;
var path = result.path() orelse return;
+
const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file;
if (!loader.isJavaScriptLikeOrJSON()) return;
- if (BundledModuleData.get(this, &result)) |mod| {
- path.* = Fs.Path.init(std.mem.span(try this.allocator.dupeZ(u8, path.text)));
+ path.* = try path.dupeAlloc(this.allocator);
+ if (BundledModuleData.get(this, &result)) |mod| {
try this.queue.upsert(mod.module_id, result);
} else {
- path.* = Fs.Path.init(std.mem.span(try this.allocator.dupeZ(u8, path.text)));
-
try this.queue.upsert(result.hash(this.bundler.fs.top_level_dir, loader), result);
}
}
@@ -1025,11 +1024,15 @@ pub fn NewBundler(cache_files: bool) type {
module_id: u32,
pub fn get(this: *GenerateNodeModuleBundle, resolve_result: *const _resolver.Result) ?BundledModuleData {
- var path = resolve_result.pathConst() orelse return null;
- const package_json: *const PackageJSON = this.bundler.resolver.rootNodeModulePackageJSON(resolve_result) orelse return null;
- const package_base_path = package_json.source.path.name.dirWithTrailingSlash();
- const import_path = path.text[package_base_path.len..];
- const package_path = path.text[package_base_path.len - package_json.name.len - 1 ..];
+ const path = resolve_result.pathConst() orelse return null;
+ var base_path = path.text;
+ const package_json: *const PackageJSON = this.bundler.resolver.rootNodeModulePackageJSON(
+ resolve_result,
+ &base_path,
+ ) orelse return null;
+ const package_base_path = package_json.source.path.sourceDir();
+ const import_path = base_path[package_base_path.len..];
+ const package_path = base_path[package_base_path.len - package_json.name.len - 1 ..];
return BundledModuleData{
.import_path = import_path,
@@ -1051,7 +1054,7 @@ pub fn NewBundler(cache_files: bool) type {
const is_from_node_modules = resolve.isLikelyNodeModule();
var file_path = (resolve.pathConst() orelse unreachable).*;
-
+ const source_dir = file_path.sourceDir();
const loader = this.bundler.options.loader(file_path.name.ext);
var bundler = this.bundler;
defer scan_pass_result.reset();
@@ -1073,7 +1076,7 @@ pub fn NewBundler(cache_files: bool) type {
bundler.fs,
file_path.text,
resolve.dirname_fd,
- null,
+ if (resolve.file_fd != 0) resolve.file_fd else null,
shared_buffer,
);
@@ -1110,7 +1113,6 @@ pub fn NewBundler(cache_files: bool) type {
},
bundler.allocator,
) catch return null;
- const source_dir = file_path.name.dirWithTrailingSlash();
var jsx = bundler.options.jsx;
jsx.parse = loader.isJSX();
@@ -1153,12 +1155,21 @@ pub fn NewBundler(cache_files: bool) type {
continue;
}
+ // if (_resolved_import.package_json == null) |pkg_json| {
+ // _resolved_import.package_json = if (pkg_json.hash == resolve.package_json.?.hash)
+ // resolve.package_json
+ // else
+ // _resolved_import.package_json;
+ // }
+
const resolved_import: *const _resolver.Result = _resolved_import;
- const _module_data = BundledModuleData.get(this, resolved_import) orelse continue;
+ const _module_data = BundledModuleData.get(this, resolved_import) orelse unreachable;
import_record.module_id = _module_data.module_id;
+ std.debug.assert(import_record.module_id != 0);
import_record.is_bundled = true;
- path.* = Fs.Path.init(this.allocator.dupeZ(u8, path.text) catch unreachable);
+
+ path.* = try path.dupeAlloc(this.allocator);
import_record.path = path.*;
@@ -1416,13 +1427,12 @@ pub fn NewBundler(cache_files: bool) type {
bundler.fs,
file_path.text,
resolve.dirname_fd,
- null,
+ if (resolve.file_fd != 0) resolve.file_fd else null,
shared_buffer,
) catch return;
if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) return;
const source = logger.Source.initRecycledFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null;
- const source_dir = file_path.name.dirWithTrailingSlash();
var jsx = bundler.options.jsx;
jsx.parse = loader.isJSX();
@@ -1452,9 +1462,10 @@ pub fn NewBundler(cache_files: bool) type {
const loader_ = this.bundler.options.loader(path.name.ext);
if (!loader_.isJavaScriptLikeOrJSON()) continue;
- path.* = Fs.Path.init(std.mem.span(try this.allocator.dupeZ(u8, path.text)));
+ path.* = try path.dupeAlloc(this.allocator);
if (BundledModuleData.get(this, _resolved_import)) |mod| {
+ std.debug.assert(mod.module_id != 0);
try this.queue.upsert(
mod.module_id,
_resolved_import.*,
@@ -1556,7 +1567,7 @@ pub fn NewBundler(cache_files: bool) type {
if (strings.indexOf(file_path.text, bundler.fs.top_level_dir)) |i| {
file_path.pretty = file_path.text[i + bundler.fs.top_level_dir.len ..];
- } else {
+ } else if (!file_path.is_symlink) {
file_path.pretty = allocator.dupe(u8, bundler.fs.relativeTo(file_path.text)) catch unreachable;
}
@@ -1765,6 +1776,7 @@ pub fn NewBundler(cache_files: bool) type {
&source,
file,
&bundler.linker,
+ bundler.log,
);
try css_writer.run(bundler.log, bundler.allocator);
output_file.size = css_writer.written;
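
A worked example of the import_path / package_path slicing that BundledModuleData.get now performs on base_path, using the hypothetical layout from the resolver comment further down (/foo/node_modules/@babel/standalone/index.js). The literal strings stand in for path.text, package_json.source.path.sourceDir(), and package_json.name.

```zig
const std = @import("std");

pub fn main() void {
    const base_path = "/foo/node_modules/@babel/standalone/index.js";
    const package_base_path = "/foo/node_modules/@babel/standalone/"; // package dir, trailing slash
    const package_name = "@babel/standalone";

    // Same arithmetic as BundledModuleData.get:
    const import_path = base_path[package_base_path.len..];
    const package_path = base_path[package_base_path.len - package_name.len - 1 ..];

    std.debug.assert(std.mem.eql(u8, import_path, "index.js"));
    std.debug.assert(std.mem.eql(u8, package_path, "@babel/standalone/index.js"));
}
```
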
diff --git a/src/cache.zig b/src/cache.zig
index 533e35a38..c2e56e6c8 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -73,7 +73,7 @@ pub fn NewCache(comptime cache_files: bool) type {
) !Entry {
var rfs = _fs.fs;
- if (cache_files) {
+ if (comptime cache_files) {
{
c.mutex.lock();
defer c.mutex.unlock();
@@ -138,7 +138,7 @@ pub fn NewCache(comptime cache_files: bool) type {
.fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
};
- if (cache_files) {
+ if (comptime cache_files) {
c.mutex.lock();
defer c.mutex.unlock();
var res = c.entries.getOrPut(path) catch unreachable;
@@ -163,7 +163,7 @@ pub fn NewCache(comptime cache_files: bool) type {
) !Entry {
var rfs = _fs.fs;
- if (cache_files) {
+ if (comptime cache_files) {
{
c.mutex.lock();
defer c.mutex.unlock();
@@ -228,7 +228,7 @@ pub fn NewCache(comptime cache_files: bool) type {
.fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
};
- if (cache_files) {
+ if (comptime cache_files) {
c.mutex.lock();
defer c.mutex.unlock();
var res = c.entries.getOrPut(path) catch unreachable;
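
The cache_files checks above are now forced with the comptime keyword, so for a given instantiation of NewCache the untaken branch is never analyzed or emitted. A minimal sketch of that pattern, with a hypothetical hits counter only there to make the two instantiations observable:

```zig
const std = @import("std");

fn NewCache(comptime cache_files: bool) type {
    return struct {
        hits: usize = 0,

        pub fn get(self: *@This()) void {
            if (comptime cache_files) {
                self.hits += 1; // only compiled into the caching variant
            }
        }
    };
}

pub fn main() void {
    var caching = NewCache(true){};
    var passthrough = NewCache(false){};
    caching.get();
    passthrough.get();
    std.debug.assert(caching.hits == 1 and passthrough.hits == 0);
}
```
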
diff --git a/src/feature_flags.zig b/src/feature_flags.zig
index c5b11c5f9..33a4eaee1 100644
--- a/src/feature_flags.zig
+++ b/src/feature_flags.zig
@@ -40,6 +40,8 @@ pub const verbose_fs = false;
pub const watch_directories = true;
+pub const tailwind_css_at_keyword = true;
+
// This feature flag exists so when you have defines inside package.json, you can use single quotes in nested strings.
pub const allow_json_single_quotes = true;
diff --git a/src/fs.zig b/src/fs.zig
index e59d24b6f..4190710e0 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -356,6 +356,7 @@ pub const FileSystem = struct {
pub const Cache = struct {
symlink: string = "",
+ fd: StoredFileDescriptorType = 0,
kind: Kind = Kind.file,
};
@@ -905,6 +906,8 @@ pub const FileSystem = struct {
defer {
if (fs.needToCloseFiles()) {
file.close();
+ } else {
+ cache.fd = file.handle;
}
}
const _stat = try file.stat();
@@ -922,7 +925,7 @@ pub const FileSystem = struct {
cache.kind = .file;
}
if (symlink.len > 0) {
- cache.symlink = try fs.allocator.dupe(u8, symlink);
+ cache.symlink = try FilenameStore.instance.append([]const u8, symlink);
}
return cache;
@@ -972,7 +975,7 @@ pub const PathName = struct {
return MutableString.ensureValidIdentifier(self.base, allocator);
}
- pub fn dirWithTrailingSlash(this: *const PathName) string {
+ pub inline fn dirWithTrailingSlash(this: *const PathName) string {
// The three strings basically always point to the same underlying ptr
// so if dir does not have a trailing slash, but is spaced one apart from the basename
// we can assume there is a trailing slash there
@@ -1034,10 +1037,10 @@ threadlocal var join_buf: [1024]u8 = undefined;
pub const Path = struct {
pretty: string,
text: string,
- non_symlink: string = "",
namespace: string = "unspecified",
name: PathName,
is_disabled: bool = false,
+ is_symlink: bool = false,
pub const PackageRelative = struct {
path: string,
@@ -1045,6 +1048,88 @@ pub const Path = struct {
is_parent_package: bool = false,
};
+ pub inline fn sourceDir(this: *const Path) string {
+ return this.name.dirWithTrailingSlash();
+ }
+
+ pub inline fn prettyDir(this: *const Path) string {
+ return this.name.dirWithTrailingSlash();
+ }
+
+ // This duplicates but only when strictly necessary
+ // This will skip allocating if it's already in FilenameStore or DirnameStore
+ pub fn dupeAlloc(this: *const Path, allocator: *std.mem.Allocator) !Fs.Path {
+ if (this.text.ptr == this.pretty.ptr and this.text.len == this.pretty.len) {
+ if (FileSystem.FilenameStore.instance.exists(this.text) or FileSystem.DirnameStore.instance.exists(this.text)) {
+ return this.*;
+ }
+
+ var new_path = Fs.Path.init(try FileSystem.FilenameStore.instance.append([]const u8, this.text));
+ new_path.pretty = this.text;
+ new_path.namespace = this.namespace;
+ new_path.is_symlink = this.is_symlink;
+ return new_path;
+ } else if (this.pretty.len == 0) {
+ if (FileSystem.FilenameStore.instance.exists(this.text) or FileSystem.DirnameStore.instance.exists(this.text)) {
+ return this.*;
+ }
+
+ var new_path = Fs.Path.init(try FileSystem.FilenameStore.instance.append([]const u8, this.text));
+ new_path.pretty = "";
+ new_path.namespace = this.namespace;
+ new_path.is_symlink = this.is_symlink;
+ return new_path;
+ } else if (allocators.sliceRange(this.pretty, this.text)) |start_end| {
+ if (FileSystem.FilenameStore.instance.exists(this.text) or FileSystem.DirnameStore.instance.exists(this.text)) {
+ return this.*;
+ }
+ var new_path = Fs.Path.init(try FileSystem.FilenameStore.instance.append([]const u8, this.text));
+ new_path.pretty = this.text[start_end[0]..start_end[1]];
+ new_path.namespace = this.namespace;
+ new_path.is_symlink = this.is_symlink;
+ return new_path;
+ } else {
+ if ((FileSystem.FilenameStore.instance.exists(this.text) or
+ FileSystem.DirnameStore.instance.exists(this.text)) and
+ (FileSystem.FilenameStore.instance.exists(this.pretty) or
+ FileSystem.DirnameStore.instance.exists(this.pretty)))
+ {
+ return this.*;
+ }
+
+ if (strings.indexOf(this.text, this.pretty)) |offset| {
+ var text = try FileSystem.FilenameStore.instance.append([]const u8, this.text);
+ var new_path = Fs.Path.init(text);
+ new_path.pretty = text[offset..][0..this.pretty.len];
+ new_path.namespace = this.namespace;
+ new_path.is_symlink = this.is_symlink;
+ return new_path;
+ } else {
+ var buf = try allocator.alloc(u8, this.text.len + this.pretty.len + 2);
+ std.mem.copy(u8, buf, this.text);
+ buf.ptr[this.text.len] = 0;
+ var new_pretty = buf[this.text.len + 1 ..];
+ std.mem.copy(u8, buf[this.text.len + 1 ..], this.pretty);
+ var new_path = Fs.Path.init(buf[0..this.text.len]);
+ buf.ptr[buf.len - 1] = 0;
+ new_path.pretty = new_pretty;
+ new_path.namespace = this.namespace;
+ new_path.is_symlink = this.is_symlink;
+ return new_path;
+ }
+ }
+ }
+
+ pub const empty = Fs.Path.init("");
+
+ pub fn setRealpath(this: *Path, to: string) void {
+ const old_path = this.text;
+ this.text = to;
+ this.name = PathName.init(to);
+ this.pretty = old_path;
+ this.is_symlink = true;
+ }
+
pub fn jsonStringify(self: *const @This(), options: anytype, writer: anytype) !void {
return try std.json.stringify(self.text, options, writer);
}
@@ -1054,15 +1139,30 @@ pub const Path = struct {
}
pub fn init(text: string) Path {
- return Path{ .pretty = text, .text = text, .namespace = "file", .name = PathName.init(text) };
+ return Path{
+ .pretty = text,
+ .text = text,
+ .namespace = "file",
+ .name = PathName.init(text),
+ };
}
pub fn initWithPretty(text: string, pretty: string) Path {
- return Path{ .pretty = pretty, .text = text, .namespace = "file", .name = PathName.init(text) };
+ return Path{
+ .pretty = pretty,
+ .text = text,
+ .namespace = "file",
+ .name = PathName.init(text),
+ };
}
pub fn initWithNamespace(text: string, namespace: string) Path {
- return Path{ .pretty = text, .text = text, .namespace = namespace, .name = PathName.init(text) };
+ return Path{
+ .pretty = text,
+ .text = text,
+ .namespace = namespace,
+ .name = PathName.init(text),
+ };
}
pub fn isBefore(a: *Path, b: Path) bool {
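
A trimmed-down sketch of what setRealpath records, assuming a hypothetical Path with only the three affected fields: the resolved target becomes text, the original (possibly symlinked) path survives as pretty, and is_symlink is set so later stages (the linker hunks below) can pick whichever form they need. The real struct also reinitializes name, which is omitted here; the paths are hypothetical.

```zig
const std = @import("std");

const Path = struct {
    text: []const u8,
    pretty: []const u8,
    is_symlink: bool = false,

    fn setRealpath(this: *Path, to: []const u8) void {
        const old_path = this.text;
        this.text = to;
        this.pretty = old_path; // keep the pre-symlink spelling around
        this.is_symlink = true;
    }
};

pub fn main() void {
    var p = Path{
        .text = "/project/node_modules/pkg/index.js",
        .pretty = "/project/node_modules/pkg/index.js",
    };
    p.setRealpath("/real/checkout/pkg/index.js");

    std.debug.assert(p.is_symlink);
    std.debug.assert(std.mem.eql(u8, p.text, "/real/checkout/pkg/index.js"));
    std.debug.assert(std.mem.eql(u8, p.pretty, "/project/node_modules/pkg/index.js"));
}
```
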
diff --git a/src/http.zig b/src/http.zig
index fd17f2021..e9fd74ee1 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -531,6 +531,8 @@ pub const RequestContext = struct {
}
pub fn sendNotFound(req: *RequestContext) !void {
+ std.debug.assert(!req.has_called_done);
+
defer req.done();
try req.writeStatus(404);
try req.flushHeaders();
@@ -1126,7 +1128,9 @@ pub const RequestContext = struct {
while (true) {
defer {
JavaScript.VirtualMachine.vm.flush();
- std.debug.assert(ZigGlobalObject.resetModuleRegistryMap(vm.global, module_map));
+ std.debug.assert(
+ ZigGlobalObject.resetModuleRegistryMap(vm.global, module_map),
+ );
js_ast.Stmt.Data.Store.reset();
js_ast.Expr.Data.Store.reset();
JavaScript.Bun.flushCSSImports();
@@ -2458,7 +2462,6 @@ pub const Server = struct {
else => {
Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
- return;
},
}
};
@@ -2474,7 +2477,6 @@ pub const Server = struct {
else => {
Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
- return;
},
}
};
@@ -2489,7 +2491,6 @@ pub const Server = struct {
req_ctx.renderServeResult(result) catch |err| {
Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
- return;
};
}
}
diff --git a/src/javascript/jsc/javascript.zig b/src/javascript/jsc/javascript.zig
index 7c7fb48c2..8bbfa3e74 100644
--- a/src/javascript/jsc/javascript.zig
+++ b/src/javascript/jsc/javascript.zig
@@ -518,7 +518,7 @@ pub const VirtualMachine = struct {
file_path,
&parse_result,
.absolute_path,
- true,
+ false,
);
if (!source_code_printer_loaded) {
@@ -601,7 +601,7 @@ pub const VirtualMachine = struct {
path,
&parse_result,
.absolute_path,
- true,
+ false,
);
vm.resolved_count += vm.bundler.linker.import_counter - start_count;
vm.bundler.linker.import_counter = 0;
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 590f8b168..92f709557 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -1906,7 +1906,7 @@ pub const Parser = struct {
exports_kind = .cjs;
if (p.options.transform_require_to_import) {
var args = p.allocator.alloc(Expr, 2) catch unreachable;
- wrapper_expr = p.callRuntime(logger.Loc.Empty, "__commonJS", args);
+ wrapper_expr = p.callRuntime(logger.Loc.Empty, "__cJS2eSM", args);
// Disable HMR if we're wrapping it in CommonJS
// It's technically possible to support this.
diff --git a/src/linker.zig b/src/linker.zig
index 09fac6845..56912b9b0 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -187,10 +187,12 @@ pub fn NewLinker(comptime BundlerType: type) type {
comptime ignore_runtime: bool,
) !void {
var needs_runtime = result.ast.uses_exports_ref or result.ast.uses_module_ref or result.ast.runtime_imports.hasAny();
- const source_dir = file_path.name.dir;
+ const source_dir = if (file_path.is_symlink and file_path.pretty.len > 0 and import_path_format == .absolute_url and linker.options.platform != .bun)
+ Fs.PathName.init(file_path.pretty).dirWithTrailingSlash()
+ else
+ file_path.sourceDir();
var externals = std.ArrayList(u32).init(linker.allocator);
var needs_bundle = false;
- const process_import_record_path = file_path.text[0..std.math.min(source_dir.len + 1, file_path.text.len)];
var first_bundled_index: ?u32 = null;
// Step 1. Resolve imports & requires
@@ -244,7 +246,9 @@ pub fn NewLinker(comptime BundlerType: type) type {
break :bundled;
};
if (package_json_) |package_json| {
- if (strings.contains(package_json.source.path.name.dirWithTrailingSlash(), "node_modules")) {
+ const package_base_dir = package_json.source.path.sourceDir();
+ const node_module_root = std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str;
+ if (strings.lastIndexOf(package_base_dir, node_module_root)) |last_node_modules| {
if (node_modules_bundle.getPackageIDByName(package_json.name)) |possible_pkg_ids| {
const pkg_id: u32 = brk: {
for (possible_pkg_ids) |pkg_id| {
@@ -275,8 +279,8 @@ pub fn NewLinker(comptime BundlerType: type) type {
}
const package_relative_path = linker.fs.relative(
- package_json.source.path.name.dirWithTrailingSlash(),
- path.text,
+ package_base_dir,
+ path.pretty,
);
const found_module = node_modules_bundle.findModuleInPackage(package, package_relative_path) orelse {
@@ -319,7 +323,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
loader,
// Include trailing slash
- process_import_record_path,
+ source_dir,
resolved_import,
import_record,
import_path_format,
@@ -574,10 +578,11 @@ pub fn NewLinker(comptime BundlerType: type) type {
if (linker.options.resolve_mode != .lazy) {
_ = try linker.enqueueResolveResult(resolve_result);
}
+ const path = resolve_result.pathConst() orelse unreachable;
import_record.path = try linker.generateImportPath(
source_dir,
- resolve_result.pathConst().?.text,
+ if (path.is_symlink and import_path_format == .absolute_url and linker.options.platform != .bun) path.pretty else path.text,
if (resolve_result.package_json) |package_json| package_json.version else "",
BundlerType.isCacheEnabled and loader == .file,
import_path_format,
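
The node_modules test in the linker now looks for the separator-delimited directory name instead of a bare "node_modules" substring, so directories that merely contain the word no longer match. A small sketch of the difference, with std.mem.lastIndexOf standing in for the project's strings.lastIndexOf, hypothetical paths, and a POSIX separator assumed:

```zig
const std = @import("std");

pub fn main() void {
    // "/node_modules/" on POSIX, "\node_modules\" on Windows.
    const node_module_root = std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str;

    const inside = "/app/node_modules/react/index.js";
    const outside = "/app/src/not_node_modules_related/index.js"; // contains the word, not the directory

    std.debug.assert(std.mem.lastIndexOf(u8, inside, node_module_root) != null);
    std.debug.assert(std.mem.lastIndexOf(u8, outside, node_module_root) == null);
}
```
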
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 8dffbb497..56143ed56 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -125,8 +125,8 @@ pub const Result = struct {
// remember: non-node_modules can have package.json
// checking package.json may not be relevant
pub fn isLikelyNodeModule(this: *const Result) bool {
- const dir = this.path_pair.primary.name.dirWithTrailingSlash();
- return strings.indexOf(dir, "/node_modules/") != null;
+ const path_ = this.pathConst() orelse return false;
+ return strings.indexOf(path_.text, "/node_modules/") != null;
}
// Most NPM modules are CommonJS
@@ -500,27 +500,28 @@ pub fn NewResolver(cache_files: bool) type {
const json = (try r.caches.json.parseJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;
pkg.loadFrameworkWithPreference(pair, json, r.allocator, load_defines, preference);
- const dir = pkg.source.path.name.dirWithTrailingSlash();
+ const dir = pkg.source.path.sourceDir();
+
var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
if (pair.framework.client.isEnabled()) {
var parts = [_]string{ dir, pair.framework.client.path };
const abs = r.fs.abs(&parts);
- pair.framework.client.path = try r.allocator.dupe(u8, try std.os.realpath(abs, &buf));
+ pair.framework.client.path = try r.allocator.dupe(u8, abs);
pair.framework.resolved = true;
}
if (pair.framework.server.isEnabled()) {
var parts = [_]string{ dir, pair.framework.server.path };
const abs = r.fs.abs(&parts);
- pair.framework.server.path = try r.allocator.dupe(u8, try std.os.realpath(abs, &buf));
+ pair.framework.server.path = try r.allocator.dupe(u8, abs);
pair.framework.resolved = true;
}
if (pair.framework.fallback.isEnabled()) {
var parts = [_]string{ dir, pair.framework.fallback.path };
const abs = r.fs.abs(&parts);
- pair.framework.fallback.path = try r.allocator.dupe(u8, try std.os.realpath(abs, &buf));
+ pair.framework.fallback.path = try r.allocator.dupe(u8, abs);
pair.framework.resolved = true;
}
@@ -647,50 +648,80 @@ pub fn NewResolver(cache_files: bool) type {
pub fn finalizeResult(r: *ThisResolver, result: *Result) !void {
if (result.is_external) return;
- if (result.package_json) |package_json| {
- result.module_type = switch (package_json.module_type) {
- .esm, .cjs => package_json.module_type,
- .unknown => result.module_type,
- };
- }
-
var iter = result.path_pair.iter();
while (iter.next()) |path| {
var dir: *DirInfo = (r.readDirInfo(path.name.dir) catch continue) orelse continue;
+ result.package_json = result.package_json orelse dir.package_json;
+
if (dir.getEntries()) |entries| {
if (entries.get(path.name.filename)) |query| {
const symlink_path = query.entry.symlink(&r.fs.fs);
if (symlink_path.len > 0) {
- path.non_symlink = path.text;
- // Is this entry itself a symlink?
- path.text = symlink_path;
- path.name = Fs.PathName.init(path.text);
+ path.setRealpath(symlink_path);
+ if (result.file_fd == 0) result.file_fd = query.entry.cache.fd;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.non_symlink, path.text }) catch {};
+ debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.text, symlink_path }) catch {};
}
} else if (dir.abs_real_path.len > 0) {
- path.non_symlink = path.text;
var parts = [_]string{ dir.abs_real_path, query.entry.base() };
var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+
var out = r.fs.absBuf(&parts, &buf);
+
+ if (query.entry.cache.fd == 0) {
+ buf[out.len] = 0;
+ const span = buf[0..out.len :0];
+ var file = try std.fs.openFileAbsoluteZ(span, .{ .read = true });
+
+ if (comptime !FeatureFlags.store_file_descriptors) {
+ out = try std.os.getFdPath(query.entry.cache.fd, &buf);
+ file.close();
+ } else {
+ query.entry.cache.fd = file.handle;
+ Fs.FileSystem.setMaxFd(file.handle);
+ }
+ }
+
+ defer {
+ if (r.fs.fs.needToCloseFiles()) {
+ if (query.entry.cache.fd != 0) {
+ var file = std.fs.File{ .handle = query.entry.cache.fd };
+ file.close();
+ query.entry.cache.fd = 0;
+ }
+ }
+ }
+
+ if (comptime FeatureFlags.store_file_descriptors) {
+ out = try std.os.getFdPath(query.entry.cache.fd, &buf);
+ }
+
const symlink = try Fs.FileSystem.FilenameStore.instance.append(@TypeOf(out), out);
if (r.debug_logs) |*debug| {
debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text }) catch {};
}
query.entry.cache.symlink = symlink;
+ if (result.file_fd == 0) result.file_fd = query.entry.cache.fd;
- path.name = Fs.PathName.init(path.text);
+ path.setRealpath(symlink);
}
}
}
}
+
+ if (result.package_json) |package_json| {
+ result.module_type = switch (package_json.module_type) {
+ .esm, .cjs => package_json.module_type,
+ .unknown => result.module_type,
+ };
+ }
}
pub fn resolveWithoutSymlinks(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !?Result {
// This implements the module resolution algorithm from node.js, which is
// described here: https://nodejs.org/api/modules.html#modules_all_together
- var result: Result = Result{ .path_pair = PathPair{ .primary = Path.init("") } };
+ var result: Result = Result{ .path_pair = PathPair{ .primary = Path.empty } };
// Return early if this is already an absolute path. In addition to asking
// the file system whether this is an absolute path, we also explicitly check
@@ -969,11 +1000,21 @@ pub fn NewResolver(cache_files: bool) type {
pub fn rootNodeModulePackageJSON(
r: *ThisResolver,
result: *const Result,
+ base_path: *string,
) ?*const PackageJSON {
- const absolute = (result.pathConst() orelse return null).text;
+ const path = (result.pathConst() orelse return null);
+ var absolute = path.text;
// /foo/node_modules/@babel/standalone/index.js
// ^------------^
- var end = strings.lastIndexOf(absolute, node_module_root_string) orelse return null;
+ var end = strings.lastIndexOf(absolute, node_module_root_string) orelse brk: {
+ // try non-symlinked version
+ if (path.pretty.len != absolute.len) {
+ absolute = path.pretty;
+ break :brk strings.lastIndexOf(absolute, node_module_root_string);
+ }
+
+ break :brk null;
+ } orelse return null;
end += node_module_root_string.len;
const is_scoped_package = absolute[end] == '@';
@@ -996,11 +1037,13 @@ pub fn NewResolver(cache_files: bool) type {
// That can cause filesystem lookups in parent directories and it requires a lock
if (result.package_json) |pkg| {
if (strings.eql(slice, pkg.source.path.name.dirWithTrailingSlash())) {
+ base_path.* = absolute;
return pkg;
}
}
const dir_info = (r.dirInfoCached(slice) catch null) orelse return null;
+ base_path.* = absolute;
return dir_info.package_json;
}
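
rootNodeModulePackageJSON now falls back to the pretty (pre-symlink) path when the realpath no longer contains /node_modules/, e.g. when a package is symlinked out of the tree. A sketch of that orelse-chained fallback, with std.mem.lastIndexOf standing in for strings.lastIndexOf and hypothetical paths:

```zig
const std = @import("std");

// Returns the position of "/node_modules/" in `text`, retrying on `pretty`
// when the realpath has escaped node_modules (mirrors the labeled-block
// fallback in the hunk above).
fn nodeModulesIndex(text: []const u8, pretty: []const u8) ?usize {
    const needle = "/node_modules/";
    return std.mem.lastIndexOf(u8, text, needle) orelse brk: {
        if (pretty.len != text.len) {
            break :brk std.mem.lastIndexOf(u8, pretty, needle);
        }
        break :brk null;
    };
}

pub fn main() void {
    // Realpath escapes node_modules; the pretty path still points inside it.
    const text = "/real/packages/foo/index.js";
    const pretty = "/app/node_modules/foo/index.js";
    std.debug.assert(nodeModulesIndex(text, pretty) != null);
    std.debug.assert(nodeModulesIndex("/a/b.js", "/a/b.js") == null);
}
```
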
diff --git a/src/runtime.footer.js b/src/runtime.footer.js
index 293c6d341..4d8e57e03 100644
--- a/src/runtime.footer.js
+++ b/src/runtime.footer.js
@@ -14,3 +14,4 @@ export var __require = BUN_RUNTIME.__require;
export var __name = BUN_RUNTIME.__name;
export var __export = BUN_RUNTIME.__export;
export var __reExport = BUN_RUNTIME.__reExport;
+export var __cJS2eSM = BUN_RUNTIME.__cJS2eSM;
diff --git a/src/runtime.js b/src/runtime.js
index a76b3f077..bcfd4c3fb 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -42,7 +42,7 @@ export var __commonJS = (cb, name) => {
var has_run = false;
return {
- [`[load] ${name}`]() {
+ [`require(${name})`]() {
if (has_run) {
return mod.exports;
}
@@ -53,9 +53,25 @@ export var __commonJS = (cb, name) => {
cb(mod, mod.exports);
+ const kind = typeof mod.exports;
+
// If it's a default-only export, don't crash if they call .default on the module
if (
- typeof mod.exports === "object" &&
+ kind === "object" &&
+ "default" in mod.exports &&
+ Object.keys(mod.exports).len === 1
+ ) {
+ mod.exports = mod.exports.default;
+ Object.defineProperty(mod.exports, "default", {
+ get() {
+ return mod.exports;
+ },
+ enumerable: true,
+ configurable: true,
+ });
+ // If it's a namespace export without .default, pretend .default is the same as mod.exports
+ } else if (
+ kind === "object" &&
"default" in mod.exports &&
Object.keys(mod.exports).len === 1
) {
@@ -69,8 +85,7 @@ export var __commonJS = (cb, name) => {
});
// If it's a namespace export without .default, pretend .default is the same as mod.exports
} else if (
- (typeof mod.exports === "object" ||
- typeof mod.exports === "function") &&
+ (kind === "function" || kind === "object") &&
!("default" in mod.exports)
) {
var defaultValue = mod.exports;
@@ -88,7 +103,11 @@ export var __commonJS = (cb, name) => {
return mod.exports;
},
- }[`[load] ${name}`];
+ }[`require(${name})`];
+};
+
+export var __cJS2eSM = (cb, name) => {
+ return __commonJS(cb, name)();
};
var require_cache = new WeakMap();
diff --git a/src/runtime.version b/src/runtime.version
index ffb6d2620..30f397861 100644
--- a/src/runtime.version
+++ b/src/runtime.version
@@ -1 +1 @@
-cab4fe7bfaf335f6 \ No newline at end of file
+494c65d79c81935d \ No newline at end of file
diff --git a/src/runtime.zig b/src/runtime.zig
index b694aa41a..c426ccb07 100644
--- a/src/runtime.zig
+++ b/src/runtime.zig
@@ -161,7 +161,7 @@ pub const Runtime = struct {
pub const Imports = struct {
__name: ?Ref = null,
__toModule: ?Ref = null,
- __commonJS: ?Ref = null,
+ __cJS2eSM: ?Ref = null,
__require: ?Ref = null,
__export: ?Ref = null,
__reExport: ?Ref = null,
@@ -177,7 +177,7 @@ pub const Runtime = struct {
"__name",
"__toModule",
"__require",
- "__commonJS",
+ "__cJS2eSM",
"__export",
"__reExport",
"__load",