-rw-r--r--  schema.js                          725
-rw-r--r--  src/allocators.zig                 125
-rw-r--r--  src/bundler.zig                    722
-rw-r--r--  src/cache.zig                       33
-rw-r--r--  src/cli.zig                         73
-rw-r--r--  src/defines.zig                     20
-rw-r--r--  src/fs.zig                           4
-rw-r--r--  src/http.zig                         9
-rw-r--r--  src/import_record.zig                6
-rw-r--r--  src/js_ast.zig                      26
-rw-r--r--  src/js_parser/js_parser.zig        308
-rw-r--r--  src/js_printer.zig                  58
-rw-r--r--  src/linker.zig                      36
-rw-r--r--  src/logger.zig                      50
-rw-r--r--  src/node_module_bundle.zig          69
-rw-r--r--  src/options.zig                      9
-rw-r--r--  src/resolver/package_json.zig        7
-rw-r--r--  src/resolver/resolve_path.zig        1
-rw-r--r--  src/resolver/resolver.zig           95
-rw-r--r--  src/test/fixtures/export-from.js     2
-rw-r--r--  src/test/fixtures/nql-define.2.js   39
-rw-r--r--  src/test/fixtures/nql-define.js     41
22 files changed, 1411 insertions, 1047 deletions
diff --git a/schema.js b/schema.js
deleted file mode 100644
index 21509e3d6..000000000
--- a/schema.js
+++ /dev/null
@@ -1,725 +0,0 @@
-const Loader = {
- "1": 1,
- "2": 2,
- "3": 3,
- "4": 4,
- "5": 5,
- "6": 6,
- "7": 7,
- jsx: 1,
- js: 2,
- ts: 3,
- tsx: 4,
- css: 5,
- file: 6,
- json: 7
-};
-const LoaderKeys = {
- "1": "jsx",
- "2": "js",
- "3": "ts",
- "4": "tsx",
- "5": "css",
- "6": "file",
- "7": "json",
- jsx: "jsx",
- js: "js",
- ts: "ts",
- tsx: "tsx",
- css: "css",
- file: "file",
- json: "json"
-};
-const ResolveMode = {
- "1": 1,
- "2": 2,
- "3": 3,
- "4": 4,
- disable: 1,
- lazy: 2,
- dev: 3,
- bundle: 4
-};
-const ResolveModeKeys = {
- "1": "disable",
- "2": "lazy",
- "3": "dev",
- "4": "bundle",
- disable: "disable",
- lazy: "lazy",
- dev: "dev",
- bundle: "bundle"
-};
-const Platform = {
- "1": 1,
- "2": 2,
- browser: 1,
- node: 2
-};
-const PlatformKeys = {
- "1": "browser",
- "2": "node",
- browser: "browser",
- node: "node"
-};
-const JSXRuntime = {
- "1": 1,
- "2": 2,
- automatic: 1,
- classic: 2
-};
-const JSXRuntimeKeys = {
- "1": "automatic",
- "2": "classic",
- automatic: "automatic",
- classic: "classic"
-};
-function decodeJSX(bb) {
- var result = {};
- result["factory"] = bb.readString();
- result["runtime"] = JSXRuntime[bb.readByte()];
- result["fragment"] = bb.readString();
- result["development"] = !!bb.readByte();
- result["import_source"] = bb.readString();
- result["react_fast_refresh"] = !!bb.readByte();
- return result;
-}
-function encodeJSX(message, bb) {
- var value = message["factory"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"factory\"");
- var value = message["runtime"];
- if (value != null) {
- var encoded = JSXRuntime[value];
- if (encoded === undefined)
- throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"JSXRuntime\"");
- bb.writeByte(encoded);
- } else
- throw new Error("Missing required field \"runtime\"");
- var value = message["fragment"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"fragment\"");
- var value = message["development"];
- if (value != null)
- bb.writeByte(value);
- else
- throw new Error("Missing required field \"development\"");
- var value = message["import_source"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"import_source\"");
- var value = message["react_fast_refresh"];
- if (value != null)
- bb.writeByte(value);
- else
- throw new Error("Missing required field \"react_fast_refresh\"");
-}
-function decodeTransformOptions(bb) {
- var result = {};
- while (true)
- switch (bb.readByte()) {
- case 0:
- return result;
- case 1:
- result["jsx"] = decodeJSX(bb);
- break;
- case 2:
- result["tsconfig_override"] = bb.readString();
- break;
- case 3:
- result["resolve"] = ResolveMode[bb.readByte()];
- break;
- case 4:
- result["public_url"] = bb.readString();
- break;
- case 5:
- result["absolute_working_dir"] = bb.readString();
- break;
- case 6:
- var length = bb.readVarUint();
- var values = result["define_keys"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 7:
- var length = bb.readVarUint();
- var values = result["define_values"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 8:
- result["preserve_symlinks"] = !!bb.readByte();
- break;
- case 9:
- var length = bb.readVarUint();
- var values = result["entry_points"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 10:
- result["write"] = !!bb.readByte();
- break;
- case 11:
- var length = bb.readVarUint();
- var values = result["inject"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 12:
- result["output_dir"] = bb.readString();
- break;
- case 13:
- var length = bb.readVarUint();
- var values = result["external"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 14:
- var length = bb.readVarUint();
- var values = result["loader_keys"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 15:
- var length = bb.readVarUint();
- var values = result["loader_values"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = Loader[bb.readByte()];
- break;
- case 16:
- var length = bb.readVarUint();
- var values = result["main_fields"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 17:
- result["platform"] = Platform[bb.readByte()];
- break;
- case 18:
- result["serve"] = !!bb.readByte();
- break;
- case 19:
- var length = bb.readVarUint();
- var values = result["extension_order"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = bb.readString();
- break;
- case 20:
- result["public_dir"] = bb.readString();
- break;
- default:
- throw new Error("Attempted to parse invalid message");
- }
-}
-function encodeTransformOptions(message, bb) {
- var value = message["jsx"];
- if (value != null) {
- bb.writeByte(1);
- encodeJSX(value, bb);
- }
- var value = message["tsconfig_override"];
- if (value != null) {
- bb.writeByte(2);
- bb.writeString(value);
- }
- var value = message["resolve"];
- if (value != null) {
- bb.writeByte(3);
- var encoded = ResolveMode[value];
- if (encoded === undefined)
- throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"ResolveMode\"");
- bb.writeByte(encoded);
- }
- var value = message["public_url"];
- if (value != null) {
- bb.writeByte(4);
- bb.writeString(value);
- }
- var value = message["absolute_working_dir"];
- if (value != null) {
- bb.writeByte(5);
- bb.writeString(value);
- }
- var value = message["define_keys"];
- if (value != null) {
- bb.writeByte(6);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["define_values"];
- if (value != null) {
- bb.writeByte(7);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["preserve_symlinks"];
- if (value != null) {
- bb.writeByte(8);
- bb.writeByte(value);
- }
- var value = message["entry_points"];
- if (value != null) {
- bb.writeByte(9);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["write"];
- if (value != null) {
- bb.writeByte(10);
- bb.writeByte(value);
- }
- var value = message["inject"];
- if (value != null) {
- bb.writeByte(11);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["output_dir"];
- if (value != null) {
- bb.writeByte(12);
- bb.writeString(value);
- }
- var value = message["external"];
- if (value != null) {
- bb.writeByte(13);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["loader_keys"];
- if (value != null) {
- bb.writeByte(14);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["loader_values"];
- if (value != null) {
- bb.writeByte(15);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- var encoded = Loader[value];
- if (encoded === undefined)
- throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
- bb.writeByte(encoded);
- }
- }
- var value = message["main_fields"];
- if (value != null) {
- bb.writeByte(16);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["platform"];
- if (value != null) {
- bb.writeByte(17);
- var encoded = Platform[value];
- if (encoded === undefined)
- throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Platform\"");
- bb.writeByte(encoded);
- }
- var value = message["serve"];
- if (value != null) {
- bb.writeByte(18);
- bb.writeByte(value);
- }
- var value = message["extension_order"];
- if (value != null) {
- bb.writeByte(19);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
- var value = message["public_dir"];
- if (value != null) {
- bb.writeByte(20);
- bb.writeString(value);
- }
- bb.writeByte(0);
-}
-function decodeFileHandle(bb) {
- var result = {};
- result["path"] = bb.readString();
- result["size"] = bb.readVarUint();
- result["fd"] = bb.readVarUint();
- return result;
-}
-function encodeFileHandle(message, bb) {
- var value = message["path"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"path\"");
- var value = message["size"];
- if (value != null)
- bb.writeVarUint(value);
- else
- throw new Error("Missing required field \"size\"");
- var value = message["fd"];
- if (value != null)
- bb.writeVarUint(value);
- else
- throw new Error("Missing required field \"fd\"");
-}
-function decodeTransform(bb) {
- var result = {};
- while (true)
- switch (bb.readByte()) {
- case 0:
- return result;
- case 1:
- result["handle"] = decodeFileHandle(bb);
- break;
- case 2:
- result["path"] = bb.readString();
- break;
- case 3:
- result["contents"] = bb.readByteArray();
- break;
- case 4:
- result["loader"] = Loader[bb.readByte()];
- break;
- case 5:
- result["options"] = decodeTransformOptions(bb);
- break;
- default:
- throw new Error("Attempted to parse invalid message");
- }
-}
-function encodeTransform(message, bb) {
- var value = message["handle"];
- if (value != null) {
- bb.writeByte(1);
- encodeFileHandle(value, bb);
- }
- var value = message["path"];
- if (value != null) {
- bb.writeByte(2);
- bb.writeString(value);
- }
- var value = message["contents"];
- if (value != null) {
- bb.writeByte(3);
- bb.writeByteArray(value);
- }
- var value = message["loader"];
- if (value != null) {
- bb.writeByte(4);
- var encoded = Loader[value];
- if (encoded === undefined)
- throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
- bb.writeByte(encoded);
- }
- var value = message["options"];
- if (value != null) {
- bb.writeByte(5);
- encodeTransformOptions(value, bb);
- }
- bb.writeByte(0);
-}
-const TransformResponseStatus = {
- "1": 1,
- "2": 2,
- success: 1,
- fail: 2
-};
-const TransformResponseStatusKeys = {
- "1": "success",
- "2": "fail",
- success: "success",
- fail: "fail"
-};
-function decodeOutputFile(bb) {
- var result = {};
- result["data"] = bb.readByteArray();
- result["path"] = bb.readString();
- return result;
-}
-function encodeOutputFile(message, bb) {
- var value = message["data"];
- if (value != null)
- bb.writeByteArray(value);
- else
- throw new Error("Missing required field \"data\"");
- var value = message["path"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"path\"");
-}
-function decodeTransformResponse(bb) {
- var result = {};
- result["status"] = TransformResponseStatus[bb.readVarUint()];
- var length = bb.readVarUint();
- var values = result["files"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = decodeOutputFile(bb);
- var length = bb.readVarUint();
- var values = result["errors"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = decodeMessage(bb);
- return result;
-}
-function encodeTransformResponse(message, bb) {
- var value = message["status"];
- if (value != null) {
- var encoded = TransformResponseStatus[value];
- if (encoded === undefined)
- throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"TransformResponseStatus\"");
- bb.writeVarUint(encoded);
- } else
- throw new Error("Missing required field \"status\"");
- var value = message["files"];
- if (value != null) {
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- encodeOutputFile(value, bb);
- }
- } else
- throw new Error("Missing required field \"files\"");
- var value = message["errors"];
- if (value != null) {
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- encodeMessage(value, bb);
- }
- } else
- throw new Error("Missing required field \"errors\"");
-}
-const MessageKind = {
- "1": 1,
- "2": 2,
- "3": 3,
- "4": 4,
- err: 1,
- warn: 2,
- note: 3,
- debug: 4
-};
-const MessageKindKeys = {
- "1": "err",
- "2": "warn",
- "3": "note",
- "4": "debug",
- err: "err",
- warn: "warn",
- note: "note",
- debug: "debug"
-};
-function decodeLocation(bb) {
- var result = {};
- result["file"] = bb.readString();
- result["namespace"] = bb.readString();
- result["line"] = bb.readInt32();
- result["column"] = bb.readInt32();
- result["line_text"] = bb.readString();
- result["suggestion"] = bb.readString();
- result["offset"] = bb.readVarUint();
- return result;
-}
-function encodeLocation(message, bb) {
- var value = message["file"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"file\"");
- var value = message["namespace"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"namespace\"");
- var value = message["line"];
- if (value != null)
- bb.writeInt32(value);
- else
- throw new Error("Missing required field \"line\"");
- var value = message["column"];
- if (value != null)
- bb.writeInt32(value);
- else
- throw new Error("Missing required field \"column\"");
- var value = message["line_text"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"line_text\"");
- var value = message["suggestion"];
- if (value != null)
- bb.writeString(value);
- else
- throw new Error("Missing required field \"suggestion\"");
- var value = message["offset"];
- if (value != null)
- bb.writeVarUint(value);
- else
- throw new Error("Missing required field \"offset\"");
-}
-function decodeMessageData(bb) {
- var result = {};
- while (true)
- switch (bb.readByte()) {
- case 0:
- return result;
- case 1:
- result["text"] = bb.readString();
- break;
- case 2:
- result["location"] = decodeLocation(bb);
- break;
- default:
- throw new Error("Attempted to parse invalid message");
- }
-}
-function encodeMessageData(message, bb) {
- var value = message["text"];
- if (value != null) {
- bb.writeByte(1);
- bb.writeString(value);
- }
- var value = message["location"];
- if (value != null) {
- bb.writeByte(2);
- encodeLocation(value, bb);
- }
- bb.writeByte(0);
-}
-function decodeMessage(bb) {
- var result = {};
- result["kind"] = MessageKind[bb.readVarUint()];
- result["data"] = decodeMessageData(bb);
- var length = bb.readVarUint();
- var values = result["notes"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = decodeMessageData(bb);
- return result;
-}
-function encodeMessage(message, bb) {
- var value = message["kind"];
- if (value != null) {
- var encoded = MessageKind[value];
- if (encoded === undefined)
- throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"MessageKind\"");
- bb.writeVarUint(encoded);
- } else
- throw new Error("Missing required field \"kind\"");
- var value = message["data"];
- if (value != null)
- encodeMessageData(value, bb);
- else
- throw new Error("Missing required field \"data\"");
- var value = message["notes"];
- if (value != null) {
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- encodeMessageData(value, bb);
- }
- } else
- throw new Error("Missing required field \"notes\"");
-}
-function decodeLog(bb) {
- var result = {};
- result["warnings"] = bb.readUint32();
- result["errors"] = bb.readUint32();
- var length = bb.readVarUint();
- var values = result["msgs"] = Array(length);
- for (var i = 0;i < length; i++)
- values[i] = decodeMessage(bb);
- return result;
-}
-function encodeLog(message, bb) {
- var value = message["warnings"];
- if (value != null)
- bb.writeUint32(value);
- else
- throw new Error("Missing required field \"warnings\"");
- var value = message["errors"];
- if (value != null)
- bb.writeUint32(value);
- else
- throw new Error("Missing required field \"errors\"");
- var value = message["msgs"];
- if (value != null) {
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0;i < n; i++) {
- value = values[i];
- encodeMessage(value, bb);
- }
- } else
- throw new Error("Missing required field \"msgs\"");
-}
-
-export {Loader};
-export {LoaderKeys};
-export {ResolveMode};
-export {ResolveModeKeys};
-export {Platform};
-export {PlatformKeys};
-export {JSXRuntime};
-export {JSXRuntimeKeys};
-export {decodeJSX};
-export {encodeJSX};
-export {decodeTransformOptions};
-export {encodeTransformOptions};
-export {decodeFileHandle};
-export {encodeFileHandle};
-export {decodeTransform};
-export {encodeTransform};
-export {TransformResponseStatus};
-export {TransformResponseStatusKeys};
-export {decodeOutputFile};
-export {encodeOutputFile};
-export {decodeTransformResponse};
-export {encodeTransformResponse};
-export {MessageKind};
-export {MessageKindKeys};
-export {decodeLocation};
-export {encodeLocation};
-export {decodeMessageData};
-export {encodeMessageData};
-export {decodeMessage};
-export {encodeMessage};
-export {decodeLog};
-export {encodeLog};
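
Throughout the deleted schema, every optional-field message is framed the same way: a field-id byte, then that field's value, repeated until a terminating 0 byte (see decodeTransformOptions above). A minimal sketch of that framing in Zig, the language of the rest of this diff; the names (Fields, decodeFields) are hypothetical, and the varuint/length-prefixed string layout is an assumption about what bb.readVarUint()/bb.readString() do, not verified against the real ByteBuffer:

    const std = @import("std");

    const Fields = struct {
        tsconfig_override: ?[]const u8 = null,
        write: ?bool = null,
    };

    // Field ids 2 ("tsconfig_override") and 10 ("write") match the deleted
    // decodeTransformOptions; a 0 byte terminates the message.
    fn decodeFields(allocator: *std.mem.Allocator, reader: anytype) !Fields {
        var result = Fields{};
        while (true) {
            switch (try reader.readByte()) {
                0 => return result,
                2 => {
                    // Assumed encoding: varuint length followed by UTF-8 bytes.
                    const len = try std.leb.readULEB128(u32, reader);
                    const buf = try allocator.alloc(u8, len);
                    try reader.readNoEof(buf);
                    result.tsconfig_override = buf;
                },
                10 => result.write = (try reader.readByte()) != 0,
                else => return error.InvalidMessage,
            }
        }
    }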
diff --git a/src/allocators.zig b/src/allocators.zig
index 8ebde9150..1bee408a0 100644
--- a/src/allocators.zig
+++ b/src/allocators.zig
@@ -113,7 +113,8 @@ pub const ItemStatus = enum(u3) {
const hasDeinit = std.meta.trait.hasFn("deinit")(ValueType);
-pub fn BSSList(comptime ValueType: type, comptime count: anytype) type {
+pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
+ const count = _count * 2;
const max_index = count - 1;
var list_type: type = undefined;
var list_count = count;
@@ -233,7 +234,8 @@ pub fn BSSList(comptime ValueType: type, comptime count: anytype) type {
}
};
}
-pub fn BSSStringList(comptime count: usize, comptime item_length: usize) type {
+pub fn BSSStringList(comptime _count: usize, comptime item_length: usize) type {
+ const count = _count * 2;
const max_index = count - 1;
const ValueType = []const u8;
@@ -352,125 +354,6 @@ pub fn BSSStringList(comptime count: usize, comptime item_length: usize) type {
};
}
-pub fn TBSSStringList(comptime count: usize, comptime item_length: usize) type {
- const max_index = count - 1;
- const ValueType = []const u8;
-
- return struct {
- const Allocator = std.mem.Allocator;
- const Self = @This();
-
- pub threadlocal var slice_buf: [count][]const u8 = undefined;
- pub threadlocal var slice_buf_used: u16 = 0;
- pub threadlocal var backing_buf: [count * item_length]u8 = undefined;
- pub threadlocal var backing_buf_used: u64 = undefined;
- pub threadlocal var instance: Self = undefined;
- pub const ListIndex = packed struct {
- index: u31,
- is_overflowing: bool = false,
- };
- overflow_list: std.ArrayListUnmanaged(ValueType),
- allocator: *Allocator,
-
- pub fn init(allocator: *std.mem.Allocator) *Self {
- instance = Self{
- .allocator = allocator,
- .overflow_list = std.ArrayListUnmanaged(ValueType){},
- };
-
- return &instance;
- }
-
- pub fn isOverflowing() bool {
- return slice_buf_used >= @as(u16, count);
- }
-
- pub fn at(self: *const Self, index: IndexType) ?ValueType {
- if (index.index == NotFound.index or index.index == Unassigned.index) return null;
-
- if (index.is_overflowing) {
- return &self.overflow_list.items[index.index];
- } else {
- return &slice_buf[index.index];
- }
- }
-
- pub fn exists(self: *Self, value: ValueType) bool {
- return isSliceInBuffer(value, slice_buf);
- }
-
- pub fn editableSlice(slice: []const u8) []u8 {
- return constStrToU8(slice);
- }
-
- pub fn append(self: *Self, _value: anytype) ![]const u8 {
- var value = _value;
- if (value.len + backing_buf_used < backing_buf.len - 1) {
- const start = backing_buf_used;
- backing_buf_used += value.len;
- std.mem.copy(u8, backing_buf[start..backing_buf_used], _value);
- value = backing_buf[start..backing_buf_used];
- } else {
- value = try self.allocator.dupe(u8, _value);
- }
-
- var result = ListIndex{ .index = std.math.maxInt(u31), .is_overflowing = slice_buf_used > max_index };
-
- if (result.is_overflowing) {
- result.index = @intCast(u31, self.overflow_list.items.len);
- } else {
- result.index = slice_buf_used;
- slice_buf_used += 1;
- if (slice_buf_used >= max_index) {
- self.overflow_list = try @TypeOf(self.overflow_list).initCapacity(self.allocator, count);
- }
- }
-
- if (result.is_overflowing) {
- if (self.overflow_list.items.len == result.index) {
- const real_index = self.overflow_list.items.len;
- try self.overflow_list.append(self.allocator, value);
- } else {
- self.overflow_list.items[result.index] = value;
- }
-
- return self.overflow_list.items[result.index];
- } else {
- slice_buf[result.index] = value;
-
- return slice_buf[result.index];
- }
- }
-
- pub fn remove(self: *Self, index: ListIndex) void {
- @compileError("Not implemented yet.");
- // switch (index) {
- // Unassigned.index => {
- // self.index.remove(_key);
- // },
- // NotFound.index => {
- // self.index.remove(_key);
- // },
- // 0...max_index => {
- // if (hasDeinit(ValueType)) {
- // slice_buf[index].deinit();
- // }
- // slice_buf[index] = undefined;
- // },
- // else => {
- // const i = index - count;
- // if (hasDeinit(ValueType)) {
- // self.overflow_list.items[i].deinit();
- // }
- // self.overflow_list.items[index - count] = undefined;
- // },
- // }
-
- // return index;
- }
- };
-}
-
pub fn BSSMap(comptime ValueType: type, comptime count: anytype, store_keys: bool, estimated_key_length: usize) type {
const max_index = count - 1;
const BSSMapType = struct {
diff --git a/src/bundler.zig b/src/bundler.zig
index aa630e2e0..732fa354e 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -25,6 +25,7 @@ const MimeType = @import("./http/mime_type.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
const runtime = @import("./runtime.zig");
const Timer = @import("./timer.zig");
+const hash_map = @import("hash_map.zig");
const DebugLogs = _resolver.DebugLogs;
@@ -101,6 +102,29 @@ pub const ParseResult = struct {
ast: js_ast.Ast,
};
+pub const ScanResult = struct {
+ path: Fs.Path,
+ is_node_module: bool = false,
+ file_size: u32 = 0,
+ import_record_start: u32,
+ import_record_length: u32,
+
+ pub const Summary = struct {
+ import_records: std.ArrayList(ImportRecord),
+ scan_results: std.ArrayList(ScanResult),
+ pub fn list(summary: *const Summary) List {
+ return List{
+ .import_records = summary.import_records.items,
+ .scan_results = summary.scan_results.items,
+ };
+ }
+ pub const List = struct {
+ import_records: []ImportRecord,
+ scan_results: []ScanResult,
+ };
+ };
+};
+
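
Each ScanResult addresses its imports as an (import_record_start, import_record_length) window into the shared import_records list, so Summary.list() can expose plain slices without copying. A minimal consumption sketch, assuming an already-populated summary (see scanDependencies further down):

    const view = summary.list();
    for (view.scan_results) |scan_result| {
        // Slice this file's records out of the shared, flattened list.
        const records = view.import_records[scan_result.import_record_start..][0..scan_result.import_record_length];
        for (records) |record| {
            Output.print("{s} imports {s}\n", .{ scan_result.path.text, record.path.text });
        }
    }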
pub fn NewBundler(cache_files: bool) type {
return struct {
const Linker = if (cache_files) linker.Linker else linker.ServeLinker;
@@ -136,6 +160,8 @@ pub fn NewBundler(cache_files: bool) type {
) !ThisBundler {
js_ast.Expr.Data.Store.create(allocator);
js_ast.Stmt.Data.Store.create(allocator);
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
var fs = try Fs.FileSystem.init1(allocator, opts.absolute_working_dir, opts.serve orelse false);
const bundle_options = try options.BundleOptions.fromApi(allocator, fs, log, opts);
@@ -175,6 +201,394 @@ pub fn NewBundler(cache_files: bool) type {
js_ast.Stmt.Data.Store.reset();
}
+ pub const GenerateNodeModuleBundle = struct {
+ module_list: std.ArrayList(Api.JavascriptBundledModule),
+ package_list: std.ArrayList(Api.JavascriptBundledPackage),
+ header_string_buffer: MutableString,
+ // Just need to know if we've already enqueued this one
+ resolved_paths: hash_map.StringHashMap(void),
+ package_list_map: hash_map.StringHashMap(u32),
+ resolve_queue: std.fifo.LinearFifo(_resolver.Result, .Dynamic),
+ bundler: *ThisBundler,
+ allocator: *std.mem.Allocator,
+ scan_pass_result: js_parser.ScanPassResult,
+ tmpfile: std.fs.File,
+ log: *logger.Log,
+ tmpfile_byte_offset: u32 = 0,
+ code_end_byte_offset: u32 = 0,
+
+ pub const current_version: u32 = 1;
+
+ // The Speedy Bundle Format
+ // Your entire node_modules folder in a single compact file designed for web browsers.
+ // A binary JavaScript bundle format prioritizing bundle time and serialization/deserialization time
+ pub const magic_bytes = "#!/usr/bin/env speedy\n\n";
+ // This makes it possible to run ./path-to-bundle on posix systems, and since the code is stored as plain text you can still see the raw JS contents
+ // https://en.wikipedia.org/wiki/Magic_number_(programming)#In_files
+ // Immediately after the magic bytes, the next four bytes are a uint32, followed by a newline
+ // 0x00000000\n
+ // That uint32 denotes the byte offset in the file where the code for the bundle ends
+ // - If the value is 0, that means the file did not finish writing or there are no modules
+ // - This imposes a maximum bundle size of around 4,294,967,295 bytes. If your JS is more than 4 GB, you probably should fix that...
+ // The raw JavaScript is encoded as a UTF-8 string starting from the current position + 1 until the above byte offset.
+ // This uint32 is useful for HTTP servers to separate:
+ // - Which part of the bundle is the JS code?
+ // - Which part is the metadata?
+ // Without needing to do a full pass through the file.
+ // The metadata is at the bottom of the file instead of the top because the metadata is generated after the entire bundle is written.
+ // The rationale there is:
+ // 1. We cannot prepend to a file without a pass over the entire file
+ // 2. The metadata is variable-length and that format will change more often. Perhaps different bundlers will generate different metadata.
+ // If you have 32 MB of JavaScript dependencies, the only time it's acceptable to do a full pass is when sending it over HTTP via sendfile()
+ // So instead, we append to the file after printing each node_module
+ // When there are no more modules to process, we generate the metadata
+ // To find the metadata, you look at the byte offset: initial_header[magic_bytes.len..initial_header.len - 1]
+ // Then, you add that number to initial_header.len
+ const initial_header = brk: {
+ var buf = std.mem.zeroes([magic_bytes.len + 5]u8);
+ std.mem.copy(u8, &buf, magic_bytes);
+ var remainder = buf[magic_bytes.len..];
+ // Write an invalid byte offset to be updated after the file ends
+ std.mem.writeIntNative(u32, remainder[0 .. remainder.len - 1], 0);
+ buf[buf.len - 1] = '\n';
+ break :brk buf;
+ };
+ const code_start_byte_offset: u32 = initial_header.len;
+
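Given the layout documented above, a reader can split a bundle into its code and metadata regions from the fixed-size header alone, without scanning the code. A minimal sketch, assuming the reader shares the writer's native endianness (readBundleRegions is a hypothetical helper, not part of this commit):

    fn readBundleRegions(file: std.fs.File) !struct { code_end: u32 } {
        var header: [magic_bytes.len + 5]u8 = undefined;
        try file.reader().readNoEof(&header);
        if (!std.mem.eql(u8, header[0..magic_bytes.len], magic_bytes))
            return error.NotABundle;
        // The uint32 sits right after the magic bytes, before the trailing '\n'.
        const code_end = std.mem.readIntNative(u32, header[magic_bytes.len..][0..4]);
        // 0 means the writer never finished (or the bundle has no modules).
        if (code_end == 0) return error.IncompleteBundle;
        // JS code:   bytes [header.len .. header.len + code_end)
        // Metadata:  everything after header.len + code_end
        return .{ .code_end = code_end };
    }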
+ pub fn appendHeaderString(generator: *GenerateNodeModuleBundle, str: string) !Api.StringPointer {
+ var offset = generator.header_string_buffer.list.items.len;
+ try generator.header_string_buffer.append(str);
+ return Api.StringPointer{
+ .offset = @truncate(u32, offset),
+ .length = @truncate(u32, str.len),
+ };
+ }
+
+ pub fn generate(bundler: *ThisBundler, allocator: *std.mem.Allocator) !void {
+ var tmpdir: std.fs.Dir = bundler.fs.tmpdir();
+ const tmpname = try bundler.fs.tmpname(".jsbundle");
+
+ var tmpfile = try tmpdir.createFile(tmpname, .{});
+ var generator = GenerateNodeModuleBundle{
+ .module_list = std.ArrayList(Api.JavascriptBundledModule).init(allocator),
+ .package_list = std.ArrayList(Api.JavascriptBundledPackage).init(allocator),
+ .scan_pass_result = js_parser.ScanPassResult.init(allocator),
+ .header_string_buffer = try MutableString.init(allocator, 0),
+ .allocator = allocator,
+ .resolved_paths = hash_map.StringHashMap(void).init(allocator),
+ .resolve_queue = std.fifo.LinearFifo(_resolver.Result, .Dynamic).init(allocator),
+ .bundler = bundler,
+ .tmpfile = tmpfile,
+ .log = bundler.log,
+ .package_list_map = hash_map.StringHashMap(u32).init(allocator),
+ };
+ var this = &generator;
+ // Always inline the runtime into the bundle
+ try generator.appendBytes(initial_header ++ runtime.SourceContent ++ "\n\n");
+
+ if (isDebug) {
+ generator.log.level = .verbose;
+ bundler.resolver.debug_logs = try DebugLogs.init(allocator);
+ }
+
+ for (bundler.options.entry_points) |entry_point| {
+ const entry_point_path = bundler.normalizeEntryPointPath(entry_point);
+ const source_dir = bundler.fs.top_level_dir;
+ const resolved = try bundler.linker.resolver.resolve(source_dir, entry_point_path, .entry_point);
+ try this.resolve_queue.writeItem(resolved);
+ }
+
+ while (this.resolve_queue.readItem()) |resolved| {
+ try this.processFile(resolved);
+ }
+ // Ensure we never overflow
+ this.code_end_byte_offset = @truncate(
+ u32,
+ std.math.max(this.tmpfile_byte_offset, @truncate(u32, initial_header.len)) - initial_header.len,
+ );
+ if (isDebug) {
+ Output.print(
+ "Wrote {d} bytes of code for {d} modules and {d} packages\n",
+ .{ this.code_end_byte_offset - code_start_byte_offset, this.module_list.items.len, this.package_list.items.len },
+ );
+ }
+ var javascript_bundle_container = std.mem.zeroes(Api.JavascriptBundleContainer);
+
+ std.sort.sort(Api.JavascriptBundledModule, this.module_list.items, this, GenerateNodeModuleBundle.sortJavascriptModuleByPath);
+ var hasher = std.hash.Wyhash.init(0);
+
+ if (this.module_list.items.len > 0) {
+ var i: usize = 0;
+ // Assumption: packages are immutable
+ // Assumption: module files are immutable
+ // The etag is the hash of each module's path in sorted order
+ // followed by the hash of package-name@version
+ // This lets added or removed files, as well as package version changes,
+ // force the bundle to be regenerated
+ while (i < this.module_list.items.len) {
+ var current_package_id = this.module_list.items[i].package_id;
+ var offset = @truncate(u32, i);
+ hasher.update(this.metadataStringPointer(this.module_list.items[i].path));
+
+ i += 1;
+
+ while (i < this.module_list.items.len and this.module_list.items[i].package_id == current_package_id) : (i += 1) {
+ hasher.update(this.metadataStringPointer(this.module_list.items[i].path));
+ }
+
+ this.package_list.items[current_package_id].modules_offset = offset;
+ this.package_list.items[current_package_id].modules_length = @truncate(u32, i) - offset;
+
+ var bytes: [4]u8 = undefined;
+ std.mem.writeIntNative(u32, &bytes, this.package_list.items[current_package_id].hash);
+ hasher.update(&bytes);
+ }
+ }
+
+ var javascript_bundle = std.mem.zeroes(Api.JavascriptBundle);
+ javascript_bundle.modules = this.module_list.items;
+ javascript_bundle.packages = this.package_list.items;
+ javascript_bundle.manifest_string = this.header_string_buffer.list.items;
+
+ javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp()));
+
+ var from_name = "node_modules.jsbundle".*;
+ javascript_bundle.import_from_name = &from_name;
+
+ var etag_bytes: [8]u8 = undefined;
+ std.mem.writeIntNative(u64, &etag_bytes, hasher.final());
+ javascript_bundle.etag = &etag_bytes;
+
+ javascript_bundle_container.bundle_format_version = current_version;
+ javascript_bundle_container.bundle = javascript_bundle;
+ javascript_bundle_container.code_length = this.code_end_byte_offset;
+
+ var tmpwriter = this.tmpfile.writer();
+ try javascript_bundle_container.encode(tmpwriter);
+ try this.tmpfile.seekTo(magic_bytes.len);
+ var code_length_bytes: [4]u8 = undefined;
+ std.mem.writeIntNative(u32, &code_length_bytes, this.code_end_byte_offset);
+ try this.tmpfile.writeAll(&code_length_bytes);
+
+ const top_dir = try std.fs.openDirAbsolute(this.bundler.fs.top_level_dir, .{});
+ try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, "node_modules.jsbundle");
+
+ // Print any errors at the end
+ try this.log.print(Output.errorWriter());
+
+ if (isDebug) {
+ Output.println("Saved node_modules.jsbundle", .{});
+ }
+ }
+
+ pub fn metadataStringPointer(this: *GenerateNodeModuleBundle, ptr: Api.StringPointer) string {
+ return this.header_string_buffer.list.items[ptr.offset .. ptr.offset + ptr.length];
+ }
+
+ pub fn sortJavascriptModuleByPath(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledModule, b: Api.JavascriptBundledModule) bool {
+ return std.mem.order(u8, ctx.metadataStringPointer(a.path), ctx.metadataStringPointer(b.path)) == .lt;
+ }
+
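Since every metadata string is interned into the single header_string_buffer, Api.StringPointer is just an (offset, length) view into it, and metadataStringPointer inverts appendHeaderString exactly. A short round-trip sketch, assuming an initialized generator:

    const ptr = try generator.appendHeaderString("react/index.js");
    const same = generator.metadataStringPointer(ptr);
    std.debug.assert(std.mem.eql(u8, same, "react/index.js"));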
+ // pub fn sortJavascriptPackageByName(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledPackage, b: Api.JavascriptBundledPackage) bool {
+ // return std.mem.order(u8, ctx.metadataStringPointer(a.name), ctx.metadataStringPointer(b.name)) == .lt;
+ // }
+
+ pub fn appendBytes(generator: *GenerateNodeModuleBundle, bytes: anytype) !void {
+ try generator.tmpfile.writeAll(bytes);
+ generator.tmpfile_byte_offset += @truncate(u32, bytes.len);
+ }
+
+ fn processImportRecord(this: *GenerateNodeModuleBundle, import_record: ImportRecord) !void {}
+ threadlocal var package_key_buf: [512]u8 = undefined;
+ fn processFile(this: *GenerateNodeModuleBundle, _resolve: _resolver.Result) !void {
+ var resolve = _resolve;
+ if (resolve.is_external) return;
+ const node_module_root_string = comptime "node_modules" ++ std.fs.path.sep_str;
+ resolve.is_from_node_modules = strings.contains(resolve.path_pair.primary.text, node_module_root_string);
+ const loader = this.bundler.options.loaders.get(resolve.path_pair.primary.name.ext) orelse .file;
+ var bundler = this.bundler;
+ defer this.scan_pass_result.reset();
+ defer this.bundler.resetStore();
+ const file_path = resolve.path_pair.primary;
+
+ // If we're in a node_module, build that almost normally
+ if (resolve.is_from_node_modules) {
+ switch (loader) {
+ .jsx,
+ .tsx,
+ .js,
+ .ts,
+ => {
+ const entry = try bundler.resolver.caches.fs.readFile(
+ bundler.fs,
+ file_path.text,
+ resolve.dirname_fd,
+ true,
+ );
+ const source = logger.Source.initFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return;
+ const source_dir = file_path.name.dir;
+
+ var jsx = bundler.options.jsx;
+ jsx.parse = loader.isJSX();
+ var opts = js_parser.Parser.Options.init(jsx, loader);
+ opts.output_commonjs = true;
+ var ast: js_ast.Ast = (try bundler.resolver.caches.js.parse(
+ bundler.allocator,
+ opts,
+ bundler.options.define,
+ this.log,
+ &source,
+ )) orelse return;
+
+ for (ast.import_records) |*import_record, record_id| {
+
+ // Don't resolve the runtime
+ if (import_record.is_internal) {
+ continue;
+ }
+
+ if (bundler.linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*_resolved_import| {
+ const resolved_import: *const _resolver.Result = _resolved_import;
+ if (resolved_import.is_external) {
+ continue;
+ }
+
+ const absolute_path = resolved_import.path_pair.primary.text;
+
+ // It should be the first index, not the last, to support bundling multiple copies of the same package
+ if (strings.indexOf(absolute_path, node_module_root_string)) |node_module_start| {
+ import_record.path = Fs.Path.init(absolute_path[node_module_root_string.len + node_module_start ..]);
+ }
+
+ const get_or_put_result = try this.resolved_paths.getOrPut(absolute_path);
+
+ if (get_or_put_result.found_existing) {
+ continue;
+ }
+
+ try this.resolve_queue.writeItem(_resolved_import.*);
+ } else |err| {}
+ }
+
+ const code_offset = this.tmpfile_byte_offset - code_start_byte_offset;
+ var writer = js_printer.NewFileWriter(this.tmpfile);
+ var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
+
+ const code_length = @truncate(
+ u32,
+ try js_printer.printCommonJS(
+ @TypeOf(writer),
+ writer,
+ ast,
+ js_ast.Symbol.Map.initList(symbols),
+ &source,
+ false,
+ js_printer.Options{
+ .to_module_ref = Ref.RuntimeRef,
+ .externals = ast.externals,
+ // Indent by one
+ .indent = 1,
+ .runtime_imports = ast.runtime_imports,
+ },
+ Linker,
+ &bundler.linker,
+ ),
+ );
+ this.tmpfile_byte_offset += code_length;
+
+ const package_name = resolve.package_json_name.?;
+ const package_version = resolve.package_json_version.?;
+
+ const package_id_key = try std.fmt.bufPrint(&package_key_buf, "{s}@{s}", .{ package_name, package_version });
+ const package_id_key_hash = @TypeOf(this.package_list_map).getHash(package_id_key);
+ var package_get_or_put_entry = try this.package_list_map.getOrPutWithHash(package_id_key, package_id_key_hash);
+ if (!package_get_or_put_entry.found_existing) {
+ package_get_or_put_entry.entry.value = @truncate(u32, this.package_list.items.len);
+ try this.package_list.append(
+ Api.JavascriptBundledPackage{
+ .name = try this.appendHeaderString(package_name),
+ .version = try this.appendHeaderString(package_version),
+ .hash = @truncate(u32, package_id_key_hash),
+ },
+ );
+ }
+ const node_module_root = strings.indexOf(resolve.path_pair.primary.text, node_module_root_string) orelse unreachable;
+
+ try this.module_list.append(
+ Api.JavascriptBundledModule{
+ .path = try this.appendHeaderString(resolve.path_pair.primary.text[node_module_root + node_module_root_string.len ..]),
+ .package_id = package_get_or_put_entry.entry.value,
+ .code = Api.StringPointer{
+ .length = @truncate(u32, code_length),
+ .offset = @truncate(u32, code_offset),
+ },
+ },
+ );
+ },
+ else => {},
+ }
+ } else {
+ // If it's app code, scan but do not fully parse.
+ switch (loader) {
+ .jsx,
+ .tsx,
+ .js,
+ .ts,
+ => {
+ const entry = bundler.resolver.caches.fs.readFile(
+ bundler.fs,
+ file_path.text,
+ resolve.dirname_fd,
+ true,
+ ) catch return;
+
+ const source = logger.Source.initFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return;
+ const source_dir = file_path.name.dir;
+
+ var jsx = bundler.options.jsx;
+ jsx.parse = loader.isJSX();
+ var opts = js_parser.Parser.Options.init(jsx, loader);
+
+ try bundler.resolver.caches.js.scan(
+ bundler.allocator,
+ &this.scan_pass_result,
+ opts,
+ bundler.options.define,
+ this.log,
+ &source,
+ );
+
+ for (this.scan_pass_result.import_records.items) |*import_record, i| {
+ if (import_record.is_internal) {
+ continue;
+ }
+
+ if (bundler.linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*_resolved_import| {
+ const resolved_import: *const _resolver.Result = _resolved_import;
+ if (resolved_import.is_external) {
+ continue;
+ }
+
+ const get_or_put_result = try this.resolved_paths.getOrPut(resolved_import.path_pair.primary.text);
+
+ if (get_or_put_result.found_existing) {
+ continue;
+ }
+
+ try this.resolve_queue.writeItem(_resolved_import.*);
+ } else |err| {}
+ }
+ },
+ // TODO:
+ else => {
+ return;
+ },
+ }
+ }
+ }
+ };
+
pub fn buildWithResolveResult(
bundler: *ThisBundler,
resolve_result: _resolver.Result,
@@ -182,6 +596,7 @@ pub fn NewBundler(cache_files: bool) type {
loader: options.Loader,
comptime Writer: type,
writer: Writer,
+ comptime import_path_format: options.BundleOptions.ImportPathFormat,
) !usize {
if (resolve_result.is_external) {
return 0;
@@ -202,7 +617,7 @@ pub fn NewBundler(cache_files: bool) type {
var old_linker_allocator = bundler.linker.allocator;
defer bundler.linker.allocator = old_linker_allocator;
bundler.linker.allocator = allocator;
- try bundler.linker.link(file_path, &result);
+ try bundler.linker.link(file_path, &result, import_path_format);
return try bundler.print(
result,
@@ -213,14 +628,17 @@ pub fn NewBundler(cache_files: bool) type {
}
- pub fn buildWithResolveResultEager(bundler: *ThisBundler, resolve_result: _resolver.Result) !?options.OutputFile {
+ pub fn buildWithResolveResultEager(
+ bundler: *ThisBundler,
+ resolve_result: _resolver.Result,
+ comptime import_path_format: options.BundleOptions.ImportPathFormat,
+ comptime Outstream: type,
+ outstream: Outstream,
+ ) !?options.OutputFile {
if (resolve_result.is_external) {
return null;
}
- errdefer js_ast.Expr.Data.Store.reset();
- errdefer js_ast.Stmt.Data.Store.reset();
-
// Step 1. Parse & scan
const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
var file_path = resolve_result.path_pair.primary;
@@ -229,24 +647,33 @@ pub fn NewBundler(cache_files: bool) type {
switch (loader) {
.jsx, .tsx, .js, .ts, .json => {
var result = bundler.parse(bundler.allocator, file_path, loader, resolve_result.dirname_fd) orelse {
- js_ast.Expr.Data.Store.reset();
- js_ast.Stmt.Data.Store.reset();
return null;
};
- try bundler.linker.link(file_path, &result);
+ try bundler.linker.link(
+ file_path,
+ &result,
+ import_path_format,
+ );
var output_file = options.OutputFile{
.input = file_path,
.loader = loader,
.value = undefined,
};
- const output_dir = bundler.options.output_dir_handle.?;
- if (std.fs.path.dirname(file_path.pretty)) |dirname| {
- try output_dir.makePath(dirname);
+ var file: std.fs.File = undefined;
+
+ if (Outstream == std.fs.Dir) {
+ const output_dir = outstream;
+
+ if (std.fs.path.dirname(file_path.pretty)) |dirname| {
+ try output_dir.makePath(dirname);
+ }
+ file = try output_dir.createFile(file_path.pretty, .{});
+ } else {
+ file = outstream;
}
- var file = try output_dir.createFile(file_path.pretty, .{});
output_file.size = try bundler.print(
result,
js_printer.FileWriter,
@@ -254,15 +681,19 @@ pub fn NewBundler(cache_files: bool) type {
);
var file_op = options.OutputFile.FileOperation.fromFile(file.handle, file_path.pretty);
- file_op.dir = output_dir.fd;
+
file_op.fd = file.handle;
- if (bundler.fs.fs.needToCloseFiles()) {
- file.close();
- file_op.fd = 0;
- }
file_op.is_tmpdir = false;
output_file.value = .{ .move = file_op };
+ if (Outstream == std.fs.Dir) {
+ file_op.dir = outstream.fd;
+
+ if (bundler.fs.fs.needToCloseFiles()) {
+ file.close();
+ file_op.fd = 0;
+ }
+ }
return output_file;
},
// TODO:
@@ -272,6 +703,73 @@ pub fn NewBundler(cache_files: bool) type {
}
}
+ pub fn scanWithResolveResult(
+ bundler: *ThisBundler,
+ resolve_result: _resolver.Result,
+ scan_pass_result: *js_parser.ScanPassResult,
+ ) !?ScanResult {
+ if (resolve_result.is_external) {
+ return null;
+ }
+ var import_records = &scan_pass_result.import_records;
+ var named_imports = &scan_pass_result.named_imports;
+ errdefer js_ast.Expr.Data.Store.reset();
+ errdefer js_ast.Stmt.Data.Store.reset();
+
+ // Step 1. Parse & scan
+ const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
+ var file_path = resolve_result.path_pair.primary;
+ file_path.pretty = Linker.relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
+
+ switch (loader) {
+ .jsx, .tsx, .js, .ts, .json => {
+ const entry = bundler.resolver.caches.fs.readFile(
+ bundler.fs,
+ file_path.text,
+ resolve_result.dirname_fd,
+ !cache_files,
+ ) catch return null;
+
+ const source = logger.Source.initFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null;
+ const source_dir = file_path.name.dir;
+
+ var jsx = bundler.options.jsx;
+ jsx.parse = loader.isJSX();
+ var opts = js_parser.Parser.Options.init(jsx, loader);
+
+ var result = ScanResult{
+ .path = file_path,
+ .file_size = @truncate(u32, source.contents.len),
+ .is_node_module = resolve_result.is_from_node_modules or strings.contains(file_path.text, "node_modules" ++ std.fs.path.sep_str),
+ .import_record_start = @truncate(u32, import_records.items.len),
+ .import_record_length = 0,
+ };
+
+ try bundler.resolver.caches.js.scan(
+ bundler.allocator,
+ scan_pass_result,
+ opts,
+ bundler.options.define,
+ bundler.log,
+ &source,
+ );
+ result.import_record_length = @truncate(u32, import_records.items.len - result.import_record_start);
+ for (import_records.items[result.import_record_start..import_records.items.len]) |*import_record, i| {
+ if (bundler.linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
+ if (resolved_import.is_external) {
+ continue;
+ }
+ } else |err| {}
+ }
+ return result;
+ },
+ // TODO:
+ else => {
+ return null;
+ },
+ }
+ }
+
pub fn print(
bundler: *ThisBundler,
result: ParseResult,
@@ -531,6 +1029,124 @@ pub fn NewBundler(cache_files: bool) type {
}
}
+ pub fn normalizeEntryPointPath(bundler: *ThisBundler, _entry: string) string {
+ var paths = [_]string{_entry};
+ var entry = bundler.fs.abs(&paths);
+
+ std.fs.accessAbsolute(entry, .{}) catch |err| {
+ return _entry;
+ };
+
+ entry = bundler.fs.relativeTo(entry);
+
+ if (!strings.startsWith(entry, "./")) {
+ // Entry point paths without a leading "./" are interpreted as package
+ // paths. This happens because they go through general path resolution
+ // like all other import paths so that plugins can run on them. Requiring
+ // a leading "./" for a relative path simplifies writing plugins because
+ // entry points aren't a special case.
+ //
+ // However, requiring a leading "./" also breaks backward compatibility
+ // and makes working with the CLI more difficult. So attempt to insert
+ // "./" automatically when needed. We don't want to unconditionally insert
+ // a leading "./" because the path may not be a file system path. For
+ // example, it may be a URL. So only insert a leading "./" when the path
+ // is an exact match for an existing file.
+ var __entry = bundler.allocator.alloc(u8, "./".len + entry.len) catch unreachable;
+ __entry[0] = '.';
+ __entry[1] = '/';
+ std.mem.copy(u8, __entry[2..__entry.len], entry);
+ entry = __entry;
+ }
+
+ return entry;
+ }
+
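The net effect, sketched below under the assumption that paths resolve against the current working directory: bare specifiers that name a real file gain a leading "./", and anything that fails the access check is returned untouched.

    // "src/index.tsx"   -> "./src/index.tsx"  (file exists, "./" prepended)
    // "./src/index.tsx" -> "./src/index.tsx"  (already explicit)
    // "not-a-file"      -> "not-a-file"       (accessAbsolute fails; passed through)
    const entry = bundler.normalizeEntryPointPath("src/index.tsx");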
+ pub fn scanDependencies(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ _opts: Api.TransformOptions,
+ ) !ScanResult.Summary {
+ var opts = _opts;
+ opts.resolve = .dev;
+ var bundler = try ThisBundler.init(allocator, log, opts);
+
+ bundler.configureLinker();
+
+ var entry_points = try allocator.alloc(_resolver.Result, bundler.options.entry_points.len);
+
+ if (isDebug) {
+ log.level = .verbose;
+ bundler.resolver.debug_logs = try DebugLogs.init(allocator);
+ }
+
+ var rfs: *Fs.FileSystem.RealFS = &bundler.fs.fs;
+
+ var entry_point_i: usize = 0;
+ for (bundler.options.entry_points) |_entry| {
+ var entry: string = bundler.normalizeEntryPointPath(_entry);
+
+ defer {
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
+ }
+
+ const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| {
+ Output.printError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) });
+ continue;
+ };
+
+ const key = result.path_pair.primary.text;
+ if (bundler.resolve_results.contains(key)) {
+ continue;
+ }
+ try bundler.resolve_results.put(key, result);
+ entry_points[entry_point_i] = result;
+
+ if (isDebug) {
+ Output.print("Resolved {s} => {s}", .{ entry, result.path_pair.primary.text });
+ }
+
+ entry_point_i += 1;
+ bundler.resolve_queue.writeItem(result) catch unreachable;
+ }
+ var scan_results = std.ArrayList(ScanResult).init(allocator);
+ var scan_pass_result = js_parser.ScanPassResult.init(allocator);
+
+ switch (bundler.options.resolve_mode) {
+ .lazy, .dev, .bundle => {
+ while (bundler.resolve_queue.readItem()) |item| {
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
+ scan_pass_result.named_imports.clearRetainingCapacity();
+ scan_results.append(bundler.scanWithResolveResult(item, &scan_pass_result) catch continue orelse continue) catch continue;
+ }
+ },
+ else => Global.panic("Unsupported resolve mode: {s}", .{@tagName(bundler.options.resolve_mode)}),
+ }
+
+ // if (log.level == .verbose) {
+ // for (log.msgs.items) |msg| {
+ // try msg.writeFormat(std.io.getStdOut().writer());
+ // }
+ // }
+
+ if (FeatureFlags.tracing) {
+ Output.printError(
+ "\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n",
+ .{
+ bundler.resolver.elapsed,
+ bundler.elapsed,
+ },
+ );
+ }
+
+ return ScanResult.Summary{
+ .scan_results = scan_results,
+ .import_records = scan_pass_result.import_records,
+ };
+ }
+
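An end-to-end sketch of calling scanDependencies, with the Api.TransformOptions construction elided and Bundler standing in for either NewBundler(true) or NewBundler(false):

    const summary = try Bundler.scanDependencies(allocator, log, opts);
    const view = summary.list();
    // All files reachable from the entry points, with their flattened imports.
    Output.print("scanned {d} files and {d} import records\n", .{
        view.scan_results.len,
        view.import_records.len,
    });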
pub fn bundle(
allocator: *std.mem.Allocator,
log: *logger.Log,
@@ -557,27 +1173,7 @@ pub fn NewBundler(cache_files: bool) type {
var entry_point_i: usize = 0;
for (bundler.options.entry_points) |_entry| {
- var entry: string = _entry;
-
- if (!strings.startsWith(entry, "./")) {
- // Entry point paths without a leading "./" are interpreted as package
- // paths. This happens because they go through general path resolution
- // like all other import paths so that plugins can run on them. Requiring
- // a leading "./" for a relative path simplifies writing plugins because
- // entry points aren't a special case.
- //
- // However, requiring a leading "./" also breaks backward compatibility
- // and makes working with the CLI more difficult. So attempt to insert
- // "./" automatically when needed. We don't want to unconditionally insert
- // a leading "./" because the path may not be a file system path. For
- // example, it may be a URL. So only insert a leading "./" when the path
- // is an exact match for an existing file.
- var __entry = allocator.alloc(u8, "./".len + entry.len) catch unreachable;
- __entry[0] = '.';
- __entry[1] = '/';
- std.mem.copy(u8, __entry[2..__entry.len], entry);
- entry = __entry;
- }
+ var entry: string = bundler.normalizeEntryPointPath(_entry);
defer {
js_ast.Expr.Data.Store.reset();
@@ -604,16 +1200,27 @@ pub fn NewBundler(cache_files: bool) type {
bundler.resolve_queue.writeItem(result) catch unreachable;
}
- switch (bundler.options.resolve_mode) {
- .lazy, .dev, .bundle => {
- while (bundler.resolve_queue.readItem()) |item| {
- js_ast.Expr.Data.Store.reset();
- js_ast.Stmt.Data.Store.reset();
- const output_file = bundler.buildWithResolveResultEager(item) catch continue orelse continue;
- bundler.output_files.append(output_file) catch unreachable;
- }
- },
- else => Global.panic("Unsupported resolve mode: {s}", .{@tagName(bundler.options.resolve_mode)}),
+ if (bundler.options.output_dir_handle == null) {
+ const outstream = std.io.getStdOut();
+ try switch (bundler.options.import_path_format) {
+ .relative => bundler.processResolveQueue(.relative, @TypeOf(outstream), outstream),
+ .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, @TypeOf(outstream), outstream),
+ .absolute_url => bundler.processResolveQueue(.absolute_url, @TypeOf(outstream), outstream),
+ .absolute_path => bundler.processResolveQueue(.absolute_path, @TypeOf(outstream), outstream),
+ .package_path => bundler.processResolveQueue(.package_path, @TypeOf(outstream), outstream),
+ };
+ } else {
+ const output_dir = bundler.options.output_dir_handle orelse {
+ Output.printError("Invalid or missing output directory.", .{});
+ std.os.exit(1);
+ };
+ try switch (bundler.options.import_path_format) {
+ .relative => bundler.processResolveQueue(.relative, std.fs.Dir, output_dir),
+ .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, std.fs.Dir, output_dir),
+ .absolute_url => bundler.processResolveQueue(.absolute_url, std.fs.Dir, output_dir),
+ .absolute_path => bundler.processResolveQueue(.absolute_path, std.fs.Dir, output_dir),
+ .package_path => bundler.processResolveQueue(.package_path, std.fs.Dir, output_dir),
+ };
}
// if (log.level == .verbose) {
@@ -642,6 +1249,25 @@ pub fn NewBundler(cache_files: bool) type {
final_result.root_dir = bundler.options.output_dir_handle;
return final_result;
}
+
+ pub fn processResolveQueue(
+ bundler: *ThisBundler,
+ comptime import_path_format: options.BundleOptions.ImportPathFormat,
+ comptime Outstream: type,
+ outstream: Outstream,
+ ) !void {
+ while (bundler.resolve_queue.readItem()) |item| {
+ js_ast.Expr.Data.Store.reset();
+ js_ast.Stmt.Data.Store.reset();
+ const output_file = bundler.buildWithResolveResultEager(
+ item,
+ import_path_format,
+ Outstream,
+ outstream,
+ ) catch continue orelse continue;
+ bundler.output_files.append(output_file) catch unreachable;
+ }
+ }
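
Both import_path_format and Outstream are comptime parameters, so the five-way switches in bundle() each select a fully specialized copy of processResolveQueue, and the per-file loop carries no runtime branching on the format or the stream type. The same pattern in miniature, with toy names not taken from this codebase:

    fn emit(comptime fmt: enum { relative, absolute }, comptime Out: type, out: Out, path: []const u8) !void {
        // fmt and Out are compile-time known: each distinct pair instantiates
        // its own specialization, and this switch is resolved before runtime.
        switch (fmt) {
            .relative => try out.writeAll("./"),
            .absolute => try out.writeAll("/"),
        }
        try out.writeAll(path);
    }
    // e.g. try emit(.relative, std.fs.File, std.io.getStdOut(), "src/index.js");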
};
}
diff --git a/src/cache.zig b/src/cache.zig
index 668ceba1a..996f09588 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -11,6 +11,9 @@ const fs = @import("./fs.zig");
const sync = @import("sync.zig");
const Mutex = sync.Mutex;
+const import_record = @import("./import_record.zig");
+const ImportRecord = import_record.ImportRecord;
+
pub fn NewCache(comptime cache_files: bool) type {
return struct {
pub const Set = struct {
@@ -60,7 +63,13 @@ pub fn NewCache(comptime cache_files: bool) type {
c.entries.deinit();
}
- pub fn readFile(c: *Fs, _fs: *fs.FileSystem, path: string, dirname_fd: StoredFileDescriptorType, comptime use_shared_buffer: bool) !Entry {
+ pub fn readFile(
+ c: *Fs,
+ _fs: *fs.FileSystem,
+ path: string,
+ dirname_fd: StoredFileDescriptorType,
+ comptime use_shared_buffer: bool,
+ ) !Entry {
var rfs = _fs.fs;
if (cache_files) {
@@ -174,6 +183,9 @@ pub fn NewCache(comptime cache_files: bool) type {
) anyerror!?js_ast.Ast {
var temp_log = logger.Log.init(allocator);
defer temp_log.appendTo(log) catch {};
+ if (isDebug) {
+ Output.println("Parse!", .{});
+ }
var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
return null;
@@ -183,6 +195,25 @@ pub fn NewCache(comptime cache_files: bool) type {
return if (result.ok) result.ast else null;
}
+
+ pub fn scan(
+ cache: *@This(),
+ allocator: *std.mem.Allocator,
+ scan_pass_result: *js_parser.ScanPassResult,
+ opts: js_parser.Parser.Options,
+ defines: *Define,
+ log: *logger.Log,
+ source: *const logger.Source,
+ ) anyerror!void {
+ var temp_log = logger.Log.init(allocator);
+ defer temp_log.appendTo(log) catch {};
+
+ var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch |err| {
+ return;
+ };
+
+ return try parser.scanImports(scan_pass_result);
+ }
};
pub const Json = struct {
diff --git a/src/cli.zig b/src/cli.zig
index cdf71b568..d3fba1608 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -27,6 +27,8 @@ pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
+const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;
+
pub const Cli = struct {
const LoaderMatcher = strings.ExactSizeMatcher(4);
pub fn ColonListType(comptime t: type, value_resolver: anytype) type {
@@ -108,28 +110,30 @@ pub const Cli = struct {
pub fn parse(allocator: *std.mem.Allocator, stdout: anytype, stderr: anytype) !Api.TransformOptions {
@setEvalBranchQuota(9999);
const params = comptime [_]clap.Param(clap.Help){
- clap.parseParam("-h, --help Display this help and exit. ") catch unreachable,
- clap.parseParam("-r, --resolve <STR> Determine import/require behavior. \"disable\" ignores. \"dev\" bundles node_modules and builds everything else as independent entry points") catch unreachable,
- clap.parseParam("-d, --define <STR>... Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:development") catch unreachable,
- clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx (not implemented yet), ts (not implemented yet), css (not implemented yet)") catch unreachable,
- clap.parseParam("-o, --outdir <STR> Save output to directory (default: \"out\" if none provided and multiple entry points passed)") catch unreachable,
- clap.parseParam("-e, --external <STR>... Exclude module from transpilation (can use * wildcards). ex: -e react") catch unreachable,
- clap.parseParam("-i, --inject <STR>... Inject module at the top of every file") catch unreachable,
- clap.parseParam("--cwd <STR> Absolute path to resolve entry points from. Defaults to cwd") catch unreachable,
- clap.parseParam("--public-url <STR> Rewrite import paths to start with --public-url. Useful for web browsers.") catch unreachable,
- clap.parseParam("--serve Start a local dev server. This also sets resolve to \"lazy\".") catch unreachable,
- clap.parseParam("--public-dir <STR> Top-level directory for .html files, fonts, images, or anything external. Only relevant with --serve. Defaults to \"<cwd>/public\", to match create-react-app and Next.js") catch unreachable,
- clap.parseParam("--jsx-factory <STR> Changes the function called when compiling JSX elements using the classic JSX runtime") catch unreachable,
- clap.parseParam("--jsx-fragment <STR> Changes the function called when compiling JSX fragments using the classic JSX runtime") catch unreachable,
- clap.parseParam("--jsx-import-source <STR> Declares the module specifier to be used for importing the jsx and jsxs factory functions. Default: \"react\"") catch unreachable,
- clap.parseParam("--jsx-runtime <STR> \"automatic\" (default) or \"classic\"") catch unreachable,
- clap.parseParam("--jsx-production Use jsx instead of jsxDEV (default) for the automatic runtime") catch unreachable,
- clap.parseParam("--extension-order <STR>... defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable,
- clap.parseParam("--react-fast-refresh Enable React Fast Refresh (not implemented yet)") catch unreachable,
- clap.parseParam("--tsconfig-override <STR> Load tsconfig from path instead of cwd/tsconfig.json") catch unreachable,
- clap.parseParam("--platform <STR> \"browser\" or \"node\". Defaults to \"browser\"") catch unreachable,
- clap.parseParam("--main-fields <STR>... Main fields to lookup in package.json. Defaults to --platform dependent") catch unreachable,
- clap.parseParam("<POS>... Entry points to use") catch unreachable,
+ clap.parseParam("-h, --help Display this help and exit. ") catch unreachable,
+ clap.parseParam("-r, --resolve <STR> Determine import/require behavior. \"disable\" ignores. \"dev\" bundles node_modules and builds everything else as independent entry points") catch unreachable,
+ clap.parseParam("-d, --define <STR>... Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:development") catch unreachable,
+ clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx (not implemented yet), ts (not implemented yet), css (not implemented yet)") catch unreachable,
+ clap.parseParam("-o, --outdir <STR> Save output to directory (default: \"out\" if none provided and multiple entry points passed)") catch unreachable,
+ clap.parseParam("-e, --external <STR>... Exclude module from transpilation (can use * wildcards). ex: -e react") catch unreachable,
+ clap.parseParam("-i, --inject <STR>... Inject module at the top of every file") catch unreachable,
+ clap.parseParam("--cwd <STR> Absolute path to resolve entry points from. Defaults to cwd") catch unreachable,
+ clap.parseParam("--public-url <STR> Rewrite import paths to start with --public-url. Useful for web browsers.") catch unreachable,
+ clap.parseParam("--serve Start a local dev server. This also sets resolve to \"lazy\".") catch unreachable,
+ clap.parseParam("--public-dir <STR> Top-level directory for .html files, fonts, images, or anything external. Only relevant with --serve. Defaults to \"<cwd>/public\", to match create-react-app and Next.js") catch unreachable,
+ clap.parseParam("--jsx-factory <STR> Changes the function called when compiling JSX elements using the classic JSX runtime") catch unreachable,
+ clap.parseParam("--jsx-fragment <STR> Changes the function called when compiling JSX fragments using the classic JSX runtime") catch unreachable,
+ clap.parseParam("--jsx-import-source <STR> Declares the module specifier to be used for importing the jsx and jsxs factory functions. Default: \"react\"") catch unreachable,
+ clap.parseParam("--jsx-runtime <STR> \"automatic\" (default) or \"classic\"") catch unreachable,
+ clap.parseParam("--jsx-production Use jsx instead of jsxDEV (default) for the automatic runtime") catch unreachable,
+ clap.parseParam("--extension-order <STR>... defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable,
+ clap.parseParam("--react-fast-refresh Enable React Fast Refresh (not implemented yet)") catch unreachable,
+ clap.parseParam("--tsconfig-override <STR> Load tsconfig from path instead of cwd/tsconfig.json") catch unreachable,
+ clap.parseParam("--platform <STR> \"browser\" or \"node\". Defaults to \"browser\"") catch unreachable,
+ clap.parseParam("--main-fields <STR>... Main fields to lookup in package.json. Defaults to --platform dependent") catch unreachable,
+ clap.parseParam("--scan Instead of bundling or transpiling, print a list of every file imported by an entry point, recursively") catch unreachable,
+ clap.parseParam("--jsbundle Generate a new node_modules.jsbundle file from the current node_modules folder and entry point(s)") catch unreachable,
+ clap.parseParam("<POS>... Entry points to use") catch unreachable,
};
var diag = clap.Diagnostic{};
@@ -274,6 +278,8 @@ pub const Cli = struct {
.extension_order = args.options("--extension-order"),
.main_fields = args.options("--main-fields"),
.platform = platform,
+ .only_scan_dependencies = if (args.flag("--scan")) Api.ScanDependencyMode.all else Api.ScanDependencyMode._none,
+            .generate_node_module_bundle = args.flag("--jsbundle"),
};
}
};
@@ -286,6 +292,12 @@ pub const Cli = struct {
return error.InvalidJSXRuntime;
}
}
+ pub fn printScanResults(scan_results: bundler.ScanResult.Summary, allocator: *std.mem.Allocator) !void {
+ var stdout = std.io.getStdOut();
+ const print_start = std.time.nanoTimestamp();
+ try std.json.stringify(scan_results.list(), .{}, stdout.writer());
+ Output.printError("\nJSON printing took: {d}\n", .{std.time.nanoTimestamp() - print_start});
+ }
pub fn startTransform(allocator: *std.mem.Allocator, args: Api.TransformOptions, log: *logger.Log) anyerror!void {}
pub fn start(allocator: *std.mem.Allocator, stdout: anytype, stderr: anytype, comptime MainPanicHandler: type) anyerror!void {
const start_time = std.time.nanoTimestamp();
@@ -294,6 +306,12 @@ pub const Cli = struct {
MainPanicHandler.Singleton = &panicker;
var args = try Arguments.parse(alloc.static, stdout, stderr);
+    if (args.entry_points.len == 1 and args.entry_points[0].len > ".jsbundle".len and std.mem.endsWith(u8, args.entry_points[0], ".jsbundle")) {
+ var out_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ var input = try std.fs.openFileAbsolute(try std.os.realpath(args.entry_points[0], &out_buffer), .{ .read = true });
+ try NodeModuleBundle.printBundle(std.fs.File, input, @TypeOf(stdout), stdout);
+ return;
+ }
if (args.serve orelse false) {
try Server.start(allocator, args);
@@ -301,6 +319,17 @@ pub const Cli = struct {
return;
}
+ if ((args.only_scan_dependencies orelse ._none) == .all) {
+ return try printScanResults(try bundler.Bundler.scanDependencies(allocator, &log, args), allocator);
+ }
+
+ if ((args.generate_node_module_bundle orelse false)) {
+ var this_bundler = try bundler.ServeBundler.init(allocator, &log, args);
+ this_bundler.configureLinker();
+ try bundler.ServeBundler.GenerateNodeModuleBundle.generate(&this_bundler, allocator);
+ return;
+ }
+
var result: options.TransformResult = undefined;
switch (args.resolve orelse Api.ResolveMode.dev) {
Api.ResolveMode.disable => {
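Editor's note: the `.jsbundle` dispatch added above keys on a filename suffix; extracted as a standalone predicate with the same semantics (hypothetical helper, not in this diff):

    const std = @import("std");

    fn isJsBundlePath(path: []const u8) bool {
        const suffix = ".jsbundle";
        // Strictly greater: require at least one character before the extension.
        return path.len > suffix.len and std.mem.endsWith(u8, path, suffix);
    }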
diff --git a/src/defines.zig b/src/defines.zig
index 08447cde1..dab384600 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -108,7 +108,25 @@ pub const DefineData = struct {
.e_missing => {
continue;
},
- .e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
+ // We must copy so we don't recycle
+ .e_string => {
+ const e_string = try expr.data.e_string.clone(allocator);
+ expr.data.e_string.* = e_string;
+ data = expr.data;
+ },
+ .e_null, .e_boolean, .e_number => {
+ data = expr.data;
+ },
+ // We must copy so we don't recycle
+ .e_object => |obj| {
+ expr.data.e_object = try allocator.create(js_ast.E.Object);
+ expr.data.e_object.* = obj.*;
+ data = expr.data;
+ },
+ // We must copy so we don't recycle
+ .e_array => |obj| {
+ expr.data.e_array = try allocator.create(js_ast.E.Array);
+ expr.data.e_array.* = obj.*;
data = expr.data;
},
else => {
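Editor's note: each new `--define` branch above follows the same copy-don't-alias pattern. Expression payloads live in the parser's recycled stores, so they must be duplicated into a long-lived allocation before a pointer is retained. Condensed to its core (the `e_object` case, field names as above):

    // Recycled store memory may be reused by the next parse, so duplicate
    // the payload into the caller's allocator before keeping it.
    var copy = try allocator.create(js_ast.E.Object);
    copy.* = expr.data.e_object.*;
    expr.data.e_object = copy;
    data = expr.data;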
diff --git a/src/fs.zig b/src/fs.zig
index 77e85dcac..7dbcc4f40 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -970,6 +970,10 @@ pub const Path = struct {
name: PathName,
is_disabled: bool = false,
+ pub fn jsonStringify(self: *const @This(), options: anytype, writer: anytype) !void {
+ return try std.json.stringify(self.text, options, writer);
+ }
+
pub fn generateKey(p: *Path, allocator: *std.mem.Allocator) !string {
return try std.fmt.allocPrint(allocator, "{s}://{s}", .{ p.namespace, p.text });
}
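Editor's note: `std.json.stringify` dispatches to a `jsonStringify` method when the type declares one, so the override above makes a `Path` serialize as its bare `text` string rather than a nested struct. A hypothetical caller:

    var buf = std.ArrayList(u8).init(allocator);
    defer buf.deinit();
    try std.json.stringify(path, .{}, buf.writer());
    // buf.items is now e.g. "\"/project/src/index.js\"", not an object.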
diff --git a/src/http.zig b/src/http.zig
index 4eacdf862..8e3d4b2c1 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -581,7 +581,14 @@ pub const RequestContext = struct {
// It will call flush for us automatically
defer ctx.bundler.resetStore();
const loader = ctx.bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
- var written = try ctx.bundler.buildWithResolveResult(resolve_result, ctx.allocator, loader, SocketPrinter, chunked_encoder);
+ var written = try ctx.bundler.buildWithResolveResult(
+ resolve_result,
+ ctx.allocator,
+ loader,
+ SocketPrinter,
+ chunked_encoder,
+ .absolute_url,
+ );
},
.noop => {
try ctx.sendNotFound();
diff --git a/src/import_record.zig b/src/import_record.zig
index cce23af16..69c498882 100644
--- a/src/import_record.zig
+++ b/src/import_record.zig
@@ -31,6 +31,10 @@ pub const ImportKind = enum(u8) {
internal,
+ pub fn jsonStringify(self: @This(), options: anytype, writer: anytype) !void {
+ return try std.json.stringify(@tagName(self), options, writer);
+ }
+
pub fn isFromCSS(k: ImportKind) bool {
return k == .at_conditional or k == .at or k == .url;
}
@@ -54,6 +58,8 @@ pub const ImportRecord = struct {
// resolved.
handles_import_errors: bool = false,
+ is_internal: bool = false,
+
// Sometimes the parser creates an import record and decides it isn't needed.
// For example, TypeScript code may have import statements that later turn
// out to be type-only imports after analyzing the whole file.
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 05de5c2da..50a4b4d6a 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -45,7 +45,7 @@ pub fn NewBaseStore(comptime Union: anytype, comptime count: usize) type {
};
block: Block,
- overflow_ptrs: [500]*Block = undefined,
+ overflow_ptrs: [10_000]*Block = undefined,
overflow: []*Block = &([_]*Block{}),
overflow_used: usize = 0,
allocator: *Allocator,
@@ -1013,6 +1013,20 @@ pub const E = struct {
utf8: string = &([_]u8{}),
prefer_template: bool = false,
+ pub fn clone(str: *const String, allocator: *std.mem.Allocator) !String {
+ if (str.isUTF8()) {
+ return String{
+ .utf8 = try allocator.dupe(u8, str.utf8),
+ .prefer_template = str.prefer_template,
+ };
+ } else {
+ return String{
+ .value = try allocator.dupe(u16, str.value),
+ .prefer_template = str.prefer_template,
+ };
+ }
+ }
+
pub fn isUTF8(s: *const String) bool {
return s.utf8.len > 0;
}
@@ -2219,6 +2233,14 @@ pub const Expr = struct {
};
},
E.String => {
+ if (isDebug) {
+ // Sanity check: assert string is not a null ptr
+ if (st.isUTF8()) {
+ std.debug.assert(st.utf8[0] > 0);
+ } else if (st.value.len > 0) {
+ std.debug.assert(st.value[0] > 0);
+ }
+ }
return Expr{
.loc = loc,
.data = Data{
@@ -2302,7 +2324,7 @@ pub const Expr = struct {
return Expr{
.loc = loc,
.data = Data{
- .e_string = st,
+ .e_string = Data.Store.All.append(@TypeOf(st.*), st.*),
},
};
},
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 2d3989b12..546ac3f58 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -408,7 +408,7 @@ pub const ImportScanner = struct {
if (@as(Expr.Tag, val.data) == .e_identifier) {
// Is this import statement unused?
if (@as(Binding.Tag, decl.binding.data) == .b_identifier and p.symbols.items[decl.binding.data.b_identifier.ref.inner_index].use_count_estimate == 0) {
- p.ignoreUsage(val.getIdentifier().ref);
+ p.ignoreUsage(val.data.e_identifier.ref);
scanner.removed_import_equals = true;
continue;
@@ -734,33 +734,22 @@ pub const SideEffects = enum(u2) {
equality.ok = @as(Expr.Tag, right) == Expr.Tag.e_undefined;
equality.equal = equality.ok;
},
- .e_boolean => {
- const l = left.e_boolean;
- const r = right.e_boolean;
-
+ .e_boolean => |l| {
equality.ok = @as(Expr.Tag, right) == Expr.Tag.e_boolean;
- equality.equal = equality.ok and l.value == r.value;
+ equality.equal = equality.ok and l.value == right.e_boolean.value;
},
- .e_number => {
- const l = left.e_number;
- const r = right.e_number;
-
+ .e_number => |l| {
equality.ok = @as(Expr.Tag, right) == Expr.Tag.e_number;
- equality.equal = equality.ok and l.value == r.value;
+ equality.equal = equality.ok and l.value == right.e_number.value;
},
- .e_big_int => {
- const l = left.e_big_int;
- const r = right.e_big_int;
-
+ .e_big_int => |l| {
equality.ok = @as(Expr.Tag, right) == Expr.Tag.e_big_int;
- equality.equal = equality.ok and strings.eql(l.value, r.value);
+ equality.equal = equality.ok and strings.eql(l.value, right.e_big_int.value);
},
- .e_string => {
- const l = left.e_string;
- const r = right.e_string;
-
+ .e_string => |l| {
equality.ok = @as(Expr.Tag, right) == Expr.Tag.e_string;
if (equality.ok) {
+ const r = right.e_string;
equality.equal = r.eql(E.String, l);
}
},
@@ -1455,6 +1444,23 @@ const PropertyOpts = struct {
ts_decorators: []Expr = &[_]Expr{},
};
+pub const ScanPassResult = struct {
+ import_records: List(ImportRecord),
+ named_imports: js_ast.Ast.NamedImports,
+
+ pub fn init(allocator: *std.mem.Allocator) ScanPassResult {
+ return .{
+ .import_records = List(ImportRecord).init(allocator),
+ .named_imports = js_ast.Ast.NamedImports.init(allocator),
+ };
+ }
+
+ pub fn reset(scan_pass: *ScanPassResult) void {
+ scan_pass.named_imports.clearRetainingCapacity();
+ scan_pass.import_records.shrinkRetainingCapacity(0);
+ }
+};
+
pub const Parser = struct {
options: Options,
lexer: js_lexer.Lexer,
@@ -1474,6 +1480,9 @@ pub const Parser = struct {
use_define_for_class_fields: bool = false,
suppress_warnings_about_weird_code: bool = true,
+ // Used when bundling node_modules
+ output_commonjs: bool = false,
+
moduleType: ModuleType = ModuleType.esm,
trim_unused_imports: bool = true,
@@ -1488,6 +1497,34 @@ pub const Parser = struct {
}
};
+ pub fn scanImports(self: *Parser, scan_pass: *ScanPassResult) !void {
+ if (self.options.ts and self.options.jsx.parse) {
+ return try self._scanImports(TSXImportScanner, scan_pass);
+ } else if (self.options.ts) {
+ return try self._scanImports(TypeScriptImportScanner, scan_pass);
+ } else if (self.options.jsx.parse) {
+ return try self._scanImports(JSXImportScanner, scan_pass);
+ } else {
+ return try self._scanImports(JavaScriptImportScanner, scan_pass);
+ }
+ }
+
+ fn _scanImports(self: *Parser, comptime ParserType: type, scan_pass: *ScanPassResult) !void {
+ var p: ParserType = undefined;
+ try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
+ p.import_records = &scan_pass.import_records;
+ p.named_imports = &scan_pass.named_imports;
+ // Parse the file in the first pass, but do not bind symbols
+ var opts = ParseStatementOptions{ .is_module_scope = true };
+ debugl("<p.parseStmtsUpTo>");
+
+        // Parsing seems to take around 2x as much time as visiting,
+        // which makes sense.
+ // June 4: "Parsing took: 18028000"
+ // June 4: "Rest of this took: 8003000"
+ _ = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);
+ }
+
pub fn parse(self: *Parser) !js_ast.Result {
if (self.options.ts and self.options.jsx.parse) {
return try self._parse(TSXParser);
@@ -1501,7 +1538,8 @@ pub const Parser = struct {
}
fn _parse(self: *Parser, comptime ParserType: type) !js_ast.Result {
- var p = try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options);
+ var p: ParserType = undefined;
+ try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
var result: js_ast.Result = undefined;
@@ -1515,6 +1553,11 @@ pub const Parser = struct {
// Parse the file in the first pass, but do not bind symbols
var opts = ParseStatementOptions{ .is_module_scope = true };
debugl("<p.parseStmtsUpTo>");
+
+        // Parsing seems to take around 2x as much time as visiting,
+        // which makes sense.
+ // June 4: "Parsing took: 18028000"
+ // June 4: "Rest of this took: 8003000"
const stmts = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);
debugl("</p.parseStmtsUpTo>");
try p.prepareForVisitPass();
@@ -1787,7 +1830,7 @@ pub const Parser = struct {
p.generateImportStmt(RuntimeImports.Name, &imports, &before, p.runtime_imports, null, "import_") catch unreachable;
}
- if (p.cjs_import_stmts.items.len > 0) {
+ if (p.cjs_import_stmts.items.len > 0 and !p.options.output_commonjs) {
var import_records = try p.allocator.alloc(u32, p.cjs_import_stmts.items.len);
var declared_symbols = try p.allocator.alloc(js_ast.DeclaredSymbol, p.cjs_import_stmts.items.len);
@@ -1956,7 +1999,18 @@ var falseExprValueData = E.Boolean{ .value = false };
var nullValueExpr = Expr.Data{ .e_null = nullExprValueData };
var falseValueExpr = Expr.Data{ .e_boolean = E.Boolean{ .value = false } };
-pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled: bool) type {
+pub const ImportOrRequireScanResults = struct {
+ import_records: List(ImportRecord),
+};
+
+pub fn NewParser(
+ comptime is_typescript_enabled: bool,
+ comptime is_jsx_enabled: bool,
+ comptime only_scan_imports_and_do_not_visit: bool,
+) type {
+ const ImportRecordList = if (only_scan_imports_and_do_not_visit) *std.ArrayList(ImportRecord) else std.ArrayList(ImportRecord);
+ const NamedImportsType = if (only_scan_imports_and_do_not_visit) *js_ast.Ast.NamedImports else js_ast.Ast.NamedImports;
+
// P is for Parser!
// public only because of Binding.ToExpr
return struct {
@@ -2042,7 +2096,7 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
jsx_source_list_ref: js_ast.Ref = Ref.None,
// Imports (both ES6 and CommonJS) are tracked at the top level
- import_records: List(ImportRecord),
+ import_records: ImportRecordList,
import_records_for_current_part: List(u32),
export_star_import_records: List(u32),
@@ -2052,7 +2106,7 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
enclosing_class_keyword: logger.Range = logger.Range.None,
import_items_for_namespace: Map(js_ast.Ref, StringHashMap(js_ast.LocRef)),
is_import_item: RefBoolMap,
- named_imports: js_ast.Ast.NamedImports,
+ named_imports: NamedImportsType,
named_exports: js_ast.Ast.NamedExports,
top_level_symbol_to_parts: Map(js_ast.Ref, List(u32)),
import_namespace_cc_map: Map(ImportNamespaceCallOrConstruct, bool),
@@ -2300,6 +2354,23 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
pub fn s(p: *P, t: anytype, loc: logger.Loc) Stmt {
// Output.print("\nStmt: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start });
if (@typeInfo(@TypeOf(t)) == .Pointer) {
+                // ExportFrom normally becomes import records during the visiting pass.
+                // However, the visiting pass is skipped in this mode, so a minimal
+                // version of it must be generated here.
+                if (comptime only_scan_imports_and_do_not_visit) {
+                    // TODO: handle *S.ExportFrom. The commented-out draft that sat
+                    // here was copy-pasted from the require() detection in e() and
+                    // referenced a `call` variable that does not exist in this scope.
+                }
+
return Stmt.init(t, loc);
} else {
return Stmt.alloc(p.allocator, t, loc);
@@ -2310,8 +2381,36 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
// Output.print("\nExpr: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start });
if (@typeInfo(@TypeOf(t)) == .Pointer) {
+ if (comptime only_scan_imports_and_do_not_visit) {
+ if (@TypeOf(t) == *E.Call) {
+ const call: *E.Call = t;
+ switch (call.target.data) {
+ .e_identifier => |ident| {
+ // is this a require("something")
+ if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args[0].data) == .e_string) {
+ _ = p.addImportRecord(.require, loc, call.args[0].data.e_string.string(p.allocator) catch unreachable);
+ }
+ },
+ else => {},
+ }
+ }
+ }
return Expr.init(t, loc);
} else {
+ if (comptime only_scan_imports_and_do_not_visit) {
+ if (@TypeOf(t) == E.Call) {
+ const call: E.Call = t;
+ switch (call.target.data) {
+ .e_identifier => |ident| {
+ // is this a require("something")
+ if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args[0].data) == .e_string) {
+ _ = p.addImportRecord(.require, loc, call.args[0].data.e_string.string(p.allocator) catch unreachable);
+ }
+ },
+ else => {},
+ }
+ }
+ }
return Expr.alloc(p.allocator, t, loc);
}
}
@@ -2640,7 +2739,9 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
comptime suffix: string,
) !void {
const import_record_i = p.addImportRecordByRange(.stmt, logger.Range.None, import_path);
- var import_record = p.import_records.items[import_record_i];
+ var import_record: *ImportRecord = &p.import_records.items[import_record_i];
+
+ import_record.is_internal = true;
var import_path_identifier = try import_record.path.name.nonUniqueNameString(p.allocator);
var namespace_identifier = try p.allocator.alloc(u8, import_path_identifier.len + suffix.len);
var clause_items = try p.allocator.alloc(js_ast.ClauseItem, imports.len);
@@ -2733,8 +2834,14 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
p.hoistSymbols(p.module_scope);
p.require_ref = try p.declareCommonJSSymbol(.unbound, "require");
- p.exports_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "exports");
- p.module_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "module");
+
+ if (p.options.output_commonjs) {
+ p.exports_ref = try p.declareCommonJSSymbol(.hoisted, "exports");
+ p.module_ref = try p.declareCommonJSSymbol(.hoisted, "module");
+ } else {
+ p.exports_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "exports");
+ p.module_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "module");
+ }
p.runtime_imports.__require = p.require_ref;
@@ -6429,8 +6536,9 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
return true;
}
- pub fn declareCommonJSSymbol(p: *P, kind: Symbol.Kind, name: string) !Ref {
- const member = p.module_scope.members.get(name);
+ pub fn declareCommonJSSymbol(p: *P, comptime kind: Symbol.Kind, comptime name: string) !Ref {
+ const name_hash = comptime @TypeOf(p.module_scope.members).getHash(name);
+ const member = p.module_scope.members.getWithHash(name, name_hash);
// If the code declared this symbol using "var name", then this is actually
// not a collision. For example, node will let you do this:
@@ -6460,7 +6568,7 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
const ref = try p.newSymbol(kind, name);
if (member == null) {
- try p.module_scope.members.put(name, Scope.Member{ .ref = ref, .loc = logger.Loc.Empty });
+ try p.module_scope.members.putWithHash(name, name_hash, Scope.Member{ .ref = ref, .loc = logger.Loc.Empty });
return ref;
}
@@ -9540,6 +9648,10 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
}
pub fn visitStmtsAndPrependTempRefs(p: *P, stmts: *List(Stmt), opts: *PrependTempRefsOpts) !void {
+ if (only_scan_imports_and_do_not_visit) {
+ @compileError("only_scan_imports_and_do_not_visit must not run this.");
+ }
+
var old_temp_refs = p.temp_refs_to_declare;
var old_temp_ref_count = p.temp_ref_count;
p.temp_refs_to_declare.deinit();
@@ -9568,10 +9680,18 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
}
pub fn visitExpr(p: *P, expr: Expr) Expr {
- return p.visitExprInOut(expr, ExprIn{});
+ if (only_scan_imports_and_do_not_visit) {
+ @compileError("only_scan_imports_and_do_not_visit must not run this.");
+ }
+            // Inline to avoid an extra, unnecessary frame in the call stack
+ return @call(.{ .modifier = .always_inline }, P.visitExprInOut, .{ p, expr, ExprIn{} });
}
pub fn visitFunc(p: *P, _func: G.Fn, open_parens_loc: logger.Loc) G.Fn {
+ if (only_scan_imports_and_do_not_visit) {
+ @compileError("only_scan_imports_and_do_not_visit must not run this.");
+ }
+
var func = _func;
const old_fn_or_arrow_data = std.mem.toBytes(p.fn_or_arrow_data_visit);
const old_fn_only_data = std.mem.toBytes(p.fn_only_data_visit);
@@ -10170,7 +10290,7 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
// Optionally preserve the name
if (@as(Expr.Tag, e_.left.data) == .e_identifier) {
- e_.right = p.maybeKeepExprSymbolName(e_.right, p.symbols.items[e_.left.getIdentifier().ref.inner_index].original_name, was_anonymous_named_expr);
+ e_.right = p.maybeKeepExprSymbolName(e_.right, p.symbols.items[e_.left.data.e_identifier.ref.inner_index].original_name, was_anonymous_named_expr);
}
},
.bin_add_assign => {
@@ -10249,14 +10369,14 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
// though this is a run-time error, we make it a compile-time error when
// bundling because scope hoisting means these will no longer be run-time
// errors.
- if ((in.assign_target != .none or is_delete_target) and @as(Expr.Tag, e_.target.data) == .e_identifier and p.symbols.items[e_.target.getIdentifier().ref.inner_index].kind == .import) {
+ if ((in.assign_target != .none or is_delete_target) and @as(Expr.Tag, e_.target.data) == .e_identifier and p.symbols.items[e_.target.data.e_identifier.ref.inner_index].kind == .import) {
const r = js_lexer.rangeOfIdentifier(p.source, e_.target.loc);
p.log.addRangeErrorFmt(
p.source,
r,
p.allocator,
"Cannot assign to property on import \"{s}\"",
- .{p.symbols.items[e_.target.getIdentifier().ref.inner_index].original_name},
+ .{p.symbols.items[e_.target.data.e_identifier.ref.inner_index].original_name},
) catch unreachable;
}
@@ -11334,16 +11454,13 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
var val = d.value orelse unreachable;
const was_anonymous_named_expr = p.isAnonymousNamedExpr(val);
- val = p.visitExpr(val);
- // go version of defer would cause this to reset the variable
- // zig version of defer causes this to set it to the last value of val, at the end of the scope.
- d.value = val;
+ d.value = p.visitExpr(val);
// Optionally preserve the name
switch (d.binding.data) {
.b_identifier => |id| {
- val = p.maybeKeepExprSymbolName(
- val,
+ d.value = p.maybeKeepExprSymbolName(
+ d.value.?,
p.symbols.items[id.ref.inner_index].original_name,
was_anonymous_named_expr,
);
@@ -11460,7 +11577,8 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
// TODO: simplify boolean expression
},
.s_if => |data| {
- data.test_ = SideEffects.simplifyBoolean(p, p.visitExpr(data.test_));
+ var test__ = p.visitExpr(data.test_);
+ data.test_ = SideEffects.simplifyBoolean(p, test__);
const effects = SideEffects.toBoolean(data.test_.data);
if (effects.ok and !effects.value) {
@@ -12186,6 +12304,9 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
},
);
},
+ .e_string => |str| {
+ return p.e(str, loc);
+ },
else => {},
}
@@ -12394,6 +12515,10 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
}
pub fn visitClass(p: *P, name_scope_loc: logger.Loc, class: *G.Class) Ref {
+ if (only_scan_imports_and_do_not_visit) {
+ @compileError("only_scan_imports_and_do_not_visit must not run this.");
+ }
+
class.ts_decorators = p.visitTSDecorators(class.ts_decorators);
if (class.class_name) |name| {
@@ -12540,6 +12665,10 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
// Try separating the list for appending, so that it's not a pointer.
fn visitStmts(p: *P, stmts: *List(Stmt), kind: StmtsKind) !void {
+ if (only_scan_imports_and_do_not_visit) {
+ @compileError("only_scan_imports_and_do_not_visit must not run this.");
+ }
+
// Save the current control-flow liveness. This represents if we are
// currently inside an "if (false) { ... }" block.
var old_is_control_flow_dead = p.is_control_flow_dead;
@@ -13025,6 +13154,7 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
.symbols = p.symbols.items,
.exports_ref = p.exports_ref,
.wrapper_ref = null,
+ .module_ref = p.module_ref,
.import_records = p.import_records.items,
.export_star_import_records = p.export_star_import_records.items,
.top_level_symbol_to_parts = p.top_level_symbol_to_parts,
@@ -13038,7 +13168,15 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
};
}
- pub fn init(allocator: *std.mem.Allocator, log: *logger.Log, source: *const logger.Source, define: *Define, lexer: js_lexer.Lexer, opts: Parser.Options) !*P {
+ pub fn init(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ source: *const logger.Source,
+ define: *Define,
+ lexer: js_lexer.Lexer,
+ opts: Parser.Options,
+ this: *P,
+ ) !void {
var scope_order = try ScopeOrderList.initCapacity(allocator, 1);
var scope = try allocator.create(Scope);
scope.* = Scope{
@@ -13053,11 +13191,8 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
};
scope_order.appendAssumeCapacity(ScopeOrder{ .loc = locModuleScope, .scope = scope });
-
- var _parser = try allocator.create(P);
-
- _parser.* = P{
- .cjs_import_stmts = @TypeOf(_parser.cjs_import_stmts).init(allocator),
+ this.* = P{
+ .cjs_import_stmts = @TypeOf(this.cjs_import_stmts).init(allocator),
// This must default to true or else parsing "in" won't work right.
// It will fail for the case in the "in-keyword.js" file
.allow_in = true,
@@ -13068,44 +13203,54 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
.stmt_expr_value = nullExprData,
.expr_list = List(Expr).init(allocator),
.loop_body = nullStmtData,
- .injected_define_symbols = @TypeOf(_parser.injected_define_symbols).init(allocator),
- .emitted_namespace_vars = @TypeOf(_parser.emitted_namespace_vars).init(allocator),
- .is_exported_inside_namespace = @TypeOf(_parser.is_exported_inside_namespace).init(allocator),
- .known_enum_values = @TypeOf(_parser.known_enum_values).init(allocator),
- .local_type_names = @TypeOf(_parser.local_type_names).init(allocator),
- .allocated_names = @TypeOf(_parser.allocated_names).init(allocator),
+ .injected_define_symbols = @TypeOf(this.injected_define_symbols).init(allocator),
+ .emitted_namespace_vars = @TypeOf(this.emitted_namespace_vars).init(allocator),
+ .is_exported_inside_namespace = @TypeOf(this.is_exported_inside_namespace).init(allocator),
+ .known_enum_values = @TypeOf(this.known_enum_values).init(allocator),
+ .local_type_names = @TypeOf(this.local_type_names).init(allocator),
+ .allocated_names = @TypeOf(this.allocated_names).init(allocator),
.define = define,
- .scopes_for_current_part = @TypeOf(_parser.scopes_for_current_part).init(allocator),
- .symbols = @TypeOf(_parser.symbols).init(allocator),
- .ts_use_counts = @TypeOf(_parser.ts_use_counts).init(allocator),
- .declared_symbols = @TypeOf(_parser.declared_symbols).init(allocator),
- .import_records = @TypeOf(_parser.import_records).init(allocator),
- .import_records_for_current_part = @TypeOf(_parser.import_records_for_current_part).init(allocator),
- .export_star_import_records = @TypeOf(_parser.export_star_import_records).init(allocator),
- .import_items_for_namespace = @TypeOf(_parser.import_items_for_namespace).init(allocator),
- .named_imports = @TypeOf(_parser.named_imports).init(allocator),
- .named_exports = @TypeOf(_parser.named_exports).init(allocator),
- .top_level_symbol_to_parts = @TypeOf(_parser.top_level_symbol_to_parts).init(allocator),
- .import_namespace_cc_map = @TypeOf(_parser.import_namespace_cc_map).init(allocator),
+ .scopes_for_current_part = @TypeOf(this.scopes_for_current_part).init(allocator),
+ .symbols = @TypeOf(this.symbols).init(allocator),
+ .ts_use_counts = @TypeOf(this.ts_use_counts).init(allocator),
+ .declared_symbols = @TypeOf(this.declared_symbols).init(allocator),
+ .import_records = undefined,
+ .import_records_for_current_part = @TypeOf(this.import_records_for_current_part).init(allocator),
+ .export_star_import_records = @TypeOf(this.export_star_import_records).init(allocator),
+ .import_items_for_namespace = @TypeOf(this.import_items_for_namespace).init(allocator),
+ .named_imports = undefined,
+ .named_exports = @TypeOf(this.named_exports).init(allocator),
+ .top_level_symbol_to_parts = @TypeOf(this.top_level_symbol_to_parts).init(allocator),
+ .import_namespace_cc_map = @TypeOf(this.import_namespace_cc_map).init(allocator),
.scopes_in_order = scope_order,
.current_scope = scope,
- .temp_refs_to_declare = @TypeOf(_parser.temp_refs_to_declare).init(allocator),
- .relocated_top_level_vars = @TypeOf(_parser.relocated_top_level_vars).init(allocator),
+ .temp_refs_to_declare = @TypeOf(this.temp_refs_to_declare).init(allocator),
+ .relocated_top_level_vars = @TypeOf(this.relocated_top_level_vars).init(allocator),
.log = log,
- .is_import_item = @TypeOf(_parser.is_import_item).init(allocator),
+ .is_import_item = @TypeOf(this.is_import_item).init(allocator),
.allocator = allocator,
.options = opts,
.then_catch_chain = ThenCatchChain{ .next_target = nullExprData },
- .to_expr_wrapper_namespace = Binding2ExprWrapper.Namespace.init(_parser),
- .to_expr_wrapper_hoisted = Binding2ExprWrapper.Hoisted.init(_parser),
+ .to_expr_wrapper_namespace = undefined,
+ .to_expr_wrapper_hoisted = undefined,
+ .import_transposer = undefined,
+ .require_transposer = undefined,
+ .require_resolve_transposer = undefined,
.source = source,
- .import_transposer = @TypeOf(_parser.import_transposer).init(_parser),
- .require_transposer = @TypeOf(_parser.require_transposer).init(_parser),
- .require_resolve_transposer = @TypeOf(_parser.require_resolve_transposer).init(_parser),
+
.lexer = lexer,
};
- return _parser;
+ if (!only_scan_imports_and_do_not_visit) {
+ this.import_records = @TypeOf(this.import_records).init(allocator);
+ this.named_imports = NamedImportsType.init(allocator);
+ }
+
+ this.to_expr_wrapper_namespace = Binding2ExprWrapper.Namespace.init(this);
+ this.to_expr_wrapper_hoisted = Binding2ExprWrapper.Hoisted.init(this);
+ this.import_transposer = @TypeOf(this.import_transposer).init(this);
+ this.require_transposer = @TypeOf(this.require_transposer).init(this);
+ this.require_resolve_transposer = @TypeOf(this.require_resolve_transposer).init(this);
}
};
}
@@ -13121,10 +13266,15 @@ pub fn NewParser(comptime is_typescript_enabled: bool, comptime is_jsx_enabled:
// Range (min … max): 24.1 ms … 39.7 ms 500 runs
// '../../build/macos-x86_64/esdev node_modules/react-dom/cjs/react-dom.development.js --resolve=disable' ran
// 1.02 ± 0.07 times faster than '../../esdev.before-comptime-js-parser node_modules/react-dom/cjs/react-dom.development.js --resolve=disable'
-const JavaScriptParser = NewParser(false, false);
-const JSXParser = NewParser(false, true);
-const TSXParser = NewParser(true, true);
-const TypeScriptParser = NewParser(true, false);
+const JavaScriptParser = NewParser(false, false, false);
+const JSXParser = NewParser(false, true, false);
+const TSXParser = NewParser(true, true, false);
+const TypeScriptParser = NewParser(true, false, false);
+
+const JavaScriptImportScanner = NewParser(false, false, true);
+const JSXImportScanner = NewParser(false, true, true);
+const TSXImportScanner = NewParser(true, true, true);
+const TypeScriptImportScanner = NewParser(true, false, true);
// The "await" and "yield" expressions are never allowed in argument lists but
// may or may not be allowed otherwise depending on the details of the enclosing
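Editor's note: the `@compileError` guards added to the visit methods make misuse of the four `*ImportScanner` instantiations a build failure rather than a runtime one: any visiting function referenced from a parser compiled with `only_scan_imports_and_do_not_visit = true` refuses to compile. The pattern reduced to its core (names assumed for illustration):

    pub fn NewThing(comptime scan_only: bool) type {
        return struct {
            pub fn visit(self: *@This()) void {
                if (scan_only) {
                    // Fires only when a scan-only instantiation references visit().
                    @compileError("scan-only instantiation must never visit");
                }
                // ... full visiting logic ...
            }
        };
    }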
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 49f3559db..e5fe6b7d4 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -117,7 +117,12 @@ const ExprFlag = packed struct {
}
};
-pub fn NewPrinter(comptime ascii_only: bool, comptime Writer: type, comptime Linker: type) type {
+pub fn NewPrinter(
+ comptime ascii_only: bool,
+ comptime Writer: type,
+ comptime Linker: type,
+ comptime rewrite_esm_to_cjs: bool,
+) type {
// comptime const comptime_buf_len = 64;
// comptime var comptime_buf = [comptime_buf_len]u8{};
// comptime var comptime_buf_i: usize = 0;
@@ -2563,6 +2568,10 @@ pub fn NewPrinter(comptime ascii_only: bool, comptime Writer: type, comptime Lin
const record = p.import_records[s.import_record_index];
var item_count: usize = 0;
+ if (rewrite_esm_to_cjs) {
+ return p.printImportAsCommonJS(record, s, stmt);
+ }
+
p.printIndent();
p.printSpaceBeforeIdentifier();
@@ -2748,6 +2757,8 @@ pub fn NewPrinter(comptime ascii_only: bool, comptime Writer: type, comptime Lin
}
}
+ pub fn printImportAsCommonJS(p: *Printer, record: importRecord.ImportRecord, s: *S.Import, stmt: Stmt) void {}
+
pub fn printForLoopInit(p: *Printer, initSt: Stmt) void {
switch (initSt.data) {
.s_expr => |s| {
@@ -3079,6 +3090,11 @@ const FileWriterInternal = struct {
threadlocal var buffer: MutableString = undefined;
threadlocal var has_loaded_buffer: bool = false;
+ pub fn getBuffer() *MutableString {
+ buffer.reset();
+ return &buffer;
+ }
+
pub fn init(file: std.fs.File) FileWriterInternal {
// if (isMac) {
// _ = std.os.fcntl(file.handle, std.os.F_NOCACHE, 1) catch 0;
@@ -3140,7 +3156,42 @@ pub fn printAst(
comptime LinkerType: type,
linker: ?*LinkerType,
) !usize {
- const PrinterType = NewPrinter(false, Writer, LinkerType);
+ const PrinterType = NewPrinter(false, Writer, LinkerType, false);
+ var writer = _writer;
+ var printer = try PrinterType.init(
+ writer,
+ &tree,
+ source,
+ symbols,
+ opts,
+ linker,
+ );
+ for (tree.parts) |part| {
+ for (part.stmts) |stmt| {
+ try printer.printStmt(stmt);
+ if (printer.writer.getError()) {} else |err| {
+ return err;
+ }
+ }
+ }
+
+ try printer.writer.done();
+
+ return @intCast(usize, std.math.max(printer.writer.written, 0));
+}
+
+pub fn printCommonJS(
+ comptime Writer: type,
+ _writer: Writer,
+ tree: Ast,
+ symbols: js_ast.Symbol.Map,
+ source: *const logger.Source,
+ ascii_only: bool,
+ opts: Options,
+ comptime LinkerType: type,
+ linker: ?*LinkerType,
+) !usize {
+ const PrinterType = NewPrinter(false, Writer, LinkerType, true);
var writer = _writer;
var printer = try PrinterType.init(
writer,
@@ -3159,6 +3210,9 @@ pub fn printAst(
}
}
+ // Add a couple extra newlines at the end
+ printer.writer.print(@TypeOf("\n\n"), "\n\n");
+
try printer.writer.done();
return @intCast(usize, std.math.max(printer.writer.written, 0));
diff --git a/src/linker.zig b/src/linker.zig
index 9c41bf4be..4d3d9ff69 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -79,7 +79,12 @@ pub fn NewLinker(comptime BundlerType: type) type {
// This modifies the Ast in-place!
// But more importantly, this does the following:
// - Wrap CommonJS files
- pub fn link(linker: *ThisLinker, file_path: Fs.Path, result: *_bundler.ParseResult) !void {
+ pub fn link(
+ linker: *ThisLinker,
+ file_path: Fs.Path,
+ result: *_bundler.ParseResult,
+ comptime import_path_format: Options.BundleOptions.ImportPathFormat,
+ ) !void {
var needs_runtime = result.ast.uses_exports_ref or result.ast.uses_module_ref or result.ast.runtime_imports.hasAny();
const source_dir = file_path.name.dir;
var externals = std.ArrayList(u32).init(linker.allocator);
@@ -87,21 +92,23 @@ pub fn NewLinker(comptime BundlerType: type) type {
// Step 1. Resolve imports & requires
switch (result.loader) {
.jsx, .js, .ts, .tsx => {
- for (result.ast.import_records) |*import_record, record_index| {
+ for (result.ast.import_records) |*import_record, _record_index| {
+ const record_index = @truncate(u32, _record_index);
if (strings.eqlComptime(import_record.path.text, Runtime.Imports.Name)) {
import_record.path = try linker.generateImportPath(
source_dir,
linker.runtime_source_path,
Runtime.version(),
+ import_path_format,
);
- result.ast.runtime_import_record_id = @truncate(u32, record_index);
+ result.ast.runtime_import_record_id = record_index;
result.ast.needs_runtime = true;
continue;
}
if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
if (resolved_import.is_external) {
- externals.append(@truncate(u32, record_index)) catch unreachable;
+ externals.append(record_index) catch unreachable;
continue;
}
@@ -110,6 +117,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
file_path.text[0 .. source_dir.len + 1],
resolved_import,
import_record,
+ import_path_format,
) catch continue;
// If we're importing a CommonJS module as ESM
@@ -181,6 +189,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
source_dir,
linker.runtime_source_path,
Runtime.version(),
+ import_path_format,
),
.range = logger.Range{ .loc = logger.Loc{ .start = 0 }, .len = 0 },
};
@@ -193,7 +202,13 @@ pub fn NewLinker(comptime BundlerType: type) type {
threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
threadlocal var relative_path_allocator_buf_loaded: bool = false;
- pub fn generateImportPath(linker: *ThisLinker, source_dir: string, source_path: string, package_version: ?string) !Fs.Path {
+ pub fn generateImportPath(
+ linker: *ThisLinker,
+ source_dir: string,
+ source_path: string,
+ package_version: ?string,
+ comptime import_path_format: Options.BundleOptions.ImportPathFormat,
+ ) !Fs.Path {
if (!relative_path_allocator_buf_loaded) {
relative_path_allocator_buf_loaded = true;
relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
@@ -208,7 +223,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
}
}
- switch (linker.options.import_path_format) {
+ switch (import_path_format) {
.relative => {
var pretty = try linker.allocator.dupe(u8, linker.fs.relative(source_dir, source_path));
var pathname = Fs.PathName.init(pretty);
@@ -261,7 +276,13 @@ pub fn NewLinker(comptime BundlerType: type) type {
}
}
- pub fn processImportRecord(linker: *ThisLinker, source_dir: string, resolve_result: *Resolver.Result, import_record: *ImportRecord) !void {
+ pub fn processImportRecord(
+ linker: *ThisLinker,
+ source_dir: string,
+ resolve_result: *Resolver.Result,
+ import_record: *ImportRecord,
+ comptime import_path_format: Options.BundleOptions.ImportPathFormat,
+ ) !void {
// extremely naive.
resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
@@ -277,6 +298,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
source_dir,
resolve_result.path_pair.primary.text,
resolve_result.package_json_version,
+ import_path_format,
);
}
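Editor's note: making `import_path_format` a `comptime` parameter (instead of reading `linker.options.import_path_format` at runtime) monomorphizes `link`, `generateImportPath`, and `processImportRecord` per format, so each caller picks its rewrite strategy with no runtime branch. Hypothetical call sites (the `file_path` and `parse_result` values are assumed), matching the `.absolute_url` use in http.zig above:

    // Dev server: rewrite import specifiers to absolute URLs.
    try linker.link(file_path, &parse_result, .absolute_url);
    // Writing to disk: keep specifiers relative.
    try linker.link(file_path, &parse_result, .relative);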
diff --git a/src/logger.zig b/src/logger.zig
index 4d9e5150a..cd58e29d8 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -123,6 +123,31 @@ pub const Data = struct {
allocator.free(text);
}
+
+ pub fn writeFormat(
+ this: *const Data,
+ to: anytype,
+ kind: Kind,
+ ) !void {
+ if (this.text.len == 0) return;
+
+ if (this.location) |location| {
+ try std.fmt.format(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{} {d}", .{
+ kind.string(),
+ this.text,
+ location.line_text,
+ location.file,
+ location.line,
+ location.column,
+ location.offset,
+ });
+ } else {
+ try std.fmt.format(to, "\n\n{s}: {s}\n", .{
+ kind.string(),
+ this.text,
+ });
+ }
+ }
};
pub const Msg = struct {
@@ -144,26 +169,17 @@ pub const Msg = struct {
msg: *const Msg,
to: anytype,
) !void {
- if (msg.data.location) |location| {
- try std.fmt.format(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{} {d}", .{
- msg.kind.string(),
- msg.data.text,
- location.line_text,
- location.file,
- location.line,
- location.column,
- location.offset,
- });
- } else {
- try std.fmt.format(to, "\n\n{s}: {s}\n", .{
- msg.kind.string(),
- msg.data.text,
- });
+ try msg.data.writeFormat(to, msg.kind);
+
+ if (msg.notes) |notes| {
+ for (notes) |note| {
+ try note.writeFormat(to, msg.kind);
+ }
}
}
pub fn doFormat(msg: *const Msg, to: anytype, formatterFunc: anytype) !void {
- try formatterFunc(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{} {d}", .{
+        try formatterFunc(to, "\n\n{s}: {s}\n{s}\n{s}:{d}:{d} {d}", .{
msg.kind.string(),
msg.data.text,
msg.data.location.?.line_text,
@@ -368,7 +384,7 @@ pub const Log = struct {
// TODO:
pub fn print(self: *Log, to: anytype) !void {
for (self.msgs.items) |msg| {
- try msg.doFormat(to, std.fmt.format);
+ try msg.writeFormat(to);
}
}
};
diff --git a/src/node_module_bundle.zig b/src/node_module_bundle.zig
new file mode 100644
index 000000000..11150f376
--- /dev/null
+++ b/src/node_module_bundle.zig
@@ -0,0 +1,69 @@
+const Api = @import("./api/schema.zig").Api;
+const std = @import("std");
+usingnamespace @import("global.zig");
+
+pub const NodeModuleBundle = struct {
+ container: *Api.JavascriptBundleContainer,
+ bundle: *Api.JavascriptBundle,
+ allocator: *std.mem.Allocator,
+ fd: FileDescriptorType = 0,
+
+ pub const magic_bytes = "#!/usr/bin/env speedy\n\n";
+ threadlocal var jsbundle_prefix: [magic_bytes.len + 5]u8 = undefined;
+
+ pub fn getCodeEndPosition(stream: anytype, comptime needs_seek: bool) !u32 {
+ if (needs_seek) try stream.seekTo(0);
+
+ const read_bytes = try stream.read(&jsbundle_prefix);
+ if (read_bytes != jsbundle_prefix.len) {
+ return error.JSBundleBadHeaderTooShort;
+ }
+
+ return std.mem.readIntNative(u32, jsbundle_prefix[magic_bytes.len .. magic_bytes.len + 4]);
+ }
+
+ pub fn loadBundle(allocator: *std.mem.Allocator, stream: anytype) !NodeModuleBundle {
+        const end = try getCodeEndPosition(stream, true);
+ try stream.seekTo(end + 1);
+ var reader = stream.reader();
+ var container = try Api.JavascriptBundleContainer.decode(allocator, reader);
+ return NodeModuleBundle{
+ .allocator = allocator,
+ .container = container,
+ .bundle = container.bundle,
+ .fd = if (std.meta.trait.hasField("handle")(stream)) stream.handle else 0,
+ };
+ }
+
+ pub fn printBundle(
+ comptime StreamType: type,
+ input: StreamType,
+ comptime DestinationStreamType: type,
+ output: DestinationStreamType,
+ ) !void {
+ const BufferStreamContext = struct {
+ pub fn run(in: StreamType, out: DestinationStreamType, end_at: u32) !void {
+ var buf: [4096]u8 = undefined;
+ var remain = @intCast(i64, end_at);
+ var read_amount: i64 = @intCast(i64, in.read(&buf) catch 0);
+ while (remain > 0 and read_amount > 0) {
+ remain -= @intCast(i64, try out.write(buf[0..@intCast(usize, std.math.min(read_amount, remain))]));
+ read_amount = @intCast(i64, in.read(&buf) catch 0);
+ }
+
+ _ = try out.write(buf[0..@intCast(usize, remain + 1)]);
+ }
+ };
+ if (isMac) {
+            // darwin only allows toggling read-ahead on or off, not a specific amount
+ _ = std.os.fcntl(input.handle, std.os.F_RDAHEAD, 1) catch 0;
+ }
+ const end = try getCodeEndPosition(input, false);
+
+ try BufferStreamContext.run(
+ input,
+ output,
+ end,
+ );
+ }
+};
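Editor's note: read together, `magic_bytes`, `getCodeEndPosition`, and `loadBundle` imply the on-disk layout sketched below. The writer side is not part of this diff, so the helper is an illustrative assumption.

    //   [0 .. magic_bytes.len)                  "#!/usr/bin/env speedy\n\n"
    //   [magic_bytes.len .. magic_bytes.len+4)  u32, native endian: offset at
    //                                           which the JS payload ends
    //   [.. code_end)                           concatenated JavaScript
    //   [code_end+1 .. EOF)                     schema-encoded JavascriptBundleContainer
    fn writeBundleHeader(writer: anytype, code_end_offset: u32) !void {
        // Hypothetical inverse of getCodeEndPosition.
        try writer.writeAll(NodeModuleBundle.magic_bytes);
        try writer.writeIntNative(u32, code_end_offset);
    }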
diff --git a/src/options.zig b/src/options.zig
index b68f753a3..fd44c9d0a 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -33,9 +33,11 @@ pub fn validatePath(log: *logger.Log, fs: *Fs.FileSystem.Implementation, cwd: st
pub fn stringHashMapFromArrays(comptime t: type, allocator: *std.mem.Allocator, keys: anytype, values: anytype) !t {
var hash_map = t.init(allocator);
- try hash_map.ensureCapacity(@intCast(u32, keys.len));
- for (keys) |key, i| {
- try hash_map.put(key, values[i]);
+ if (keys.len > 0) {
+ try hash_map.ensureCapacity(@intCast(u32, keys.len));
+ for (keys) |key, i| {
+ try hash_map.put(key, values[i]);
+ }
}
return hash_map;
@@ -508,6 +510,7 @@ pub const BundleOptions = struct {
absolute_url,
// omit file extension
absolute_path,
+ package_path,
};
pub const Defaults = struct {
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 9a7c12348..596a60c57 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -13,6 +13,7 @@ const MainFieldMap = std.StringHashMap(string);
const BrowserMap = std.StringHashMap(string);
pub const PackageJSON = struct {
+ name: string = "",
source: logger.Source,
main_fields: MainFieldMap,
module_type: options.ModuleType,
@@ -88,6 +89,12 @@ pub const PackageJSON = struct {
}
}
+ if (json.asProperty("name")) |version_json| {
+ if (version_json.expr.asString(r.allocator)) |version_str| {
+ package_json.name = r.allocator.dupe(u8, version_str) catch unreachable;
+ }
+ }
+
if (json.asProperty("type")) |type_json| {
if (type_json.expr.asString(r.allocator)) |type_str| {
switch (options.ModuleType.List.get(type_str) orelse options.ModuleType.unknown) {
diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig
index cc1b4ae9f..8e3e5a718 100644
--- a/src/resolver/resolve_path.zig
+++ b/src/resolver/resolve_path.zig
@@ -748,6 +748,7 @@ pub fn joinAbsStringBuf(_cwd: []const u8, buf: []u8, _parts: anytype, comptime _
// One last normalization, to remove any ../ added
const result = normalizeStringBuf(buf[0..out], parser_buffer[leading_separator.len..parser_buffer.len], false, _platform, false);
std.mem.copy(u8, buf[0..leading_separator.len], leading_separator);
+
std.mem.copy(u8, buf[leading_separator.len .. result.len + leading_separator.len], result);
return buf[0 .. result.len + leading_separator.len];
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 8dc753b83..3f421d6ff 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -85,10 +85,10 @@ pub const DirInfo = struct {
pub const HashMap = allocators.BSSMap(DirInfo, Fs.Preallocate.Counts.dir_entry, false, 128);
};
pub const TemporaryBuffer = struct {
- pub threadlocal var ExtensionPathBuf = std.mem.zeroes([512]u8);
- pub threadlocal var TSConfigMatchStarBuf = std.mem.zeroes([512]u8);
- pub threadlocal var TSConfigMatchPathBuf = std.mem.zeroes([512]u8);
- pub threadlocal var TSConfigMatchFullBuf = std.mem.zeroes([512]u8);
+ pub threadlocal var ExtensionPathBuf: [512]u8 = undefined;
+ pub threadlocal var TSConfigMatchStarBuf: [512]u8 = undefined;
+ pub threadlocal var TSConfigMatchPathBuf: [512]u8 = undefined;
+ pub threadlocal var TSConfigMatchFullBuf: [512]u8 = undefined;
};
pub const PathPair = struct {
@@ -121,6 +121,7 @@ pub const Result = struct {
jsx: options.JSX.Pragma = options.JSX.Pragma{},
package_json_version: ?string = null,
+ package_json_name: ?string = null,
is_external: bool = false,
@@ -281,6 +282,7 @@ pub const MatchResult = struct {
file_fd: StoredFileDescriptorType = 0,
is_node_module: bool = false,
package_json_version: ?string = null,
+ package_json_name: ?string = null,
diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
};
@@ -386,11 +388,6 @@ pub fn NewResolver(cache_files: bool) type {
pub fn flushDebugLogs(r: *ThisResolver, flush_mode: DebugLogs.FlushMode) !void {
if (r.debug_logs) |*debug| {
- defer {
- debug.deinit();
- r.debug_logs = null;
- }
-
if (flush_mode == DebugLogs.FlushMode.fail) {
try r.log.addRangeDebugWithNotes(null, logger.Range{ .loc = logger.Loc{} }, debug.what, debug.notes.toOwnedSlice());
} else if (@enumToInt(r.log.level) <= @enumToInt(logger.Log.Level.verbose)) {
@@ -482,9 +479,16 @@ pub fn NewResolver(cache_files: bool) type {
r.mutex.lock();
defer r.mutex.unlock();
- var result = try r.resolveWithoutSymlinks(source_dir, import_path, kind);
+ const result = r.resolveWithoutSymlinks(source_dir, import_path, kind) catch |err| {
+ r.flushDebugLogs(.fail) catch {};
+ return err;
+ };
+
+ defer {
+ if (result == null) r.flushDebugLogs(.fail) catch {} else r.flushDebugLogs(.success) catch {};
+ }
- return result orelse error.ModuleNotFound;
+ return result orelse return error.ModuleNotFound;
}
pub fn resolveWithoutSymlinks(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !?Result {
@@ -517,6 +521,8 @@ pub fn NewResolver(cache_files: bool) type {
.diff_case = res.diff_case,
.dirname_fd = dir_info.getFileDescriptor(),
.is_from_node_modules = res.is_node_module,
+ .package_json_name = res.package_json_name,
+ .package_json_version = res.package_json_version,
};
}
}
@@ -545,6 +551,8 @@ pub fn NewResolver(cache_files: bool) type {
.path_pair = entry.path_pair,
.diff_case = entry.diff_case,
.is_from_node_modules = entry.is_node_module,
+ .package_json_name = entry.package_json_name,
+ .package_json_version = entry.package_json_version,
};
}
@@ -603,6 +611,7 @@ pub fn NewResolver(cache_files: bool) type {
.module_type = pkg.module_type,
.dirname_fd = _result.dirname_fd,
.package_json_version = pkg.version,
+ .package_json_name = pkg.name,
};
check_relative = false;
check_package = false;
@@ -621,6 +630,7 @@ pub fn NewResolver(cache_files: bool) type {
.is_from_node_modules = res.is_node_module,
.dirname_fd = res.dirname_fd,
.package_json_version = res.package_json_version,
+ .package_json_name = res.package_json_name,
};
} else if (!check_package) {
return null;
@@ -670,6 +680,7 @@ pub fn NewResolver(cache_files: bool) type {
.diff_case = node_module.diff_case,
.is_from_node_modules = true,
.package_json_version = package_json.version,
+ .package_json_name = package_json.name,
};
}
} else {
@@ -692,6 +703,7 @@ pub fn NewResolver(cache_files: bool) type {
.is_from_node_modules = res.is_node_module,
.dirname_fd = res.dirname_fd,
.package_json_version = res.package_json_version,
+ .package_json_name = res.package_json_name,
};
} else {
// Note: node's "self references" are not currently supported
@@ -708,6 +720,7 @@ pub fn NewResolver(cache_files: bool) type {
const rel_path = r.fs.relative(pkg_json.source.key_path.text, path.text);
result.module_type = pkg_json.module_type;
result.package_json_version = if (result.package_json_version == null) pkg_json.version else result.package_json_version;
+ result.package_json_name = if (result.package_json_name == null) pkg_json.name else result.package_json_name;
if (r.checkBrowserMap(pkg_json, rel_path)) |remapped| {
if (remapped.len == 0) {
path.is_disabled = true;
@@ -1289,6 +1302,7 @@ pub fn NewResolver(cache_files: bool) type {
.primary = _path,
},
.package_json_version = browser_json.version,
+ .package_json_name = browser_json.name,
};
}
@@ -1296,16 +1310,30 @@ pub fn NewResolver(cache_files: bool) type {
}
}
}
- const _paths = [_]string{ field_rel_path, path };
- const field_abs_path = r.fs.absAlloc(r.allocator, &_paths) catch unreachable;
+ const _paths = [_]string{ path, field_rel_path };
+ const field_abs_path = r.fs.abs(&_paths);
+ // Is this a file?
+ if (r.loadAsFile(field_abs_path, extension_order)) |result| {
+ if (dir_info.package_json) |package_json| {
+ return MatchResult{
+ .path_pair = PathPair{ .primary = Fs.Path.init(result.path) },
+ .package_json_name = package_json.name,
+ .package_json_version = package_json.version,
+ };
+ }
+
+ return MatchResult{
+ .path_pair = PathPair{ .primary = Fs.Path.init(result.path) },
+ };
+ }
+
+ // Is it a directory with an index?
const field_dir_info = (r.dirInfoCached(field_abs_path) catch null) orelse {
- r.allocator.free(field_abs_path);
return null;
};
return r.loadAsIndexWithBrowserRemapping(field_dir_info, field_abs_path, extension_order) orelse {
- r.allocator.free(field_abs_path);
return null;
};
}
@@ -1361,6 +1389,7 @@ pub fn NewResolver(cache_files: bool) type {
.primary = _path,
},
.package_json_version = browser_json.version,
+ .package_json_name = browser_json.name,
};
}
@@ -1392,6 +1421,27 @@ pub fn NewResolver(cache_files: bool) type {
// Is this a file?
if (r.loadAsFile(path, extension_order)) |file| {
+ // ServeBundler cares about the package.json
+ if (!cache_files) {
+ // Determine the package folder by looking at the last node_modules/ folder in the path
+ if (strings.lastIndexOf(file.path, "node_modules" ++ std.fs.path.sep_str)) |last_node_modules_folder| {
+ const node_modules_folder_offset = last_node_modules_folder + ("node_modules" ++ std.fs.path.sep_str).len;
+ // Determine the package name by looking at the next separator
+ if (strings.indexOfChar(file.path[node_modules_folder_offset..], std.fs.path.sep)) |package_name_length| {
+ if ((r.dirInfoCached(file.path[0 .. node_modules_folder_offset + package_name_length]) catch null)) |package_dir_info| {
+ if (package_dir_info.package_json) |package_json| {
+ return MatchResult{
+ .path_pair = .{ .primary = Path.init(file.path) },
+ .diff_case = file.diff_case,
+ .dirname_fd = file.dirname_fd,
+ .package_json_name = package_json.name,
+ .package_json_version = package_json.version,
+ };
+ }
+ }
+ }
+ }
+ }
return MatchResult{
.path_pair = .{ .primary = Path.init(file.path) },
.diff_case = file.diff_case,
@@ -1412,10 +1462,12 @@ pub fn NewResolver(cache_files: bool) type {
const dir_info = (r.dirInfoCached(path) catch null) orelse return null;
var package_json_version: ?string = null;
+ var package_json_name: ?string = null;
// Try using the main field(s) from "package.json"
if (dir_info.package_json) |pkg_json| {
package_json_version = pkg_json.version;
+ package_json_name = pkg_json.name;
if (pkg_json.main_fields.count() > 0) {
const main_field_values = pkg_json.main_fields;
const main_field_keys = r.opts.main_fields;
@@ -1434,7 +1486,7 @@ pub fn NewResolver(cache_files: bool) type {
continue;
};
- var _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue;
+ const _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue;
// If the user did not manually configure a "main" field order, then
// use a special per-module automatic algorithm to decide whether to
@@ -1477,6 +1529,7 @@ pub fn NewResolver(cache_files: bool) type {
.diff_case = auto_main_result.diff_case,
.dirname_fd = auto_main_result.dirname_fd,
.package_json_version = pkg_json.version,
+ .package_json_name = pkg_json.name,
};
} else {
if (r.debug_logs) |*debug| {
@@ -1488,10 +1541,13 @@ pub fn NewResolver(cache_files: bool) type {
}
var _auto_main_result = auto_main_result;
_auto_main_result.package_json_version = pkg_json.version;
+ _auto_main_result.package_json_name = pkg_json.name;
return _auto_main_result;
}
}
}
+
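+ // Otherwise, return the "main" field match as-is.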
+ return _result;
}
}
}
@@ -1501,6 +1557,10 @@ pub fn NewResolver(cache_files: bool) type {
if (res.package_json_version == null and package_json_version != null) {
res.package_json_version = package_json_version;
}
+
+ if (res.package_json_name == null and package_json_name != null) {
+ res.package_json_name = package_json_name;
+ }
return res.*;
}
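The name/version backfill above only fills fields the match did not already set. A hedged JavaScript sketch of the overall main-field walk (names are illustrative, not the Zig API):

function loadFromMainFields(pkg, mainFieldKeys, tryLoad) {
  for (const key of mainFieldKeys) {
    const rel = pkg.mainFields[key];
    if (rel == null) continue;
    const result = tryLoad(rel);
    if (result == null) continue;
    // Backfill only when the match didn't already carry these.
    result.packageJsonName ??= pkg.name;
    result.packageJsonVersion ??= pkg.version;
    return result;
  }
  return null;
}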
@@ -1568,7 +1628,6 @@ pub fn NewResolver(cache_files: bool) type {
}
// Try the path with extensions
-
std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, path);
for (r.opts.extension_order) |ext| {
var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. path.len + ext.len];
@@ -1576,7 +1635,7 @@ pub fn NewResolver(cache_files: bool) type {
const file_name = buffer[path.len - base.len .. buffer.len];
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for file \"{s}{s}\" ", .{ base, ext }) catch {};
+ debug.addNoteFmt("Checking for file \"{s}\" ", .{buffer}) catch {};
}
if (entries.get(file_name)) |query| {
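The probe loop above reuses one fixed buffer, appending each configured extension to the path and checking the directory's entry map. A rough JavaScript equivalent of the candidate order (keyed by the full candidate path here for brevity; the Zig keys by file name):

function firstWithExtension(entries, path, extensionOrder) {
  for (const ext of extensionOrder) {
    const candidate = path + ext; // e.g. "./util" -> "./util.ts"
    if (entries.has(candidate)) return candidate;
  }
  return null;
}

// firstWithExtension(new Set(["./util.ts"]), "./util", [".tsx", ".ts", ".js"]) === "./util.ts"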
diff --git a/src/test/fixtures/export-from.js b/src/test/fixtures/export-from.js
new file mode 100644
index 000000000..aff2d3309
--- /dev/null
+++ b/src/test/fixtures/export-from.js
@@ -0,0 +1,2 @@
+export { foo } from "export-from-target";
+export { foo as bar } from "export-from-target";
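This fixture exercises re-export clauses, which forward bindings without creating a local import. A roughly equivalent lowering, with an illustrative intermediate name (the original form introduces no local binding):

import { foo as _foo } from "export-from-target";
export { _foo as foo };
export { _foo as bar };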
diff --git a/src/test/fixtures/nql-define.2.js b/src/test/fixtures/nql-define.2.js
new file mode 100644
index 000000000..fef5e680d
--- /dev/null
+++ b/src/test/fixtures/nql-define.2.js
@@ -0,0 +1,39 @@
+/* eslint-disable no-console */
+var warned = {};
+export function warning(valid, message) {
+ // Support uglify
+ if (
+ process.env.NODE_ENV !== "production" &&
+ !valid &&
+ console !== undefined
+ ) {
+ console.error("Warning: ".concat(message));
+ }
+}
+export function note(valid, message) {
+ // Support uglify
+ if (
+ process.env.NODE_ENV !== "production" &&
+ !valid &&
+ console !== undefined
+ ) {
+ console.warn("Note: ".concat(message));
+ }
+}
+export function resetWarned() {
+ warned = {};
+}
+export function call(method, valid, message) {
+ if (!valid && !warned[message]) {
+ method(false, message);
+ warned[message] = true;
+ }
+}
+export function warningOnce(valid, message) {
+ call(warning, valid, message);
+}
+export function noteOnce(valid, message) {
+ call(note, valid, message);
+}
+export default warningOnce;
+/* eslint-enable */
diff --git a/src/test/fixtures/nql-define.js b/src/test/fixtures/nql-define.js
new file mode 100644
index 000000000..aac715454
--- /dev/null
+++ b/src/test/fixtures/nql-define.js
@@ -0,0 +1,41 @@
+import "./nql-define.2.js";
+
+/* eslint-disable no-console */
+var warned = {};
+export function warning(valid, message) {
+ // Support uglify
+ if (
+ process.env.NODE_ENV !== "production" &&
+ !valid &&
+ console !== undefined
+ ) {
+ console.error("Warning: ".concat(message));
+ }
+}
+export function note(valid, message) {
+ // Support uglify
+ if (
+ process.env.NODE_ENV !== "production" &&
+ !valid &&
+ console !== undefined
+ ) {
+ console.warn("Note: ".concat(message));
+ }
+}
+export function resetWarned() {
+ warned = {};
+}
+export function call(method, valid, message) {
+ if (!valid && !warned[message]) {
+ method(false, message);
+ warned[message] = true;
+ }
+}
+export function warningOnce(valid, message) {
+ call(warning, valid, message);
+}
+export function noteOnce(valid, message) {
+ call(note, valid, message);
+}
+export default warningOnce;
+/* eslint-enable */
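Both nql-define fixtures exist to exercise compile-time defines (note src/defines.zig in this change). With a define mapping process.env.NODE_ENV to "production", the guard becomes statically false and the branch is eligible for dead-code elimination:

// After replacing process.env.NODE_ENV with "production":
if ("production" !== "production" && !valid && console !== undefined) {
  // Statically unreachable; a minifier can drop the whole block.
  console.error("Warning: ".concat(message));
}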