Diffstat (limited to 'src')
-rw-r--r--  src/api/schema.d.ts               22
-rw-r--r--  src/api/schema.js                220
-rw-r--r--  src/api/schema.peechy             47
-rw-r--r--  src/api/schema.zig              1648
-rw-r--r--  src/bundler.zig                  144
-rw-r--r--  src/cli.zig                       42
-rw-r--r--  src/fs.zig                        12
-rw-r--r--  src/global.zig                    39
-rw-r--r--  src/import_record.zig              5
-rw-r--r--  src/js_ast.zig                     2
-rw-r--r--  src/js_parser/js_parser.zig       48
-rw-r--r--  src/linker.zig                    96
-rw-r--r--  src/node_module_bundle.zig       215
-rw-r--r--  src/options.zig                  140
-rw-r--r--  src/resolver/package_json.zig     18
-rw-r--r--  src/resolver/resolve_path.zig     78
-rw-r--r--  src/resolver/resolver.zig         88
-rw-r--r--  src/runtime.js                    32
-rw-r--r--  src/runtime.version                2
-rw-r--r--  src/runtime.zig                   25
20 files changed, 1823 insertions, 1100 deletions
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index 2162d6901..8d8ae010e 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -137,6 +137,7 @@ type uint32 = number;
path: StringPointer;
code: StringPointer;
package_id: uint32;
+ path_extname_length: byte;
}
export interface JavascriptBundledPackage {
@@ -174,22 +175,30 @@ type uint32 = number;
imports: ModuleImportRecord[];
}
+ export interface StringMap {
+ keys: string[];
+ values: string[];
+ }
+
+ export interface LoaderMap {
+ extensions: string[];
+ loaders: Loader[];
+ }
+
export interface TransformOptions {
jsx?: JSX;
tsconfig_override?: string;
resolve?: ResolveMode;
public_url?: string;
absolute_working_dir?: string;
- define_keys?: string[];
- define_values?: string[];
+ define?: StringMap;
preserve_symlinks?: boolean;
entry_points?: string[];
write?: boolean;
inject?: string[];
output_dir?: string;
external?: string[];
- loader_keys?: string[];
- loader_values?: Loader[];
+ loaders?: LoaderMap;
main_fields?: string[];
platform?: Platform;
serve?: boolean;
@@ -197,6 +206,7 @@ type uint32 = number;
public_dir?: string;
only_scan_dependencies?: ScanDependencyMode;
generate_node_module_bundle?: boolean;
+ node_modules_bundle_path?: string;
}
export interface FileHandle {
@@ -267,6 +277,10 @@ type uint32 = number;
export declare function decodeModuleImportRecord(buffer: ByteBuffer): ModuleImportRecord;
export declare function encodeModule(message: Module, bb: ByteBuffer): void;
export declare function decodeModule(buffer: ByteBuffer): Module;
+ export declare function encodeStringMap(message: StringMap, bb: ByteBuffer): void;
+ export declare function decodeStringMap(buffer: ByteBuffer): StringMap;
+ export declare function encodeLoaderMap(message: LoaderMap, bb: ByteBuffer): void;
+ export declare function decodeLoaderMap(buffer: ByteBuffer): LoaderMap;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
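
For orientation, a hedged TypeScript sketch (not part of the diff) of what a TransformOptions value looks like after this change: define and loaders become single nested objects instead of the old parallel *_keys/*_values arrays. The concrete keys, extensions, and paths below are illustrative only.

// Illustrative shape only; field names follow the updated schema.d.ts above.
const options = {
  define: {
    keys: ["process.env.NODE_ENV"],
    values: ['"production"'],
  },
  loaders: {
    extensions: [".svg"],
    loaders: ["file"], // Loader enum value, looked up by name in the generated schema.js
  },
  generate_node_module_bundle: true,
  node_modules_bundle_path: "./node_modules.jsb", // hypothetical path
};
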
diff --git a/src/api/schema.js b/src/api/schema.js
index 96d3b54fd..3f0bb8179 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -167,6 +167,7 @@ function decodeJavascriptBundledModule(bb) {
result["path"] = decodeStringPointer(bb);
result["code"] = decodeStringPointer(bb);
result["package_id"] = bb.readUint32();
+ result["path_extname_length"] = bb.readByte();
return result;
}
@@ -193,6 +194,13 @@ function encodeJavascriptBundledModule(message, bb) {
throw new Error("Missing required field \"package_id\"");
}
+ var value = message["path_extname_length"];
+ if (value != null) {
+ bb.writeByte(value);
+ } else {
+ throw new Error("Missing required field \"path_extname_length\"");
+ }
+
}
function decodeJavascriptBundledPackage(bb) {
@@ -467,6 +475,88 @@ function encodeModule(message, bb) {
}
+function decodeStringMap(bb) {
+ var result = {};
+
+ var length = bb.readVarUint();
+ var values = result["keys"] = Array(length);
+ for (var i = 0; i < length; i++) values[i] = bb.readString();
+ var length = bb.readVarUint();
+ var values = result["values"] = Array(length);
+ for (var i = 0; i < length; i++) values[i] = bb.readString();
+ return result;
+}
+
+function encodeStringMap(message, bb) {
+
+ var value = message["keys"];
+ if (value != null) {
+ var values = value, n = values.length;
+ bb.writeVarUint(n);
+ for (var i = 0; i < n; i++) {
+ value = values[i];
+ bb.writeString(value);
+ }
+ } else {
+ throw new Error("Missing required field \"keys\"");
+ }
+
+ var value = message["values"];
+ if (value != null) {
+ var values = value, n = values.length;
+ bb.writeVarUint(n);
+ for (var i = 0; i < n; i++) {
+ value = values[i];
+ bb.writeString(value);
+ }
+ } else {
+ throw new Error("Missing required field \"values\"");
+ }
+
+}
+
+function decodeLoaderMap(bb) {
+ var result = {};
+
+ var length = bb.readVarUint();
+ var values = result["extensions"] = Array(length);
+ for (var i = 0; i < length; i++) values[i] = bb.readString();
+ var length = bb.readVarUint();
+ var values = result["loaders"] = Array(length);
+ for (var i = 0; i < length; i++) values[i] = Loader[bb.readByte()];
+ return result;
+}
+
+function encodeLoaderMap(message, bb) {
+
+ var value = message["extensions"];
+ if (value != null) {
+ var values = value, n = values.length;
+ bb.writeVarUint(n);
+ for (var i = 0; i < n; i++) {
+ value = values[i];
+ bb.writeString(value);
+ }
+ } else {
+ throw new Error("Missing required field \"extensions\"");
+ }
+
+ var value = message["loaders"];
+ if (value != null) {
+ var values = value, n = values.length;
+ bb.writeVarUint(n);
+ for (var i = 0; i < n; i++) {
+ value = values[i];
+ var encoded = Loader[value];
+if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
+bb.writeByte(encoded);
+ }
+ } else {
+ throw new Error("Missing required field \"loaders\"");
+ }
+
+}
+
function decodeTransformOptions(bb) {
var result = {};
@@ -496,91 +586,79 @@ function decodeTransformOptions(bb) {
break;
case 6:
- var length = bb.readVarUint();
- var values = result["define_keys"] = Array(length);
- for (var i = 0; i < length; i++) values[i] = bb.readString();
+ result["define"] = decodeStringMap(bb);
break;
case 7:
- var length = bb.readVarUint();
- var values = result["define_values"] = Array(length);
- for (var i = 0; i < length; i++) values[i] = bb.readString();
- break;
-
- case 8:
result["preserve_symlinks"] = !!bb.readByte();
break;
- case 9:
+ case 8:
var length = bb.readVarUint();
var values = result["entry_points"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
- case 10:
+ case 9:
result["write"] = !!bb.readByte();
break;
- case 11:
+ case 10:
var length = bb.readVarUint();
var values = result["inject"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
- case 12:
+ case 11:
result["output_dir"] = bb.readString();
break;
- case 13:
+ case 12:
var length = bb.readVarUint();
var values = result["external"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
- case 14:
- var length = bb.readVarUint();
- var values = result["loader_keys"] = Array(length);
- for (var i = 0; i < length; i++) values[i] = bb.readString();
- break;
-
- case 15:
- var length = bb.readVarUint();
- var values = result["loader_values"] = Array(length);
- for (var i = 0; i < length; i++) values[i] = Loader[bb.readByte()];
+ case 13:
+ result["loaders"] = decodeLoaderMap(bb);
break;
- case 16:
+ case 14:
var length = bb.readVarUint();
var values = result["main_fields"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
- case 17:
+ case 15:
result["platform"] = Platform[bb.readByte()];
break;
- case 18:
+ case 16:
result["serve"] = !!bb.readByte();
break;
- case 19:
+ case 17:
var length = bb.readVarUint();
var values = result["extension_order"] = Array(length);
for (var i = 0; i < length; i++) values[i] = bb.readString();
break;
- case 20:
+ case 18:
result["public_dir"] = bb.readString();
break;
- case 21:
+ case 19:
result["only_scan_dependencies"] = ScanDependencyMode[bb.readByte()];
break;
- case 22:
+ case 20:
result["generate_node_module_bundle"] = !!bb.readByte();
break;
+ case 21:
+ result["node_modules_bundle_path"] = bb.readString();
+ break;
+
default:
throw new Error("Attempted to parse invalid message");
}
@@ -621,37 +699,21 @@ bb.writeByte(encoded);
bb.writeString(value);
}
- var value = message["define_keys"];
+ var value = message["define"];
if (value != null) {
bb.writeByte(6);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0; i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
-
- var value = message["define_values"];
- if (value != null) {
- bb.writeByte(7);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0; i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
+ encodeStringMap(value, bb);
}
var value = message["preserve_symlinks"];
if (value != null) {
- bb.writeByte(8);
+ bb.writeByte(7);
bb.writeByte(value);
}
var value = message["entry_points"];
if (value != null) {
- bb.writeByte(9);
+ bb.writeByte(8);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
@@ -662,13 +724,13 @@ bb.writeByte(encoded);
var value = message["write"];
if (value != null) {
- bb.writeByte(10);
+ bb.writeByte(9);
bb.writeByte(value);
}
var value = message["inject"];
if (value != null) {
- bb.writeByte(11);
+ bb.writeByte(10);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
@@ -679,24 +741,13 @@ bb.writeByte(encoded);
var value = message["output_dir"];
if (value != null) {
- bb.writeByte(12);
+ bb.writeByte(11);
bb.writeString(value);
}
var value = message["external"];
if (value != null) {
- bb.writeByte(13);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0; i < n; i++) {
- value = values[i];
- bb.writeString(value);
- }
- }
-
- var value = message["loader_keys"];
- if (value != null) {
- bb.writeByte(14);
+ bb.writeByte(12);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
@@ -705,22 +756,15 @@ bb.writeByte(encoded);
}
}
- var value = message["loader_values"];
+ var value = message["loaders"];
if (value != null) {
- bb.writeByte(15);
- var values = value, n = values.length;
- bb.writeVarUint(n);
- for (var i = 0; i < n; i++) {
- value = values[i];
- var encoded = Loader[value];
-if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
-bb.writeByte(encoded);
- }
+ bb.writeByte(13);
+ encodeLoaderMap(value, bb);
}
var value = message["main_fields"];
if (value != null) {
- bb.writeByte(16);
+ bb.writeByte(14);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
@@ -731,7 +775,7 @@ bb.writeByte(encoded);
var value = message["platform"];
if (value != null) {
- bb.writeByte(17);
+ bb.writeByte(15);
var encoded = Platform[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Platform\"");
bb.writeByte(encoded);
@@ -739,13 +783,13 @@ bb.writeByte(encoded);
var value = message["serve"];
if (value != null) {
- bb.writeByte(18);
+ bb.writeByte(16);
bb.writeByte(value);
}
var value = message["extension_order"];
if (value != null) {
- bb.writeByte(19);
+ bb.writeByte(17);
var values = value, n = values.length;
bb.writeVarUint(n);
for (var i = 0; i < n; i++) {
@@ -756,13 +800,13 @@ bb.writeByte(encoded);
var value = message["public_dir"];
if (value != null) {
- bb.writeByte(20);
+ bb.writeByte(18);
bb.writeString(value);
}
var value = message["only_scan_dependencies"];
if (value != null) {
- bb.writeByte(21);
+ bb.writeByte(19);
var encoded = ScanDependencyMode[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"ScanDependencyMode\"");
bb.writeByte(encoded);
@@ -770,9 +814,15 @@ bb.writeByte(encoded);
var value = message["generate_node_module_bundle"];
if (value != null) {
- bb.writeByte(22);
+ bb.writeByte(20);
bb.writeByte(value);
}
+
+ var value = message["node_modules_bundle_path"];
+ if (value != null) {
+ bb.writeByte(21);
+ bb.writeString(value);
+ }
bb.writeByte(0);
}
@@ -1207,6 +1257,10 @@ export { decodeModuleImportRecord }
export { encodeModuleImportRecord }
export { decodeModule }
export { encodeModule }
+export { decodeStringMap }
+export { encodeStringMap }
+export { decodeLoaderMap }
+export { encodeLoaderMap }
export { decodeTransformOptions }
export { encodeTransformOptions }
export { decodeFileHandle }
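
To make the new helpers concrete, here is a hedged sketch of round-tripping a StringMap through the generated encode/decode functions. The FakeByteBuffer below is a stand-in that only mirrors the call order used above (writeVarUint/writeString and their read counterparts); it is not peechy's real byte-level ByteBuffer.

import { encodeStringMap, decodeStringMap } from "./schema";

// Records writes in order and replays them on read; enough to exercise the
// keys-then-values layout of StringMap, but not the real wire encoding.
class FakeByteBuffer {
  private items: Array<number | string> = [];
  private cursor = 0;
  writeVarUint(n: number) { this.items.push(n); }
  writeString(s: string) { this.items.push(s); }
  writeByte(b: number) { this.items.push(b); }
  readVarUint(): number { return this.items[this.cursor++] as number; }
  readString(): string { return this.items[this.cursor++] as string; }
  readByte(): number { return this.items[this.cursor++] as number; }
}

const bb = new FakeByteBuffer();
encodeStringMap({ keys: ["process.env.NODE_ENV"], values: ['"development"'] }, bb as any);
const define = decodeStringMap(bb as any);
// define.keys[0] === "process.env.NODE_ENV", define.values[0] === '"development"'
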
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index 1c21c7d5e..f893c525b 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -49,7 +49,9 @@ struct JavascriptBundledModule {
StringPointer path;
StringPointer code;
uint32 package_id;
-
+ // This lets us efficiently compare strings while ignoring the extension.
+ // If we instead omitted the extension, we could not recover the original path.
+ byte path_extname_length;
}
struct JavascriptBundledPackage {
@@ -105,6 +107,16 @@ struct Module {
ModuleImportRecord[] imports;
}
+struct StringMap {
+ string[] keys;
+ string[] values;
+}
+
+struct LoaderMap {
+ string[] extensions;
+ Loader[] loaders;
+}
+
message TransformOptions {
JSX jsx = 1;
string tsconfig_override = 2;
@@ -113,35 +125,34 @@ message TransformOptions {
string public_url = 4;
string absolute_working_dir = 5;
+ StringMap define = 6;
- string[] define_keys = 6;
- string[] define_values = 7;
+ bool preserve_symlinks = 7;
- bool preserve_symlinks = 8;
+ string[] entry_points = 8;
+ bool write = 9;
- string[] entry_points = 9;
- bool write = 10;
+ string[] inject = 10;
+ string output_dir = 11;
- string[] inject = 11;
- string output_dir = 12;
+ string[] external = 12;
- string[] external = 13;
+ LoaderMap loaders = 13;
- string[] loader_keys = 14;
- Loader[] loader_values = 15;
+ string[] main_fields = 14;
+ Platform platform = 15;
- string[] main_fields = 16;
- Platform platform = 17;
+ bool serve = 16;
- bool serve = 18;
+ string[] extension_order = 17;
- string[] extension_order = 19;
+ string public_dir = 18;
- string public_dir = 20;
+ ScanDependencyMode only_scan_dependencies = 19;
- ScanDependencyMode only_scan_dependencies = 21;
+ bool generate_node_module_bundle = 20;
- bool generate_node_module_bundle = 22;
+ string node_modules_bundle_path = 21;
}
struct FileHandle {
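
As a usage note (my assumption, not shown in this diff): path_extname_length lets a consumer compare bundled module paths while ignoring the extension without re-scanning the string. A minimal TypeScript sketch, assuming path is a StringPointer into the bundle's manifest_string and offsets can be treated as string indices:

// Illustrative only: `manifest` stands in for the decoded manifest_string and
// `mod` for a JavascriptBundledModule.
function pathWithoutExtension(
  manifest: string,
  mod: { path: { offset: number; length: number }; path_extname_length: number },
): string {
  const full = manifest.slice(mod.path.offset, mod.path.offset + mod.path.length);
  return full.slice(0, full.length - mod.path_extname_length);
}

// e.g. pathWithoutExtension(manifest, mod) === "react/index" when the stored
// path is "react/index.js" and path_extname_length is 3.
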
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 0e7cb6fcc..4a2e44f9b 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -1,3 +1,4 @@
+
const std = @import("std");
pub const Reader = struct {
@@ -58,7 +59,7 @@ pub const Reader = struct {
return E.InvalidValue;
}
- pub fn readArray(this: *Self, comptime T: type) ![]T {
+ pub fn readArray(this: *Self, comptime T: type) ![]const T {
const length = try this.readInt(u32);
if (length == 0) {
return &([_]T{});
@@ -92,7 +93,8 @@ pub const Reader = struct {
}
},
.Enum => |type_info| {
- return std.meta.cast([]T, std.mem.readIntSliceNative(type_info.tag_type, try this.read(length * @sizeOf(type_info.tag_type))));
+ const enum_values = try this.read(length * @sizeOf(type_info.tag_type));
+ return @ptrCast([*]T, enum_values.ptr)[0..length];
},
else => {},
}
@@ -136,6 +138,10 @@ pub const Reader = struct {
return try this.readByte();
},
[]const u8 => {
+ return try this.readArray(u8);
+ },
+
+ []const []const u8 => {
return try this.readArray([]const u8);
},
[]u8 => {
@@ -274,955 +280,1053 @@ pub fn Writer(comptime WritableStream: type) type {
pub const ByteWriter = Writer(std.io.FixedBufferStream([]u8));
pub const FileWriter = Writer(std.fs.File);
-pub const Api = struct {
- pub const Loader = enum(u8) {
- _none,
- /// jsx
- jsx,
- /// js
- js,
- /// ts
- ts,
- /// tsx
- tsx,
+pub const Api = struct {
- /// css
- css,
+pub const Loader = enum(u8) {
- /// file
- file,
+_none,
+ /// jsx
+ jsx,
- /// json
- json,
+ /// js
+ js,
- _,
+ /// ts
+ ts,
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
+ /// tsx
+ tsx,
- pub const ResolveMode = enum(u8) {
- _none,
- /// disable
- disable,
+ /// css
+ css,
- /// lazy
- lazy,
+ /// file
+ file,
- /// dev
- dev,
+ /// json
+ json,
- /// bundle
- bundle,
+_,
- _,
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
+
+};
- pub const Platform = enum(u8) {
- _none,
- /// browser
- browser,
+pub const ResolveMode = enum(u8) {
- /// node
- node,
+_none,
+ /// disable
+ disable,
- _,
+ /// lazy
+ lazy,
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
+ /// dev
+ dev,
- pub const JsxRuntime = enum(u8) {
- _none,
- /// automatic
- automatic,
+ /// bundle
+ bundle,
- /// classic
- classic,
+_,
- _,
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
+
+};
- pub const Jsx = struct {
- /// factory
- factory: []const u8,
+pub const Platform = enum(u8) {
- /// runtime
- runtime: JsxRuntime,
+_none,
+ /// browser
+ browser,
- /// fragment
- fragment: []const u8,
+ /// node
+ node,
- /// development
- development: bool = false,
+_,
- /// import_source
- import_source: []const u8,
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
- /// react_fast_refresh
- react_fast_refresh: bool = false,
+
+};
- pub fn decode(reader: anytype) anyerror!Jsx {
- var this = std.mem.zeroes(Jsx);
+pub const JsxRuntime = enum(u8) {
- this.factory = try reader.readValue([]const u8);
- this.runtime = try reader.readValue(JsxRuntime);
- this.fragment = try reader.readValue([]const u8);
- this.development = try reader.readValue(bool);
- this.import_source = try reader.readValue([]const u8);
- this.react_fast_refresh = try reader.readValue(bool);
- return this;
- }
+_none,
+ /// automatic
+ automatic,
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeValue(this.factory);
- try writer.writeEnum(this.runtime);
- try writer.writeValue(this.fragment);
- try writer.writeInt(@intCast(u8, @boolToInt(this.development)));
- try writer.writeValue(this.import_source);
- try writer.writeInt(@intCast(u8, @boolToInt(this.react_fast_refresh)));
- }
- };
+ /// classic
+ classic,
- pub const StringPointer = packed struct {
- /// offset
- offset: u32 = 0,
+_,
- /// length
- length: u32 = 0,
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
- pub fn decode(reader: anytype) anyerror!StringPointer {
- var this = std.mem.zeroes(StringPointer);
+
+};
- this.offset = try reader.readValue(u32);
- this.length = try reader.readValue(u32);
- return this;
- }
+pub const Jsx = struct {
+/// factory
+factory: []const u8,
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeInt(this.offset);
- try writer.writeInt(this.length);
- }
- };
+/// runtime
+runtime: JsxRuntime,
- pub const JavascriptBundledModule = struct {
- /// path
- path: StringPointer,
+/// fragment
+fragment: []const u8,
- /// code
- code: StringPointer,
+/// development
+development: bool = false,
- /// package_id
- package_id: u32 = 0,
+/// import_source
+import_source: []const u8,
- pub fn decode(reader: anytype) anyerror!JavascriptBundledModule {
- var this = std.mem.zeroes(JavascriptBundledModule);
+/// react_fast_refresh
+react_fast_refresh: bool = false,
- this.path = try reader.readValue(StringPointer);
- this.code = try reader.readValue(StringPointer);
- this.package_id = try reader.readValue(u32);
- return this;
- }
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeValue(this.path);
- try writer.writeValue(this.code);
- try writer.writeInt(this.package_id);
- }
- };
+pub fn decode(reader: anytype) anyerror!Jsx {
+ var this = std.mem.zeroes(Jsx);
- pub const JavascriptBundledPackage = struct {
- /// name
- name: StringPointer,
+ this.factory = try reader.readValue([]const u8);
+ this.runtime = try reader.readValue(JsxRuntime);
+ this.fragment = try reader.readValue([]const u8);
+ this.development = try reader.readValue(bool);
+ this.import_source = try reader.readValue([]const u8);
+ this.react_fast_refresh = try reader.readValue(bool);
+ return this;
+}
- /// version
- version: StringPointer,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(this.factory);
+ try writer.writeEnum(this.runtime);
+ try writer.writeValue(this.fragment);
+ try writer.writeInt(@intCast(u8, @boolToInt(this.development)));
+ try writer.writeValue(this.import_source);
+ try writer.writeInt(@intCast(u8, @boolToInt(this.react_fast_refresh)));
+}
- /// hash
- hash: u32 = 0,
+};
- /// modules_offset
- modules_offset: u32 = 0,
+pub const StringPointer = packed struct {
+/// offset
+offset: u32 = 0,
- /// modules_length
- modules_length: u32 = 0,
+/// length
+length: u32 = 0,
- pub fn decode(reader: anytype) anyerror!JavascriptBundledPackage {
- var this = std.mem.zeroes(JavascriptBundledPackage);
- this.name = try reader.readValue(StringPointer);
- this.version = try reader.readValue(StringPointer);
- this.hash = try reader.readValue(u32);
- this.modules_offset = try reader.readValue(u32);
- this.modules_length = try reader.readValue(u32);
- return this;
- }
+pub fn decode(reader: anytype) anyerror!StringPointer {
+ var this = std.mem.zeroes(StringPointer);
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeValue(this.name);
- try writer.writeValue(this.version);
- try writer.writeInt(this.hash);
- try writer.writeInt(this.modules_offset);
- try writer.writeInt(this.modules_length);
- }
- };
+ this.offset = try reader.readValue(u32);
+ this.length = try reader.readValue(u32);
+ return this;
+}
- pub const JavascriptBundle = struct {
- /// modules
- modules: []JavascriptBundledModule,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeInt(this.offset);
+ try writer.writeInt(this.length);
+}
- /// packages
- packages: []JavascriptBundledPackage,
+};
- /// etag
- etag: []const u8,
+pub const JavascriptBundledModule = struct {
+/// path
+path: StringPointer,
- /// generated_at
- generated_at: u32 = 0,
+/// code
+code: StringPointer,
- /// app_package_json_dependencies_hash
- app_package_json_dependencies_hash: []const u8,
+/// package_id
+package_id: u32 = 0,
- /// import_from_name
- import_from_name: []const u8,
+/// path_extname_length
+path_extname_length: u8 = 0,
- /// manifest_string
- manifest_string: []const u8,
- pub fn decode(reader: anytype) anyerror!JavascriptBundle {
- var this = std.mem.zeroes(JavascriptBundle);
+pub fn decode(reader: anytype) anyerror!JavascriptBundledModule {
+ var this = std.mem.zeroes(JavascriptBundledModule);
- this.modules = try reader.readArray(JavascriptBundledModule);
- this.packages = try reader.readArray(JavascriptBundledPackage);
- this.etag = try reader.readArray(u8);
- this.generated_at = try reader.readValue(u32);
- this.app_package_json_dependencies_hash = try reader.readArray(u8);
- this.import_from_name = try reader.readArray(u8);
- this.manifest_string = try reader.readArray(u8);
- return this;
- }
+ this.path = try reader.readValue(StringPointer);
+ this.code = try reader.readValue(StringPointer);
+ this.package_id = try reader.readValue(u32);
+ this.path_extname_length = try reader.readValue(u8);
+ return this;
+}
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeArray(JavascriptBundledModule, this.modules);
- try writer.writeArray(JavascriptBundledPackage, this.packages);
- try writer.writeArray(u8, this.etag);
- try writer.writeInt(this.generated_at);
- try writer.writeArray(u8, this.app_package_json_dependencies_hash);
- try writer.writeArray(u8, this.import_from_name);
- try writer.writeArray(u8, this.manifest_string);
- }
- };
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(this.path);
+ try writer.writeValue(this.code);
+ try writer.writeInt(this.package_id);
+ try writer.writeInt(this.path_extname_length);
+}
- pub const JavascriptBundleContainer = struct {
- /// bundle_format_version
- bundle_format_version: ?u32 = null,
+};
- /// bundle
- bundle: ?JavascriptBundle = null,
+pub const JavascriptBundledPackage = struct {
+/// name
+name: StringPointer,
- /// code_length
- code_length: ?u32 = null,
+/// version
+version: StringPointer,
- pub fn decode(reader: anytype) anyerror!JavascriptBundleContainer {
- var this = std.mem.zeroes(JavascriptBundleContainer);
+/// hash
+hash: u32 = 0,
- while (true) {
- switch (try reader.readByte()) {
- 0 => {
- return this;
- },
+/// modules_offset
+modules_offset: u32 = 0,
- 1 => {
- this.bundle_format_version = try reader.readValue(u32);
- },
- 2 => {
- this.bundle = try reader.readValue(JavascriptBundle);
- },
- 3 => {
- this.code_length = try reader.readValue(u32);
- },
- else => {
- return error.InvalidMessage;
- },
- }
- }
- unreachable;
- }
+/// modules_length
+modules_length: u32 = 0,
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- if (this.bundle_format_version) |bundle_format_version| {
- try writer.writeFieldID(1);
- try writer.writeInt(bundle_format_version);
- }
- if (this.bundle) |bundle| {
- try writer.writeFieldID(2);
- try writer.writeValue(bundle);
- }
- if (this.code_length) |code_length| {
- try writer.writeFieldID(3);
- try writer.writeInt(code_length);
- }
- try writer.endMessage();
- }
- };
- pub const ScanDependencyMode = enum(u8) {
- _none,
- /// app
- app,
+pub fn decode(reader: anytype) anyerror!JavascriptBundledPackage {
+ var this = std.mem.zeroes(JavascriptBundledPackage);
- /// all
- all,
+ this.name = try reader.readValue(StringPointer);
+ this.version = try reader.readValue(StringPointer);
+ this.hash = try reader.readValue(u32);
+ this.modules_offset = try reader.readValue(u32);
+ this.modules_length = try reader.readValue(u32);
+ return this;
+}
- _,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(this.name);
+ try writer.writeValue(this.version);
+ try writer.writeInt(this.hash);
+ try writer.writeInt(this.modules_offset);
+ try writer.writeInt(this.modules_length);
+}
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
+};
- pub const ModuleImportType = enum(u8) {
- _none,
- /// import
- import,
+pub const JavascriptBundle = struct {
+/// modules
+modules: []const JavascriptBundledModule,
- /// require
- require,
+/// packages
+packages: []const JavascriptBundledPackage,
- _,
+/// etag
+etag: []const u8,
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
+/// generated_at
+generated_at: u32 = 0,
- pub const ModuleImportRecord = struct {
- /// kind
- kind: ModuleImportType,
+/// app_package_json_dependencies_hash
+app_package_json_dependencies_hash: []const u8,
- /// path
- path: []const u8,
+/// import_from_name
+import_from_name: []const u8,
- /// dynamic
- dynamic: bool = false,
+/// manifest_string
+manifest_string: []const u8,
- pub fn decode(reader: anytype) anyerror!ModuleImportRecord {
- var this = std.mem.zeroes(ModuleImportRecord);
- this.kind = try reader.readValue(ModuleImportType);
- this.path = try reader.readValue([]const u8);
- this.dynamic = try reader.readValue(bool);
- return this;
- }
+pub fn decode(reader: anytype) anyerror!JavascriptBundle {
+ var this = std.mem.zeroes(JavascriptBundle);
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeEnum(this.kind);
- try writer.writeValue(this.path);
- try writer.writeInt(@intCast(u8, @boolToInt(this.dynamic)));
- }
- };
+ this.modules = try reader.readArray(JavascriptBundledModule);
+ this.packages = try reader.readArray(JavascriptBundledPackage);
+ this.etag = try reader.readArray(u8);
+ this.generated_at = try reader.readValue(u32);
+ this.app_package_json_dependencies_hash = try reader.readArray(u8);
+ this.import_from_name = try reader.readArray(u8);
+ this.manifest_string = try reader.readArray(u8);
+ return this;
+}
- pub const Module = struct {
- /// path
- path: []const u8,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeArray(JavascriptBundledModule, this.modules);
+ try writer.writeArray(JavascriptBundledPackage, this.packages);
+ try writer.writeArray(u8, this.etag);
+ try writer.writeInt(this.generated_at);
+ try writer.writeArray(u8, this.app_package_json_dependencies_hash);
+ try writer.writeArray(u8, this.import_from_name);
+ try writer.writeArray(u8, this.manifest_string);
+}
- /// imports
- imports: []ModuleImportRecord,
+};
- pub fn decode(reader: anytype) anyerror!Module {
- var this = std.mem.zeroes(Module);
+pub const JavascriptBundleContainer = struct {
+/// bundle_format_version
+bundle_format_version: ?u32 = null,
+
+/// bundle
+bundle: ?JavascriptBundle = null,
+
+/// code_length
+code_length: ?u32 = null,
+
+
+pub fn decode(reader: anytype) anyerror!JavascriptBundleContainer {
+ var this = std.mem.zeroes(JavascriptBundleContainer);
+
+ while(true) {
+ switch (try reader.readByte()) {
+ 0 => { return this; },
+
+ 1 => {
+ this.bundle_format_version = try reader.readValue(u32);
+},
+ 2 => {
+ this.bundle = try reader.readValue(JavascriptBundle);
+},
+ 3 => {
+ this.code_length = try reader.readValue(u32);
+},
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+unreachable;
+}
- this.path = try reader.readValue([]const u8);
- this.imports = try reader.readArray(ModuleImportRecord);
- return this;
- }
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+if (this.bundle_format_version) |bundle_format_version| {
+ try writer.writeFieldID(1);
+ try writer.writeInt(bundle_format_version);
+}
+if (this.bundle) |bundle| {
+ try writer.writeFieldID(2);
+ try writer.writeValue(bundle);
+}
+if (this.code_length) |code_length| {
+ try writer.writeFieldID(3);
+ try writer.writeInt(code_length);
+}
+try writer.endMessage();
+}
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeValue(this.path);
- try writer.writeArray(ModuleImportRecord, this.imports);
- }
- };
+};
- pub const TransformOptions = struct {
- /// jsx
- jsx: ?Jsx = null,
+pub const ScanDependencyMode = enum(u8) {
- /// tsconfig_override
- tsconfig_override: ?[]const u8 = null,
+_none,
+ /// app
+ app,
- /// resolve
- resolve: ?ResolveMode = null,
+ /// all
+ all,
- /// public_url
- public_url: ?[]const u8 = null,
+_,
- /// absolute_working_dir
- absolute_working_dir: ?[]const u8 = null,
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
- /// define_keys
- define_keys: []const []const u8,
+
+};
- /// define_values
- define_values: []const []const u8,
+pub const ModuleImportType = enum(u8) {
- /// preserve_symlinks
- preserve_symlinks: ?bool = null,
+_none,
+ /// import
+ import,
- /// entry_points
- entry_points: []const []const u8,
+ /// require
+ require,
- /// write
- write: ?bool = null,
+_,
- /// inject
- inject: []const []const u8,
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
- /// output_dir
- output_dir: ?[]const u8 = null,
+
+};
- /// external
- external: []const []const u8,
+pub const ModuleImportRecord = struct {
+/// kind
+kind: ModuleImportType,
- /// loader_keys
- loader_keys: []const []const u8,
+/// path
+path: []const u8,
- /// loader_values
- loader_values: []const Loader,
+/// dynamic
+dynamic: bool = false,
- /// main_fields
- main_fields: []const []const u8,
- /// platform
- platform: ?Platform = null,
+pub fn decode(reader: anytype) anyerror!ModuleImportRecord {
+ var this = std.mem.zeroes(ModuleImportRecord);
- /// serve
- serve: ?bool = null,
+ this.kind = try reader.readValue(ModuleImportType);
+ this.path = try reader.readValue([]const u8);
+ this.dynamic = try reader.readValue(bool);
+ return this;
+}
- /// extension_order
- extension_order: []const []const u8,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeEnum(this.kind);
+ try writer.writeValue(this.path);
+ try writer.writeInt(@intCast(u8, @boolToInt(this.dynamic)));
+}
- /// public_dir
- public_dir: ?[]const u8 = null,
+};
- /// only_scan_dependencies
- only_scan_dependencies: ?ScanDependencyMode = null,
+pub const Module = struct {
+/// path
+path: []const u8,
- /// generate_node_module_bundle
- generate_node_module_bundle: ?bool = null,
+/// imports
+imports: []const ModuleImportRecord,
- pub fn decode(reader: anytype) anyerror!TransformOptions {
- var this = std.mem.zeroes(TransformOptions);
- while (true) {
- switch (try reader.readByte()) {
- 0 => {
- return this;
- },
+pub fn decode(reader: anytype) anyerror!Module {
+ var this = std.mem.zeroes(Module);
- 1 => {
- this.jsx = try reader.readValue(Jsx);
- },
- 2 => {
- this.tsconfig_override = try reader.readValue([]const u8);
- },
- 3 => {
- this.resolve = try reader.readValue(ResolveMode);
- },
- 4 => {
- this.public_url = try reader.readValue([]const u8);
- },
- 5 => {
- this.absolute_working_dir = try reader.readValue([]const u8);
- },
- 6 => {
- this.define_keys = try reader.readArray([]const u8);
- },
- 7 => {
- this.define_values = try reader.readArray([]const u8);
- },
- 8 => {
- this.preserve_symlinks = try reader.readValue(bool);
- },
- 9 => {
- this.entry_points = try reader.readArray([]const u8);
- },
- 10 => {
- this.write = try reader.readValue(bool);
- },
- 11 => {
- this.inject = try reader.readArray([]const u8);
- },
- 12 => {
- this.output_dir = try reader.readValue([]const u8);
- },
- 13 => {
- this.external = try reader.readArray([]const u8);
- },
- 14 => {
- this.loader_keys = try reader.readArray([]const u8);
- },
- 15 => {
- this.loader_values = try reader.readArray(Loader);
- },
- 16 => {
- this.main_fields = try reader.readArray([]const u8);
- },
- 17 => {
- this.platform = try reader.readValue(Platform);
- },
- 18 => {
- this.serve = try reader.readValue(bool);
- },
- 19 => {
- this.extension_order = try reader.readArray([]const u8);
- },
- 20 => {
- this.public_dir = try reader.readValue([]const u8);
- },
- 21 => {
- this.only_scan_dependencies = try reader.readValue(ScanDependencyMode);
- },
- 22 => {
- this.generate_node_module_bundle = try reader.readValue(bool);
- },
- else => {
- return error.InvalidMessage;
- },
- }
- }
- unreachable;
- }
+ this.path = try reader.readValue([]const u8);
+ this.imports = try reader.readArray(ModuleImportRecord);
+ return this;
+}
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- if (this.jsx) |jsx| {
- try writer.writeFieldID(1);
- try writer.writeValue(jsx);
- }
- if (this.tsconfig_override) |tsconfig_override| {
- try writer.writeFieldID(2);
- try writer.writeValue(tsconfig_override);
- }
- if (this.resolve) |resolve| {
- try writer.writeFieldID(3);
- try writer.writeEnum(resolve);
- }
- if (this.public_url) |public_url| {
- try writer.writeFieldID(4);
- try writer.writeValue(public_url);
- }
- if (this.absolute_working_dir) |absolute_working_dir| {
- try writer.writeFieldID(5);
- try writer.writeValue(absolute_working_dir);
- }
- if (this.define_keys) |define_keys| {
- try writer.writeFieldID(6);
- try writer.writeArray([]const u8, define_keys);
- }
- if (this.define_values) |define_values| {
- try writer.writeFieldID(7);
- try writer.writeArray([]const u8, define_values);
- }
- if (this.preserve_symlinks) |preserve_symlinks| {
- try writer.writeFieldID(8);
- try writer.writeInt(@intCast(u8, @boolToInt(preserve_symlinks)));
- }
- if (this.entry_points) |entry_points| {
- try writer.writeFieldID(9);
- try writer.writeArray([]const u8, entry_points);
- }
- if (this.write) |write| {
- try writer.writeFieldID(10);
- try writer.writeInt(@intCast(u8, @boolToInt(write)));
- }
- if (this.inject) |inject| {
- try writer.writeFieldID(11);
- try writer.writeArray([]const u8, inject);
- }
- if (this.output_dir) |output_dir| {
- try writer.writeFieldID(12);
- try writer.writeValue(output_dir);
- }
- if (this.external) |external| {
- try writer.writeFieldID(13);
- try writer.writeArray([]const u8, external);
- }
- if (this.loader_keys) |loader_keys| {
- try writer.writeFieldID(14);
- try writer.writeArray([]const u8, loader_keys);
- }
- if (this.loader_values) |loader_values| {
- try writer.writeFieldID(15);
- try writer.writeArray(Loader, loader_values);
- }
- if (this.main_fields) |main_fields| {
- try writer.writeFieldID(16);
- try writer.writeArray([]const u8, main_fields);
- }
- if (this.platform) |platform| {
- try writer.writeFieldID(17);
- try writer.writeEnum(platform);
- }
- if (this.serve) |serve| {
- try writer.writeFieldID(18);
- try writer.writeInt(@intCast(u8, @boolToInt(serve)));
- }
- if (this.extension_order) |extension_order| {
- try writer.writeFieldID(19);
- try writer.writeArray([]const u8, extension_order);
- }
- if (this.public_dir) |public_dir| {
- try writer.writeFieldID(20);
- try writer.writeValue(public_dir);
- }
- if (this.only_scan_dependencies) |only_scan_dependencies| {
- try writer.writeFieldID(21);
- try writer.writeEnum(only_scan_dependencies);
- }
- if (this.generate_node_module_bundle) |generate_node_module_bundle| {
- try writer.writeFieldID(22);
- try writer.writeInt(@intCast(u8, @boolToInt(generate_node_module_bundle)));
- }
- try writer.endMessage();
- }
- };
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(this.path);
+ try writer.writeArray(ModuleImportRecord, this.imports);
+}
- pub const FileHandle = struct {
- /// path
- path: []const u8,
+};
- /// size
- size: u32 = 0,
+pub const StringMap = struct {
+/// keys
+keys: []const []const u8,
- /// fd
- fd: u32 = 0,
+/// values
+values: []const []const u8,
- pub fn decode(reader: anytype) anyerror!FileHandle {
- var this = std.mem.zeroes(FileHandle);
- this.path = try reader.readValue([]const u8);
- this.size = try reader.readValue(u32);
- this.fd = try reader.readValue(u32);
- return this;
- }
+pub fn decode(reader: anytype) anyerror!StringMap {
+ var this = std.mem.zeroes(StringMap);
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeValue(this.path);
- try writer.writeInt(this.size);
- try writer.writeInt(this.fd);
- }
- };
+ this.keys = try reader.readArray([]const u8);
+ this.values = try reader.readArray([]const u8);
+ return this;
+}
- pub const Transform = struct {
- /// handle
- handle: ?FileHandle = null,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeArray([]const u8, this.keys);
+ try writer.writeArray([]const u8, this.values);
+}
- /// path
- path: ?[]const u8 = null,
+};
- /// contents
- contents: []const u8,
+pub const LoaderMap = struct {
+/// extensions
+extensions: []const []const u8,
- /// loader
- loader: ?Loader = null,
+/// loaders
+loaders: []const Loader,
- /// options
- options: ?TransformOptions = null,
- pub fn decode(reader: anytype) anyerror!Transform {
- var this = std.mem.zeroes(Transform);
+pub fn decode(reader: anytype) anyerror!LoaderMap {
+ var this = std.mem.zeroes(LoaderMap);
- while (true) {
- switch (try reader.readByte()) {
- 0 => {
- return this;
- },
+ this.extensions = try reader.readArray([]const u8);
+ this.loaders = try reader.readArray(Loader);
+ return this;
+}
- 1 => {
- this.handle = try reader.readValue(FileHandle);
- },
- 2 => {
- this.path = try reader.readValue([]const u8);
- },
- 3 => {
- this.contents = try reader.readArray(u8);
- },
- 4 => {
- this.loader = try reader.readValue(Loader);
- },
- 5 => {
- this.options = try reader.readValue(TransformOptions);
- },
- else => {
- return error.InvalidMessage;
- },
- }
- }
- unreachable;
- }
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeArray([]const u8, this.extensions);
+ try writer.writeArray(Loader, this.loaders);
+}
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- if (this.handle) |handle| {
- try writer.writeFieldID(1);
- try writer.writeValue(handle);
- }
- if (this.path) |path| {
- try writer.writeFieldID(2);
- try writer.writeValue(path);
- }
- if (this.contents) |contents| {
- try writer.writeFieldID(3);
- try writer.writeArray(u8, contents);
- }
- if (this.loader) |loader| {
- try writer.writeFieldID(4);
- try writer.writeEnum(loader);
- }
- if (this.options) |options| {
- try writer.writeFieldID(5);
- try writer.writeValue(options);
- }
- try writer.endMessage();
- }
- };
+};
+
+pub const TransformOptions = struct {
+/// jsx
+jsx: ?Jsx = null,
+
+/// tsconfig_override
+tsconfig_override: ?[]const u8 = null,
+
+/// resolve
+resolve: ?ResolveMode = null,
+
+/// public_url
+public_url: ?[]const u8 = null,
+
+/// absolute_working_dir
+absolute_working_dir: ?[]const u8 = null,
+
+/// define
+define: ?StringMap = null,
+
+/// preserve_symlinks
+preserve_symlinks: ?bool = null,
+
+/// entry_points
+entry_points: []const []const u8,
+
+/// write
+write: ?bool = null,
+
+/// inject
+inject: []const []const u8,
+
+/// output_dir
+output_dir: ?[]const u8 = null,
+
+/// external
+external: []const []const u8,
+
+/// loaders
+loaders: ?LoaderMap = null,
+
+/// main_fields
+main_fields: []const []const u8,
+
+/// platform
+platform: ?Platform = null,
+
+/// serve
+serve: ?bool = null,
+
+/// extension_order
+extension_order: []const []const u8,
+
+/// public_dir
+public_dir: ?[]const u8 = null,
+
+/// only_scan_dependencies
+only_scan_dependencies: ?ScanDependencyMode = null,
+
+/// generate_node_module_bundle
+generate_node_module_bundle: ?bool = null,
+
+/// node_modules_bundle_path
+node_modules_bundle_path: ?[]const u8 = null,
+
+
+pub fn decode(reader: anytype) anyerror!TransformOptions {
+ var this = std.mem.zeroes(TransformOptions);
+
+ while(true) {
+ switch (try reader.readByte()) {
+ 0 => { return this; },
+
+ 1 => {
+ this.jsx = try reader.readValue(Jsx);
+},
+ 2 => {
+ this.tsconfig_override = try reader.readValue([]const u8);
+},
+ 3 => {
+ this.resolve = try reader.readValue(ResolveMode);
+},
+ 4 => {
+ this.public_url = try reader.readValue([]const u8);
+},
+ 5 => {
+ this.absolute_working_dir = try reader.readValue([]const u8);
+},
+ 6 => {
+ this.define = try reader.readValue(StringMap);
+},
+ 7 => {
+ this.preserve_symlinks = try reader.readValue(bool);
+},
+ 8 => {
+ this.entry_points = try reader.readArray([]const u8);
+},
+ 9 => {
+ this.write = try reader.readValue(bool);
+},
+ 10 => {
+ this.inject = try reader.readArray([]const u8);
+},
+ 11 => {
+ this.output_dir = try reader.readValue([]const u8);
+},
+ 12 => {
+ this.external = try reader.readArray([]const u8);
+},
+ 13 => {
+ this.loaders = try reader.readValue(LoaderMap);
+},
+ 14 => {
+ this.main_fields = try reader.readArray([]const u8);
+},
+ 15 => {
+ this.platform = try reader.readValue(Platform);
+},
+ 16 => {
+ this.serve = try reader.readValue(bool);
+},
+ 17 => {
+ this.extension_order = try reader.readArray([]const u8);
+},
+ 18 => {
+ this.public_dir = try reader.readValue([]const u8);
+},
+ 19 => {
+ this.only_scan_dependencies = try reader.readValue(ScanDependencyMode);
+},
+ 20 => {
+ this.generate_node_module_bundle = try reader.readValue(bool);
+},
+ 21 => {
+ this.node_modules_bundle_path = try reader.readValue([]const u8);
+},
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+unreachable;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+if (this.jsx) |jsx| {
+ try writer.writeFieldID(1);
+ try writer.writeValue(jsx);
+}
+if (this.tsconfig_override) |tsconfig_override| {
+ try writer.writeFieldID(2);
+ try writer.writeValue(tsconfig_override);
+}
+if (this.resolve) |resolve| {
+ try writer.writeFieldID(3);
+ try writer.writeEnum(resolve);
+}
+if (this.public_url) |public_url| {
+ try writer.writeFieldID(4);
+ try writer.writeValue(public_url);
+}
+if (this.absolute_working_dir) |absolute_working_dir| {
+ try writer.writeFieldID(5);
+ try writer.writeValue(absolute_working_dir);
+}
+if (this.define) |define| {
+ try writer.writeFieldID(6);
+ try writer.writeValue(define);
+}
+if (this.preserve_symlinks) |preserve_symlinks| {
+ try writer.writeFieldID(7);
+ try writer.writeInt(@intCast(u8, @boolToInt(preserve_symlinks)));
+}
+if (this.entry_points) |entry_points| {
+ try writer.writeFieldID(8);
+ try writer.writeArray([]const u8, entry_points);
+}
+if (this.write) |write| {
+ try writer.writeFieldID(9);
+ try writer.writeInt(@intCast(u8, @boolToInt(write)));
+}
+if (this.inject) |inject| {
+ try writer.writeFieldID(10);
+ try writer.writeArray([]const u8, inject);
+}
+if (this.output_dir) |output_dir| {
+ try writer.writeFieldID(11);
+ try writer.writeValue(output_dir);
+}
+if (this.external) |external| {
+ try writer.writeFieldID(12);
+ try writer.writeArray([]const u8, external);
+}
+if (this.loaders) |loaders| {
+ try writer.writeFieldID(13);
+ try writer.writeValue(loaders);
+}
+if (this.main_fields) |main_fields| {
+ try writer.writeFieldID(14);
+ try writer.writeArray([]const u8, main_fields);
+}
+if (this.platform) |platform| {
+ try writer.writeFieldID(15);
+ try writer.writeEnum(platform);
+}
+if (this.serve) |serve| {
+ try writer.writeFieldID(16);
+ try writer.writeInt(@intCast(u8, @boolToInt(serve)));
+}
+if (this.extension_order) |extension_order| {
+ try writer.writeFieldID(17);
+ try writer.writeArray([]const u8, extension_order);
+}
+if (this.public_dir) |public_dir| {
+ try writer.writeFieldID(18);
+ try writer.writeValue(public_dir);
+}
+if (this.only_scan_dependencies) |only_scan_dependencies| {
+ try writer.writeFieldID(19);
+ try writer.writeEnum(only_scan_dependencies);
+}
+if (this.generate_node_module_bundle) |generate_node_module_bundle| {
+ try writer.writeFieldID(20);
+ try writer.writeInt(@intCast(u8, @boolToInt(generate_node_module_bundle)));
+}
+if (this.node_modules_bundle_path) |node_modules_bundle_path| {
+ try writer.writeFieldID(21);
+ try writer.writeValue(node_modules_bundle_path);
+}
+try writer.endMessage();
+}
- pub const TransformResponseStatus = enum(u32) {
- _none,
- /// success
- success,
+};
- /// fail
- fail,
+pub const FileHandle = struct {
+/// path
+path: []const u8,
- _,
+/// size
+size: u32 = 0,
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
+/// fd
+fd: u32 = 0,
- pub const OutputFile = struct {
- /// data
- data: []const u8,
- /// path
- path: []const u8,
+pub fn decode(reader: anytype) anyerror!FileHandle {
+ var this = std.mem.zeroes(FileHandle);
- pub fn decode(reader: anytype) anyerror!OutputFile {
- var this = std.mem.zeroes(OutputFile);
+ this.path = try reader.readValue([]const u8);
+ this.size = try reader.readValue(u32);
+ this.fd = try reader.readValue(u32);
+ return this;
+}
- this.data = try reader.readArray(u8);
- this.path = try reader.readValue([]const u8);
- return this;
- }
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(this.path);
+ try writer.writeInt(this.size);
+ try writer.writeInt(this.fd);
+}
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeArray(u8, this.data);
- try writer.writeValue(this.path);
- }
- };
+};
- pub const TransformResponse = struct {
- /// status
- status: TransformResponseStatus,
+pub const Transform = struct {
+/// handle
+handle: ?FileHandle = null,
+
+/// path
+path: ?[]const u8 = null,
+
+/// contents
+contents: []const u8,
+
+/// loader
+loader: ?Loader = null,
+
+/// options
+options: ?TransformOptions = null,
+
+
+pub fn decode(reader: anytype) anyerror!Transform {
+ var this = std.mem.zeroes(Transform);
+
+ while(true) {
+ switch (try reader.readByte()) {
+ 0 => { return this; },
+
+ 1 => {
+ this.handle = try reader.readValue(FileHandle);
+},
+ 2 => {
+ this.path = try reader.readValue([]const u8);
+},
+ 3 => {
+ this.contents = try reader.readArray(u8);
+},
+ 4 => {
+ this.loader = try reader.readValue(Loader);
+},
+ 5 => {
+ this.options = try reader.readValue(TransformOptions);
+},
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+unreachable;
+}
- /// files
- files: []OutputFile,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+if (this.handle) |handle| {
+ try writer.writeFieldID(1);
+ try writer.writeValue(handle);
+}
+if (this.path) |path| {
+ try writer.writeFieldID(2);
+ try writer.writeValue(path);
+}
+if (this.contents) |contents| {
+ try writer.writeFieldID(3);
+ try writer.writeArray(u8, contents);
+}
+if (this.loader) |loader| {
+ try writer.writeFieldID(4);
+ try writer.writeEnum(loader);
+}
+if (this.options) |options| {
+ try writer.writeFieldID(5);
+ try writer.writeValue(options);
+}
+try writer.endMessage();
+}
- /// errors
- errors: []Message,
+};
- pub fn decode(reader: anytype) anyerror!TransformResponse {
- var this = std.mem.zeroes(TransformResponse);
+pub const TransformResponseStatus = enum(u32) {
- this.status = try reader.readValue(TransformResponseStatus);
- this.files = try reader.readArray(OutputFile);
- this.errors = try reader.readArray(Message);
- return this;
- }
+_none,
+ /// success
+ success,
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeEnum(this.status);
- try writer.writeArray(OutputFile, this.files);
- try writer.writeArray(Message, this.errors);
- }
- };
+ /// fail
+ fail,
- pub const MessageKind = enum(u32) {
- _none,
- /// err
- err,
+_,
- /// warn
- warn,
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
- /// note
- note,
+
+};
- /// debug
- debug,
+pub const OutputFile = struct {
+/// data
+data: []const u8,
- _,
+/// path
+path: []const u8,
- pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
- return try std.json.stringify(@tagName(self), opts, o);
- }
- };
- pub const Location = struct {
- /// file
- file: []const u8,
+pub fn decode(reader: anytype) anyerror!OutputFile {
+ var this = std.mem.zeroes(OutputFile);
- /// namespace
- namespace: []const u8,
+ this.data = try reader.readArray(u8);
+ this.path = try reader.readValue([]const u8);
+ return this;
+}
- /// line
- line: i32 = 0,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeArray(u8, this.data);
+ try writer.writeValue(this.path);
+}
- /// column
- column: i32 = 0,
+};
- /// line_text
- line_text: []const u8,
+pub const TransformResponse = struct {
+/// status
+status: TransformResponseStatus,
- /// suggestion
- suggestion: []const u8,
+/// files
+files: []const OutputFile,
- /// offset
- offset: u32 = 0,
+/// errors
+errors: []const Message,
- pub fn decode(reader: anytype) anyerror!Location {
- var this = std.mem.zeroes(Location);
- this.file = try reader.readValue([]const u8);
- this.namespace = try reader.readValue([]const u8);
- this.line = try reader.readValue(i32);
- this.column = try reader.readValue(i32);
- this.line_text = try reader.readValue([]const u8);
- this.suggestion = try reader.readValue([]const u8);
- this.offset = try reader.readValue(u32);
- return this;
- }
+pub fn decode(reader: anytype) anyerror!TransformResponse {
+ var this = std.mem.zeroes(TransformResponse);
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeValue(this.file);
- try writer.writeValue(this.namespace);
- try writer.writeInt(this.line);
- try writer.writeInt(this.column);
- try writer.writeValue(this.line_text);
- try writer.writeValue(this.suggestion);
- try writer.writeInt(this.offset);
- }
- };
+ this.status = try reader.readValue(TransformResponseStatus);
+ this.files = try reader.readArray(OutputFile);
+ this.errors = try reader.readArray(Message);
+ return this;
+}
- pub const MessageData = struct {
- /// text
- text: ?[]const u8 = null,
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeEnum(this.status);
+ try writer.writeArray(OutputFile, this.files);
+ try writer.writeArray(Message, this.errors);
+}
- /// location
- location: ?Location = null,
+};
- pub fn decode(reader: anytype) anyerror!MessageData {
- var this = std.mem.zeroes(MessageData);
+pub const MessageKind = enum(u32) {
- while (true) {
- switch (try reader.readByte()) {
- 0 => {
- return this;
- },
+_none,
+ /// err
+ err,
- 1 => {
- this.text = try reader.readValue([]const u8);
- },
- 2 => {
- this.location = try reader.readValue(Location);
- },
- else => {
- return error.InvalidMessage;
- },
+ /// warn
+ warn,
+
+ /// note
+ note,
+
+ /// debug
+ debug,
+
+_,
+
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
}
- }
- unreachable;
- }
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- if (this.text) |text| {
- try writer.writeFieldID(1);
- try writer.writeValue(text);
- }
- if (this.location) |location| {
- try writer.writeFieldID(2);
- try writer.writeValue(location);
- }
- try writer.endMessage();
- }
- };
+
+};
- pub const Message = struct {
- /// kind
- kind: MessageKind,
+pub const Location = struct {
+/// file
+file: []const u8,
- /// data
- data: MessageData,
+/// namespace
+namespace: []const u8,
- /// notes
- notes: []MessageData,
+/// line
+line: i32 = 0,
- pub fn decode(reader: anytype) anyerror!Message {
- var this = std.mem.zeroes(Message);
+/// column
+column: i32 = 0,
- this.kind = try reader.readValue(MessageKind);
- this.data = try reader.readValue(MessageData);
- this.notes = try reader.readArray(MessageData);
- return this;
- }
+/// line_text
+line_text: []const u8,
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeEnum(this.kind);
- try writer.writeValue(this.data);
- try writer.writeArray(MessageData, this.notes);
- }
- };
+/// suggestion
+suggestion: []const u8,
- pub const Log = struct {
- /// warnings
- warnings: u32 = 0,
+/// offset
+offset: u32 = 0,
- /// errors
- errors: u32 = 0,
- /// msgs
- msgs: []Message,
+pub fn decode(reader: anytype) anyerror!Location {
+ var this = std.mem.zeroes(Location);
- pub fn decode(reader: anytype) anyerror!Log {
- var this = std.mem.zeroes(Log);
+ this.file = try reader.readValue([]const u8);
+ this.namespace = try reader.readValue([]const u8);
+ this.line = try reader.readValue(i32);
+ this.column = try reader.readValue(i32);
+ this.line_text = try reader.readValue([]const u8);
+ this.suggestion = try reader.readValue([]const u8);
+ this.offset = try reader.readValue(u32);
+ return this;
+}
- this.warnings = try reader.readValue(u32);
- this.errors = try reader.readValue(u32);
- this.msgs = try reader.readArray(Message);
- return this;
- }
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(this.file);
+ try writer.writeValue(this.namespace);
+ try writer.writeInt(this.line);
+ try writer.writeInt(this.column);
+ try writer.writeValue(this.line_text);
+ try writer.writeValue(this.suggestion);
+ try writer.writeInt(this.offset);
+}
+
+};
+
+pub const MessageData = struct {
+/// text
+text: ?[]const u8 = null,
+
+/// location
+location: ?Location = null,
+
+
+pub fn decode(reader: anytype) anyerror!MessageData {
+ var this = std.mem.zeroes(MessageData);
+
+ while(true) {
+ switch (try reader.readByte()) {
+ 0 => { return this; },
+
+ 1 => {
+ this.text = try reader.readValue([]const u8);
+},
+ 2 => {
+ this.location = try reader.readValue(Location);
+},
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+unreachable;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+if (this.text) |text| {
+ try writer.writeFieldID(1);
+ try writer.writeValue(text);
+}
+if (this.location) |location| {
+ try writer.writeFieldID(2);
+ try writer.writeValue(location);
+}
+try writer.endMessage();
+}
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeInt(this.warnings);
- try writer.writeInt(this.errors);
- try writer.writeArray(Message, this.msgs);
- }
- };
};
+pub const Message = struct {
+/// kind
+kind: MessageKind,
+
+/// data
+data: MessageData,
+
+/// notes
+notes: []const MessageData,
+
+
+pub fn decode(reader: anytype) anyerror!Message {
+ var this = std.mem.zeroes(Message);
+
+ this.kind = try reader.readValue(MessageKind);
+ this.data = try reader.readValue(MessageData);
+ this.notes = try reader.readArray(MessageData);
+ return this;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeEnum(this.kind);
+ try writer.writeValue(this.data);
+ try writer.writeArray(MessageData, this.notes);
+}
+
+};
+
+pub const Log = struct {
+/// warnings
+warnings: u32 = 0,
+
+/// errors
+errors: u32 = 0,
+
+/// msgs
+msgs: []const Message,
+
+
+pub fn decode(reader: anytype) anyerror!Log {
+ var this = std.mem.zeroes(Log);
+
+ this.warnings = try reader.readValue(u32);
+ this.errors = try reader.readValue(u32);
+ this.msgs = try reader.readArray(Message);
+ return this;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeInt(this.warnings);
+ try writer.writeInt(this.errors);
+ try writer.writeArray(Message, this.msgs);
+}
+
+};
+
+
+};
+
+
const ExamplePackedStruct = packed struct {
len: u32 = 0,
offset: u32 = 0,
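
The generated structs above follow a simple convention: fixed structs encode their fields in declaration order, while structs with optional fields (such as MessageData) prefix each present field with its field ID and terminate with a 0 byte. A rough TypeScript sketch of that framing is below; the string/integer encodings (1-byte length prefix, little-endian i32) and the numeric second field are simplifications for illustration, not the actual peechy wire format.

    // Illustrative sketch of the field-ID framing used by the generated
    // MessageData encoder: each optional field is prefixed by its field ID,
    // and a 0 byte ends the message (mirrors writer.endMessage()).
    type MessageDataLike = { text?: string; line?: number };

    function encodeMessageData(msg: MessageDataLike): Uint8Array {
      const bytes: number[] = [];
      if (msg.text != null) {
        bytes.push(1); // field ID 1
        const utf8 = new TextEncoder().encode(msg.text);
        bytes.push(utf8.length & 0xff); // simplified 1-byte length prefix
        for (const b of utf8) bytes.push(b);
      }
      if (msg.line != null) {
        bytes.push(2); // field ID 2, little-endian i32 (simplified)
        bytes.push(msg.line & 0xff, (msg.line >>> 8) & 0xff, (msg.line >>> 16) & 0xff, (msg.line >>> 24) & 0xff);
      }
      bytes.push(0); // end-of-message marker
      return Uint8Array.from(bytes);
    }

    function decodeMessageData(buf: Uint8Array): MessageDataLike {
      const result: MessageDataLike = {};
      let i = 0;
      while (true) {
        const fieldId = buf[i++];
        if (fieldId === 0) return result; // same shape as the generated decode loop
        if (fieldId === 1) {
          const len = buf[i++];
          result.text = new TextDecoder().decode(buf.slice(i, i + len));
          i += len;
        } else if (fieldId === 2) {
          result.line = buf[i] | (buf[i + 1] << 8) | (buf[i + 2] << 16) | (buf[i + 3] << 24);
          i += 4;
        } else {
          throw new Error("InvalidMessage"); // matches error.InvalidMessage above
        }
      }
    }
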
diff --git a/src/bundler.zig b/src/bundler.zig
index 9b8ac11b3..630e96772 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -27,7 +27,7 @@ const resolve_path = @import("./resolver/resolve_path.zig");
const runtime = @import("./runtime.zig");
const Timer = @import("./timer.zig");
const hash_map = @import("hash_map.zig");
-
+const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
const DebugLogs = _resolver.DebugLogs;
pub const ServeResult = struct {
@@ -374,14 +374,22 @@ pub fn NewBundler(cache_files: bool) type {
javascript_bundle.modules = this.module_list.items;
javascript_bundle.packages = sorted_package_list;
javascript_bundle.manifest_string = this.header_string_buffer.list.items;
-
- javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp()));
-
- javascript_bundle.import_from_name = destination;
-
var etag_bytes: [8]u8 = undefined;
- std.mem.writeIntNative(u64, &etag_bytes, hasher.final());
+ const etag_u64 = hasher.final();
+ std.mem.writeIntNative(u64, &etag_bytes, etag_u64);
javascript_bundle.etag = &etag_bytes;
+ javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp()));
+
+ const basename = std.fs.path.basename(destination);
+ const extname = std.fs.path.extension(basename);
+ javascript_bundle.import_from_name = try std.fmt.allocPrint(
+ this.allocator,
+ "/{s}.{x}.jsb",
+ .{
+ basename[0 .. basename.len - extname.len],
+ etag_u64,
+ },
+ );
javascript_bundle_container.bundle_format_version = current_version;
javascript_bundle_container.bundle = javascript_bundle;
@@ -450,6 +458,7 @@ pub fn NewBundler(cache_files: bool) type {
fn processImportRecord(this: *GenerateNodeModuleBundle, import_record: ImportRecord) !void {}
const node_module_root_string = "node_modules" ++ std.fs.path.sep_str;
threadlocal var package_key_buf: [512]u8 = undefined;
+
fn processFile(this: *GenerateNodeModuleBundle, _resolve: _resolver.Result) !void {
var resolve = _resolve;
if (resolve.is_external) return;
@@ -459,7 +468,7 @@ pub fn NewBundler(cache_files: bool) type {
var bundler = this.bundler;
defer this.scan_pass_result.reset();
defer this.bundler.resetStore();
- const file_path = resolve.path_pair.primary;
+ var file_path = resolve.path_pair.primary;
// If we're in a node_module, build that almost normally
if (resolve.is_from_node_modules) {
@@ -476,12 +485,13 @@ pub fn NewBundler(cache_files: bool) type {
true,
);
const source = logger.Source.initFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null;
- const source_dir = file_path.name.dir;
+ const source_dir = file_path.name.dirWithTrailingSlash();
var jsx = bundler.options.jsx;
jsx.parse = loader.isJSX();
var opts = js_parser.Parser.Options.init(jsx, loader);
opts.output_commonjs = true;
+
var ast: js_ast.Ast = (try bundler.resolver.caches.js.parse(
bundler.allocator,
opts,
@@ -504,11 +514,37 @@ pub fn NewBundler(cache_files: bool) type {
}
const absolute_path = resolved_import.path_pair.primary.text;
+ const package_json: *const PackageJSON = (resolved_import.package_json orelse (this.bundler.resolver.packageJSONForResolvedNodeModule(resolved_import) orelse {
+ this.log.addWarningFmt(
+ &source,
+ import_record.range.loc,
+ this.allocator,
+ "Failed to find package.json for \"{s}\". This will be unresolved and might break at runtime. If it's external, you could add it to the external list.",
+ .{
+ resolved_import.path_pair.primary.text,
+ source.path.text,
+ },
+ ) catch {};
+ continue;
+ }));
+
+ // trim node_modules/${package.name}/ from the string to save space
+ // This reduces metadata size by about 30% for a large-ish file
+ // A future optimization here could be to reuse the string from the original path
+ var node_module_root = strings.indexOf(resolved_import.path_pair.primary.text, node_module_root_string) orelse unreachable;
+ // // omit package name
+ // node_module_root += package_json.name.len;
+ // omit node_modules
+ node_module_root += node_module_root_string.len;
+ // omit trailing separator
+ node_module_root += 1;
// It should be the first index, not the last to support bundling multiple of the same package
- if (strings.indexOf(absolute_path, node_module_root_string)) |node_module_start| {
- import_record.path = Fs.Path.init(absolute_path[node_module_root_string.len + node_module_start ..]);
- }
+ import_record.path = Fs.Path.init(
+ absolute_path[node_module_root..],
+ );
+
+ import_record.package_json_hash = package_json.hash;
const get_or_put_result = try this.resolved_paths.getOrPut(absolute_path);
@@ -520,6 +556,25 @@ pub fn NewBundler(cache_files: bool) type {
} else |err| {}
}
+ const PackageNameVersionPair = struct { name: string, version: string, hash: u32 };
+ var package: PackageNameVersionPair = undefined;
+
+ if (resolve.package_json) |package_json| {
+ package = .{
+ .name = package_json.name,
+ .version = package_json.version,
+ .hash = package_json.hash,
+ };
+ } else {
+ if (this.bundler.resolver.packageJSONForResolvedNodeModule(&resolve)) |package_json| {
+ package = .{
+ .name = package_json.name,
+ .version = package_json.version,
+ .hash = package_json.hash,
+ };
+ }
+ }
+
const code_offset = this.tmpfile_byte_offset - code_start_byte_offset;
var writer = js_printer.NewFileWriter(this.tmpfile);
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
@@ -538,6 +593,7 @@ pub fn NewBundler(cache_files: bool) type {
.externals = ast.externals,
// Indent by one
.indent = 1,
+ .package_json_hash = package.hash,
.runtime_imports = ast.runtime_imports,
},
Linker,
@@ -546,30 +602,14 @@ pub fn NewBundler(cache_files: bool) type {
);
this.tmpfile_byte_offset += code_length;
- const PackageNameVersionPair = struct { name: string, version: string };
- var package: PackageNameVersionPair = undefined;
-
- if (resolve.package_json_version) |version| {
- package = .{ .name = resolve.package_json_name.?, .version = version };
- } else {
- if (this.bundler.resolver.packageJSONForResolvedNodeModule(&resolve)) |package_json| {
- package = .{
- .name = package_json.name,
- .version = package_json.version,
- };
- }
- }
-
- const package_id_key = try std.fmt.bufPrint(&package_key_buf, "{s}@{s}", .{ package.name, package.version });
- const package_id_key_hash = std.hash.Wyhash.hash(0, package_id_key);
- var package_get_or_put_entry = try this.package_list_map.getOrPut(package_id_key_hash);
+ var package_get_or_put_entry = try this.package_list_map.getOrPut(package.hash);
if (!package_get_or_put_entry.found_existing) {
package_get_or_put_entry.value_ptr.* = @truncate(u32, this.package_list.items.len);
try this.package_list.append(
Api.JavascriptBundledPackage{
.name = try this.appendHeaderString(package.name),
.version = try this.appendHeaderString(package.version),
- .hash = @truncate(u32, package_id_key_hash),
+ .hash = package.hash,
},
);
}
@@ -584,11 +624,14 @@ pub fn NewBundler(cache_files: bool) type {
// omit trailing separator
node_module_root += 1;
+ var path_str = resolve.path_pair.primary.text[node_module_root..];
+ var path_extname_length = @truncate(u8, std.fs.path.extension(path_str).len);
try this.module_list.append(
Api.JavascriptBundledModule{
.path = try this.appendHeaderString(
- resolve.path_pair.primary.text[node_module_root..],
+ path_str,
),
+ .path_extname_length = path_extname_length,
.package_id = package_get_or_put_entry.value_ptr.*,
.code = Api.StringPointer{
.length = @truncate(u32, code_length),
@@ -615,7 +658,7 @@ pub fn NewBundler(cache_files: bool) type {
) catch return;
const source = logger.Source.initFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null;
- const source_dir = file_path.name.dir;
+ const source_dir = file_path.name.dirWithTrailingSlash();
var jsx = bundler.options.jsx;
jsx.parse = loader.isJSX();
@@ -802,7 +845,7 @@ pub fn NewBundler(cache_files: bool) type {
) catch return null;
const source = logger.Source.initFile(Fs.File{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null;
- const source_dir = file_path.name.dir;
+ const source_dir = file_path.name.dirWithTrailingSlash();
var jsx = bundler.options.jsx;
jsx.parse = loader.isJSX();
@@ -1258,6 +1301,7 @@ pub fn NewBundler(cache_files: bool) type {
if (bundler.resolve_results.contains(key)) {
continue;
}
+
try bundler.resolve_results.put(key, result);
entry_points[entry_point_i] = result;
@@ -1281,8 +1325,9 @@ pub fn NewBundler(cache_files: bool) type {
} else {
const output_dir = bundler.options.output_dir_handle orelse {
Output.printError("Invalid or missing output directory.", .{});
- std.os.exit(1);
+ Global.crash();
};
+
try switch (bundler.options.import_path_format) {
.relative => bundler.processResolveQueue(.relative, std.fs.Dir, output_dir),
.relative_nodejs => bundler.processResolveQueue(.relative_nodejs, std.fs.Dir, output_dir),
@@ -1360,45 +1405,18 @@ pub const Transformer = struct {
) !options.TransformResult {
js_ast.Expr.Data.Store.create(allocator);
js_ast.Stmt.Data.Store.create(allocator);
- var raw_defines = try options.stringHashMapFromArrays(RawDefines, allocator, opts.define_keys, opts.define_values);
- if (opts.define_keys.len == 0) {
- try raw_defines.put(options.DefaultUserDefines.NodeEnv.Key, options.DefaultUserDefines.NodeEnv.Value);
- }
- var user_defines = try DefineData.from_input(raw_defines, log, alloc.static);
- var define = try Define.init(
- alloc.static,
- user_defines,
- );
+ var define = try options.definesFromTransformOptions(allocator, log, opts.define);
const cwd = if (opts.absolute_working_dir) |workdir| try std.fs.realpathAlloc(allocator, workdir) else try std.process.getCwdAlloc(allocator);
const output_dir_parts = [_]string{ try std.process.getCwdAlloc(allocator), opts.output_dir orelse "out" };
const output_dir = try std.fs.path.join(allocator, &output_dir_parts);
var output_files = try std.ArrayList(options.OutputFile).initCapacity(allocator, opts.entry_points.len);
- var loader_values = try allocator.alloc(options.Loader, opts.loader_values.len);
const platform = options.Platform.from(opts.platform);
const out_extensions = platform.outExtensions(allocator);
- for (loader_values) |_, i| {
- const loader = switch (opts.loader_values[i]) {
- .jsx => options.Loader.jsx,
- .js => options.Loader.js,
- .ts => options.Loader.ts,
- .css => options.Loader.css,
- .tsx => options.Loader.tsx,
- .json => options.Loader.json,
- else => unreachable,
- };
-
- loader_values[i] = loader;
- }
- var loader_map = try options.stringHashMapFromArrays(
- std.StringHashMap(options.Loader),
- allocator,
- opts.loader_keys,
- loader_values,
- );
+ var loader_map = try options.loadersFromTransformOptions(allocator, opts.loaders);
var use_default_loaders = loader_map.count() == 0;
var jsx = if (opts.jsx) |_jsx| try options.JSX.Pragma.fromApi(_jsx, allocator) else options.JSX.Pragma{};
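
As a side note on the bundler changes above, the bundle's public import name appears to be derived from the output basename plus the 64-bit content etag rendered in hex. A small TypeScript sketch of that string construction, with the etag passed in as an opaque bigint (the real value is a Wyhash of the bundle contents):

    // Sketch of the import_from_name construction:
    // "/{basename without extension}.{etag as lowercase hex}.jsb"
    import * as path from "path";

    function importFromName(destination: string, etag: bigint): string {
      const basename = path.basename(destination);
      const extname = path.extname(basename);
      const stem = basename.slice(0, basename.length - extname.length);
      return `/${stem}.${etag.toString(16)}.jsb`;
    }

    // e.g. importFromName("out/node_modules.jsb", 0xdeadbeefn) === "/node_modules.deadbeef.jsb"
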
diff --git a/src/cli.zig b/src/cli.zig
index 472c6ac02..c6b1c38bd 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -132,7 +132,9 @@ pub const Cli = struct {
clap.parseParam("--platform <STR> \"browser\" or \"node\". Defaults to \"browser\"") catch unreachable,
clap.parseParam("--main-fields <STR>... Main fields to lookup in package.json. Defaults to --platform dependent") catch unreachable,
clap.parseParam("--scan Instead of bundling or transpiling, print a list of every file imported by an entry point, recursively") catch unreachable,
- clap.parseParam("--jsb Generate a new node_modules.jsb file from node_modules and entry point(s)") catch unreachable,
+ clap.parseParam("--new-jsb Generate a new node_modules.jsb file from node_modules and entry point(s)") catch unreachable,
+ clap.parseParam("--jsb <STR> Use a Speedy JavaScript Bundle (default: \"./node_modules.jsb\" if exists)") catch unreachable,
+ // clap.parseParam("--no-jsb Use a Speedy JavaScript Bundle (default: \"./node_modules.jsb\" if exists)") catch unreachable,
clap.parseParam("<POS>... Entry points to use") catch unreachable,
};
@@ -186,6 +188,22 @@ pub const Cli = struct {
var react_fast_refresh = args.flag("--react-fast-refresh");
var main_fields = args.options("--main-fields");
+ var node_modules_bundle_path = args.option("--jsb") orelse brk: {
+ if (args.flag("--new-jsb")) {
+ break :brk null;
+ }
+
+ const node_modules_bundle_path_absolute = resolve_path.joinAbs(cwd, .auto, "node_modules.jsb");
+ std.fs.accessAbsolute(node_modules_bundle_path_absolute, .{}) catch |err| {
+ break :brk null;
+ };
+ break :brk try allocator.dupe(u8, node_modules_bundle_path_absolute);
+ };
+
+ if (args.flag("--new-jsb")) {
+ node_modules_bundle_path = null;
+ }
+
const PlatformMatcher = strings.ExactSizeMatcher(8);
const ResoveMatcher = strings.ExactSizeMatcher(8);
@@ -266,10 +284,15 @@ pub const Cli = struct {
.absolute_working_dir = cwd,
.tsconfig_override = tsconfig_override,
.public_url = public_url,
- .define_keys = define_keys,
- .define_values = define_values,
- .loader_keys = loader_keys,
- .loader_values = loader_values,
+ .define = .{
+ .keys = define_keys,
+ .values = define_values,
+ },
+ .loaders = .{
+ .extensions = loader_keys,
+ .loaders = loader_values,
+ },
+ .node_modules_bundle_path = node_modules_bundle_path,
.public_dir = if (args.option("--public-dir")) |public_dir| allocator.dupe(u8, public_dir) catch unreachable else null,
.write = write,
.serve = serve,
@@ -279,7 +302,7 @@ pub const Cli = struct {
.main_fields = args.options("--main-fields"),
.platform = platform,
.only_scan_dependencies = if (args.flag("--scan")) Api.ScanDependencyMode.all else Api.ScanDependencyMode._none,
- .generate_node_module_bundle = if (args.flag("--jsb")) true else false,
+ .generate_node_module_bundle = if (args.flag("--new-jsb")) true else false,
};
}
};
@@ -363,6 +386,13 @@ pub const Cli = struct {
args,
);
},
+ .lazy => {
+ result = try bundler.ServeBundler.bundle(
+ allocator,
+ &log,
+ args,
+ );
+ },
else => {
result = try bundler.Bundler.bundle(
allocator,
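
The --jsb / --new-jsb handling above resolves the bundle path in a fixed order: an explicit --jsb value wins, otherwise ./node_modules.jsb is used if it exists, and --new-jsb suppresses reading any existing bundle so it can be regenerated. A hedged TypeScript sketch of that decision (the accessSync probe stands in for std.fs.accessAbsolute):

    import * as fs from "fs";
    import * as path from "path";

    function resolveBundlePath(opts: { jsb?: string; newJsb: boolean }, cwd: string): string | null {
      if (opts.newJsb) return null; // regenerate instead of reading an existing bundle
      if (opts.jsb) return opts.jsb; // explicit path always wins
      const fallback = path.join(cwd, "node_modules.jsb");
      try {
        fs.accessSync(fallback); // only use the default if it actually exists
        return fallback;
      } catch {
        return null;
      }
    }
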
diff --git a/src/fs.zig b/src/fs.zig
index a6a1584f4..d3957ebe3 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -918,6 +918,18 @@ pub const PathName = struct {
return MutableString.ensureValidIdentifier(self.base, allocator);
}
+ pub fn dirWithTrailingSlash(this: *const PathName) string {
+ // The three strings basically always point to the same underlying ptr
+ // so if dir does not have a trailing slash, but is spaced one apart from the basename
+ // we can assume there is a trailing slash there
+ // so we extend the original slice's length by one
+ if (this.dir[this.dir.len - 1] != std.fs.path.sep_posix and (@ptrToInt(this.dir.ptr) + this.dir.len + 1) == @ptrToInt(this.base.ptr)) {
+ return this.dir.ptr[0 .. this.dir.len + 1];
+ }
+
+ return this.dir;
+ }
+
pub fn init(_path: string) PathName {
var path = _path;
var base = path;
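
dirWithTrailingSlash relies on dir and base being slices into the same path buffer, so the separator between them can be re-included by extending dir's length by one instead of allocating. The index arithmetic below is an illustrative TypeScript equivalent over a single path string:

    // When dir = fullPath.slice(0, sep) and base = fullPath.slice(sep + 1),
    // extending dir by one character re-includes the separator without
    // building a new string. Indices stand in for the pointer check in Zig.
    function dirWithTrailingSlash(fullPath: string): string {
      const sep = fullPath.lastIndexOf("/");
      if (sep < 0) return fullPath;
      return fullPath.slice(0, sep + 1);
    }

    // dirWithTrailingSlash("/a/b/c.js") === "/a/b/"
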
diff --git a/src/global.zig b/src/global.zig
index bc0ab3ee3..6610abafd 100644
--- a/src/global.zig
+++ b/src/global.zig
@@ -104,6 +104,7 @@ pub const Output = struct {
}
pub fn disableBuffering() void {
+ Output.flush();
enable_buffering = false;
}
@@ -285,14 +286,18 @@ pub const Output = struct {
printer(new_fmt[0..new_fmt_i], args);
}
- pub fn pretty(comptime fmt: string, args: anytype) void {
+ pub fn prettyWithPrinter(comptime fmt: string, args: anytype, printer: anytype) void {
if (enable_ansi_colors) {
- _pretty(fmt, args, print, true);
+ _pretty(fmt, args, printer, true);
} else {
- _pretty(fmt, args, print, false);
+ _pretty(fmt, args, printer, false);
}
}
+ pub fn pretty(comptime fmt: string, args: anytype) void {
+ prettyWithPrinter(fmt, args, print);
+ }
+
pub fn prettyln(comptime fmt: string, args: anytype) void {
if (enable_ansi_colors) {
_pretty(fmt, args, println, true);
@@ -309,6 +314,26 @@ pub const Output = struct {
return printError(fmt, args);
}
+ pub fn prettyError(comptime fmt: string, args: anytype) void {
+ prettyWithPrinter(fmt, args, printError);
+ }
+
+ pub fn prettyErrorln(comptime fmt: string, args: anytype) void {
+ if (fmt[fmt.len - 1] != '\n') {
+ return prettyWithPrinter(
+ fmt ++ "\n",
+ args,
+ printError,
+ );
+ }
+
+ return prettyWithPrinter(
+ fmt,
+ args,
+ printError,
+ );
+ }
+
pub fn errorLn(comptime fmt: string, args: anytype) void {
return printErrorln(fmt, args);
}
@@ -329,8 +354,10 @@ pub const Global = struct {
pub fn panic(comptime fmt: string, args: anytype) noreturn {
if (isWasm) {
Output.print(fmt, args);
+ Output.flush();
@panic(fmt);
} else {
+ Output.flush();
std.debug.panic(fmt, args);
}
}
@@ -338,6 +365,12 @@ pub const Global = struct {
pub fn notimpl() noreturn {
Global.panic("Not implemented yet!!!!!", .{});
}
+
+ // Make sure we always print any leftover
+ pub fn crash() noreturn {
+ Output.flush();
+ std.os.exit(1);
+ }
};
pub const FileDescriptorType = if (isBrowser) u0 else std.os.fd_t;
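
Global.crash() and the panic paths above share one idea: flush any buffered output before the process dies so diagnostics are not lost. A minimal sketch of that pattern, with a hypothetical string buffer standing in for Output's buffering:

    const buffered: string[] = [];

    function print(text: string): void {
      buffered.push(text); // cheap buffered writes during normal operation
    }

    function flush(): void {
      if (buffered.length) {
        process.stderr.write(buffered.join(""));
        buffered.length = 0;
      }
    }

    function crash(): never {
      flush(); // make sure any pending diagnostics are printed first
      return process.exit(1);
    }
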
diff --git a/src/import_record.zig b/src/import_record.zig
index 69c498882..89801b9be 100644
--- a/src/import_record.zig
+++ b/src/import_record.zig
@@ -44,6 +44,9 @@ pub const ImportRecord = struct {
range: logger.Range,
path: fs.Path,
+ // 0 is invalid
+ package_json_hash: u32 = 0,
+
source_index: Ref.Int = std.math.maxInt(Ref.Int),
// True for the following cases:
@@ -60,6 +63,8 @@ pub const ImportRecord = struct {
is_internal: bool = false,
+ is_bundled: bool = false,
+
// Sometimes the parser creates an import record and decides it isn't needed.
// For example, TypeScript code may have import statements that later turn
// out to be type-only imports after analyzing the whole file.
diff --git a/src/js_ast.zig b/src/js_ast.zig
index a0add3157..9be9d461a 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -3474,6 +3474,8 @@ pub const Ast = struct {
module_ref: ?Ref = null,
wrapper_ref: ?Ref = null,
+ bundle_namespace_ref: ?Ref = null,
+
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 1802d7168..81ab8e2d6 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -1522,6 +1522,24 @@ pub const Parser = struct {
// June 4: "Parsing took: 18028000"
// June 4: "Rest of this took: 8003000"
_ = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);
+
+ // Symbol use counts are unavailable
+ // So we say "did we parse any JSX?"
+ // if yes, just automatically add the import so that .jsb knows to include the file.
+ if (self.options.jsx.parse and p.needs_jsx_import) {
+ _ = p.addImportRecord(
+ .internal,
+ logger.Loc{ .start = 0 },
+ p.options.jsx.import_source,
+ );
+ // Ensure we have both classic and automatic
+ // This is to handle cases where they use fragments in the automatic runtime
+ _ = p.addImportRecord(
+ .internal,
+ logger.Loc{ .start = 0 },
+ p.options.jsx.classic_import_source,
+ );
+ }
}
pub fn parse(self: *Parser) !js_ast.Result {
@@ -1770,7 +1788,7 @@ pub const Parser = struct {
};
decl_i += 1;
}
- const import_record_id = p.addImportRecord(.internal, loc, p.options.jsx.import_source);
+ const import_record_id = p.addImportRecord(.internal, loc, p.options.jsx.classic_import_source);
jsx_part_stmts[stmt_i] = p.s(S.Import{
.namespace_ref = classic_namespace_ref,
.star_name_loc = loc,
@@ -1826,7 +1844,15 @@ pub const Parser = struct {
var runtime_imports_iter = p.runtime_imports.iter();
while (runtime_imports_iter.next()) |entry| {
const imports = [_]u16{entry.key};
- p.generateImportStmt(RuntimeImports.Name, &imports, &before, p.runtime_imports, null, "import_") catch unreachable;
+ p.generateImportStmt(
+ RuntimeImports.Name,
+ &imports,
+ &before,
+ p.runtime_imports,
+ null,
+ "import_",
+ true,
+ ) catch unreachable;
}
if (p.cjs_import_stmts.items.len > 0 and !p.options.output_commonjs) {
@@ -2009,6 +2035,7 @@ pub fn NewParser(
) type {
const ImportRecordList = if (only_scan_imports_and_do_not_visit) *std.ArrayList(ImportRecord) else std.ArrayList(ImportRecord);
const NamedImportsType = if (only_scan_imports_and_do_not_visit) *js_ast.Ast.NamedImports else js_ast.Ast.NamedImports;
+ const NeedsJSXType = if (only_scan_imports_and_do_not_visit) bool else void;
// P is for Parser!
// public only because of Binding.ToExpr
@@ -2110,6 +2137,14 @@ pub fn NewParser(
top_level_symbol_to_parts: Map(js_ast.Ref, List(u32)),
import_namespace_cc_map: Map(ImportNamespaceCallOrConstruct, bool),
+ // When we're only scanning the imports
+ // If they're using the automatic JSX runtime
+ // We won't know that we need to import JSX robustly because we don't track
+ // symbol counts. Instead, we ask:
+ // "Did we parse anything that looked like JSX"?
+ // If yes, then automatically add the JSX import.
+ needs_jsx_import: NeedsJSXType,
+
// The parser does two passes and we need to pass the scope tree information
// from the first pass to the second pass. That's done by tracking the calls
// to pushScopeForParsePass() and popScope() during the first pass in
@@ -2736,11 +2771,11 @@ pub fn NewParser(
symbols: anytype,
additional_stmt: ?Stmt,
comptime suffix: string,
+ comptime is_internal: bool,
) !void {
const import_record_i = p.addImportRecordByRange(.stmt, logger.Range.None, import_path);
var import_record: *ImportRecord = &p.import_records.items[import_record_i];
-
- import_record.is_internal = true;
+ import_record.is_internal = is_internal;
var import_path_identifier = try import_record.path.name.nonUniqueNameString(p.allocator);
var namespace_identifier = try p.allocator.alloc(u8, import_path_identifier.len + suffix.len);
var clause_items = try p.allocator.alloc(js_ast.ClauseItem, imports.len);
@@ -9254,6 +9289,10 @@ pub fn NewParser(
}
pub fn parseJSXElement(p: *P, loc: logger.Loc) !Expr {
+ if (only_scan_imports_and_do_not_visit) {
+ p.needs_jsx_import = true;
+ }
+
var tag = try JSXTag.parse(p);
// The tag may have TypeScript type arguments: "<Foo<T>/>"
@@ -13237,6 +13276,7 @@ pub fn NewParser(
.require_resolve_transposer = undefined,
.source = source,
+ .needs_jsx_import = if (only_scan_imports_and_do_not_visit) false else NeedsJSXType{},
.lexer = lexer,
};
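
The scan-only parser cannot rely on symbol use counts, so seeing any JSX element sets needs_jsx_import and both the automatic and the classic JSX import sources get recorded as imports. A simplified TypeScript sketch of that heuristic (names here are illustrative, not the parser's API):

    interface ScanResult {
      imports: string[];
      needsJsxImport: boolean;
    }

    function scanImports(
      sawJsxElement: boolean,
      staticImports: string[],
      jsx: { importSource: string; classicImportSource: string },
    ): ScanResult {
      const imports = [...staticImports];
      if (sawJsxElement) {
        // Mirror the two addImportRecord calls: automatic + classic runtime,
        // so fragments on the classic runtime still resolve.
        imports.push(jsx.importSource, jsx.classicImportSource);
      }
      return { imports, needsJsxImport: sawJsxElement };
    }
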
diff --git a/src/linker.zig b/src/linker.zig
index 4d3d9ff69..fd00911f5 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -88,6 +88,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
var needs_runtime = result.ast.uses_exports_ref or result.ast.uses_module_ref or result.ast.runtime_imports.hasAny();
const source_dir = file_path.name.dir;
var externals = std.ArrayList(u32).init(linker.allocator);
+ var needs_bundle = false;
// Step 1. Resolve imports & requires
switch (result.loader) {
@@ -106,12 +107,74 @@ pub fn NewLinker(comptime BundlerType: type) type {
continue;
}
- if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
+ if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*_resolved_import| {
+ var resolved_import: *Resolver.Result = _resolved_import;
if (resolved_import.is_external) {
externals.append(record_index) catch unreachable;
continue;
}
+ if (resolved_import.package_json) |package_json| {
+ if (linker.options.node_modules_bundle) |node_modules_bundle| {
+ if (strings.contains(package_json.source.path.name.dirWithTrailingSlash(), "node_modules")) {
+ if (node_modules_bundle.getPackageIDByName(package_json.name)) |possible_pkg_ids| {
+ const pkg_id: u32 = brk: {
+ for (possible_pkg_ids) |pkg_id| {
+ const pkg = node_modules_bundle.bundle.packages[pkg_id];
+ if (pkg.hash == package_json.hash) {
+ break :brk pkg_id;
+ }
+ }
+
+ linker.log.addErrorFmt(
+ null,
+ logger.Loc.Empty,
+ linker.allocator,
+ "\"{s}\" version changed, we'll need to regenerate the .jsb.\nOld version: \"{s}\"\nNew version: \"{s}\"",
+ .{
+ package_json.name,
+ node_modules_bundle.str(node_modules_bundle.bundle.packages[possible_pkg_ids[0]].version),
+ package_json.version,
+ },
+ ) catch {};
+ return error.RebuildJSB;
+ };
+
+ const package = &node_modules_bundle.bundle.packages[pkg_id];
+
+ if (isDebug) {
+ std.debug.assert(strings.eql(node_modules_bundle.str(package.name), package_json.name));
+ }
+
+ const package_relative_path = linker.fs.relative(
+ package_json.source.path.name.dirWithTrailingSlash(),
+ resolved_import.path_pair.primary.text,
+ );
+
+ const found_module = node_modules_bundle.findModuleInPackage(package, package_relative_path) orelse {
+ linker.log.addErrorFmt(
+ null,
+ logger.Loc.Empty,
+ linker.allocator,
+ "New dependency import: \"{s}/{s}\"\nWe'll need to regenerate the .jsb.",
+ .{
+ package_json.name,
+ package_relative_path,
+ },
+ ) catch {};
+ return error.RebuildJSB;
+ };
+
+ import_record.is_bundled = true;
+ import_record.path.text = node_modules_bundle.str(found_module.path);
+ import_record.package_json_hash = package.hash;
+ needs_bundle = true;
+ continue;
+ }
+ }
+ }
+ }
+
linker.processImportRecord(
// Include trailing slash
file_path.text[0 .. source_dir.len + 1],
@@ -138,7 +201,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
} else |err| {
switch (err) {
error.ModuleNotFound => {
- if (BundlerType.Resolver.isPackagePath(import_record.path.text)) {
+ if (Resolver.isPackagePath(import_record.path.text)) {
if (linker.options.platform != .node and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
try linker.log.addRangeErrorFmt(
&result.source,
@@ -198,9 +261,6 @@ pub fn NewLinker(comptime BundlerType: type) type {
const ImportPathsList = allocators.BSSStringList(512, 128);
pub var relative_paths_list: *ImportPathsList = undefined;
- threadlocal var relative_path_allocator: std.heap.FixedBufferAllocator = undefined;
- threadlocal var relative_path_allocator_buf: [4096]u8 = undefined;
- threadlocal var relative_path_allocator_buf_loaded: bool = false;
pub fn generateImportPath(
linker: *ThisLinker,
@@ -209,20 +269,6 @@ pub fn NewLinker(comptime BundlerType: type) type {
package_version: ?string,
comptime import_path_format: Options.BundleOptions.ImportPathFormat,
) !Fs.Path {
- if (!relative_path_allocator_buf_loaded) {
- relative_path_allocator_buf_loaded = true;
- relative_path_allocator = std.heap.FixedBufferAllocator.init(&relative_path_allocator_buf);
- }
- defer relative_path_allocator.reset();
-
- var absolute_pathname = Fs.PathName.init(source_path);
-
- if (!linker.options.preserve_extensions) {
- if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
- absolute_pathname.ext = ext;
- }
- }
-
switch (import_path_format) {
.relative => {
var pretty = try linker.allocator.dupe(u8, linker.fs.relative(source_dir, source_path));
@@ -238,6 +284,14 @@ pub fn NewLinker(comptime BundlerType: type) type {
},
.absolute_url => {
+ var absolute_pathname = Fs.PathName.init(source_path);
+
+ if (!linker.options.preserve_extensions) {
+ if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
+ absolute_pathname.ext = ext;
+ }
+ }
+
var base = linker.fs.relativeTo(source_path);
if (strings.lastIndexOfChar(base, '.')) |dot| {
base = base[0..dot];
@@ -285,7 +339,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
) !void {
// extremely naive.
- resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
+ resolve_result.is_from_node_modules = resolve_result.package_json != null or strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
// lazy means:
// Run the resolver
@@ -297,7 +351,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
import_record.path = try linker.generateImportPath(
source_dir,
resolve_result.path_pair.primary.text,
- resolve_result.package_json_version,
+ if (resolve_result.package_json) |package_json| package_json.version else "",
import_path_format,
);
}
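
The linker hunk above consults the loaded .jsb roughly like this: look up candidate package IDs by name, pick the one whose name@version hash matches the resolved package.json, then find the module by its package-relative path; any mismatch means the bundle is stale and must be regenerated. A rough TypeScript sketch with simplified stand-in types (and a linear scan where the real code binary-searches the package's module slice):

    interface BundledPackage { name: string; version: string; hash: number }
    interface BundledModule { packageId: number; path: string }
    interface Bundle {
      packages: BundledPackage[];
      packageIdsByName: Map<string, number[]>;
      modules: BundledModule[];
    }

    function findBundledModule(
      bundle: Bundle,
      pkg: { name: string; version: string; hash: number },
      packageRelativePath: string,
    ): BundledModule {
      const candidates = bundle.packageIdsByName.get(pkg.name) ?? [];
      const pkgId = candidates.find((id) => bundle.packages[id].hash === pkg.hash);
      if (pkgId === undefined) {
        throw new Error("RebuildJSB: version changed for " + pkg.name);
      }
      const found = bundle.modules.find(
        (m) => m.packageId === pkgId && m.path === packageRelativePath,
      );
      if (!found) {
        throw new Error("RebuildJSB: new dependency import " + pkg.name + "/" + packageRelativePath);
      }
      return found;
    }
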
diff --git a/src/node_module_bundle.zig b/src/node_module_bundle.zig
index ada125e22..ec85e4fb1 100644
--- a/src/node_module_bundle.zig
+++ b/src/node_module_bundle.zig
@@ -3,6 +3,19 @@ const Api = schema.Api;
const std = @import("std");
usingnamespace @import("global.zig");
+pub fn modulesIn(bundle: *const Api.JavascriptBundle, pkg: *const Api.JavascriptBundledPackage) []const Api.JavascriptBundledModule {
+ return bundle.modules[pkg.modules_offset .. pkg.modules_offset + pkg.modules_length];
+}
+
+// This corresponds to Api.JavascriptBundledPackage.hash
+pub const BundledPackageHash = u32;
+// This is the offset in the array of packages
+pub const BundledPackageID = u32;
+
+const PackageIDMap = std.AutoHashMap(BundledPackageHash, BundledPackageID);
+
+const PackageNameMap = std.StringHashMap([]BundledPackageID);
+
pub const NodeModuleBundle = struct {
container: Api.JavascriptBundleContainer,
bundle: Api.JavascriptBundle,
@@ -11,14 +24,199 @@ pub const NodeModuleBundle = struct {
bytes: []u8 = undefined,
fd: FileDescriptorType = 0,
+ // Lookup packages by ID - hash(name@version)
+ package_id_map: PackageIDMap,
+
+ // Lookup packages by name. Remember that you can have multiple versions of the same package.
+ package_name_map: PackageNameMap,
+
+ // This is stored as a single pre-allocated, flat array so we can avoid dynamic allocations.
+ package_name_ids_ptr: []BundledPackageID = &([_]BundledPackageID{}),
+
pub const magic_bytes = "#!/usr/bin/env speedy\n\n";
threadlocal var jsbundle_prefix: [magic_bytes.len + 5]u8 = undefined;
+ pub fn loadPackageMap(this: *NodeModuleBundle) !void {
+ this.package_name_map = PackageNameMap.init(this.allocator);
+ var ids = PackageIDMap.init(this.allocator);
+
+ const package_count = @truncate(u32, this.bundle.packages.len);
+
+ // this.package_has_multiple_versions = try std.bit_set.DynamicBitSet.initFull(package_count, this.allocator);
+
+ try ids.ensureCapacity(
+ package_count,
+ );
+ this.package_name_ids_ptr = try this.allocator.alloc(BundledPackageID, this.bundle.packages.len);
+ var remaining_names = this.package_name_ids_ptr;
+ try this.package_name_map.ensureCapacity(
+ package_count,
+ );
+ var prev_package_ids_for_name: []u32 = &[_]u32{};
+
+ for (this.bundle.packages) |package, _package_id| {
+ const package_id = @truncate(u32, _package_id);
+ std.debug.assert(package.hash != 0);
+ ids.putAssumeCapacityNoClobber(package.hash, @truncate(u32, package_id));
+
+ const package_name = this.str(package.name);
+ var entry = this.package_name_map.getOrPutAssumeCapacity(package_name);
+
+ if (entry.found_existing) {
+ // this.package_has_multiple_versions.set(prev_package_ids_for_name[prev_package_ids_for_name.len - 1]);
+ // Assert that multiple packages with the same name come immediately after another
+ // This catches any issues with the sorting order, which would cause all sorts of weird bugs
+ // This also allows us to simply extend the length of the previous slice to the new length
+ // Saving us an allocation
+ if (@ptrToInt(prev_package_ids_for_name.ptr) != @ptrToInt(entry.value_ptr.ptr)) {
+ Output.prettyErrorln(
+ \\<r><red>Fatal<r>: incorrect package sorting order detected in .jsb file.
+ \\This is a bug! Please create an issue.
+ \\If this bug blocks you from doing work, for now
+ \\please <b>avoid having multiple versions of <cyan>"{s}"<r> in the same bundle.
+ \\
+ \\- Jarred
+ ,
+ .{
+ package_name,
+ },
+ );
+ Global.crash();
+ }
+
+ const end = prev_package_ids_for_name.len + 1;
+ // Assert we have enough room to add another package
+ std.debug.assert(remaining_names.len >= 1);
+ prev_package_ids_for_name = prev_package_ids_for_name.ptr[0..end];
+ prev_package_ids_for_name[end - 1] = package_id;
+ entry.value_ptr.* = prev_package_ids_for_name;
+ remaining_names = remaining_names[1..];
+ } else {
+ prev_package_ids_for_name = remaining_names[0..1];
+ prev_package_ids_for_name[0] = package_id;
+ entry.value_ptr.* = prev_package_ids_for_name;
+ remaining_names = remaining_names[1..];
+ }
+ }
+
+ this.package_id_map = ids;
+ }
+
+ pub fn getPackageIDByHash(this: *const NodeModuleBundle, hash: BundledPackageID) ?u32 {
+ return this.package_id_map.get(hash);
+ }
+
+ pub fn getPackageIDByName(this: *const NodeModuleBundle, name: string) ?[]u32 {
+ return this.package_name_map.get(name);
+ }
+
+ pub fn getPackage(this: *const NodeModuleBundle, name: string) ?*const Api.JavascriptBundledPackage {
+ const package_ids = this.getPackageIDByName(name) orelse return null;
+ return &this.bundle.packages[@intCast(usize, package_ids[0])];
+ }
+
+ pub fn hasModule(this: *const NodeModuleBundle, name: string) ?*const Api.JavascriptBundledPackage {
+ return this.getPackage(name);
+ }
+
+ pub const ModuleQuery = struct {
+ package: *const Api.JavascriptBundledPackage,
+ relative_path: string,
+ extensions: []string,
+ };
+
+ pub fn allocModuleImport(
+ this: *const NodeModuleBundle,
+ to: *const Api.JavascriptBundledModule,
+ allocator: *std.mem.Allocator,
+ ) !string {
+ return try std.fmt.allocPrint(
+ allocator,
+ "{x}/{s}",
+ .{
+ this.bundle.packages[to.package_id].hash,
+ this.str(to.path),
+ },
+ );
+ }
+
+ pub fn findModuleInPackageByPathWithoutPackageName(
+ this: *const NodeModuleBundle,
+ package: *const Api.JavascriptBundledPackage,
+ query: ModuleQuery,
+ ) ?Api.JavascriptBundledModule {
+ // const ModuleSearcher = struct {
+ // ctx: *const NodeModuleBundle,
+ // query: ModuleQuery,
+ // };
+ // std.sort.binarySearch(comptime T: type, key: T, items: []const T, context: anytype, comptime compareFn: fn(context:@TypeOf(context), lhs:T, rhs:T)math.Order)
+ // Not implemented yet; return null so callers can fall back.
+ return null;
+ }
+
+ pub fn findModuleInPackage(
+ this: *const NodeModuleBundle,
+ package: *const Api.JavascriptBundledPackage,
+ _query: string,
+ ) ?*const Api.JavascriptBundledModule {
+ const ModuleFinder = struct {
+ const Self = @This();
+ ctx: *const NodeModuleBundle,
+ pkg: *const Api.JavascriptBundledPackage,
+ query: string,
+
+ // Since the module we're searching for doesn't exist in the manifest, the key uses an out-of-range string offset as a sentinel; moduleName detects it and substitutes the query string.
+ pub fn moduleName(context: *const Self, module: *const Api.JavascriptBundledModule) string {
+ return if (module.path.offset == context.ctx.bundle.manifest_string.len) context.query else context.ctx.str(module.path);
+ }
+
+ pub fn cmpAsc(context: Self, lhs: Api.JavascriptBundledModule, rhs: Api.JavascriptBundledModule) std.math.Order {
+ // Compare the module names byte-by-byte, then by length
+ const lhs_name = context.moduleName(&lhs);
+ const rhs_name = context.moduleName(&rhs);
+ const VoidType = void;
+
+ const traversal_length = std.math.min(lhs_name.len, rhs_name.len);
+
+ for (lhs_name[0..traversal_length]) |char, i| {
+ switch (std.math.order(char, rhs_name[i])) {
+ .lt, .gt => |order| {
+ return order;
+ },
+ .eq => {},
+ }
+ }
+
+ return std.math.order(lhs_name.len, rhs_name.len);
+ }
+ };
+ var to_find = Api.JavascriptBundledModule{
+ .package_id = 0,
+ .code = .{},
+ .path = .{
+ .offset = @truncate(u32, this.bundle.manifest_string.len),
+ },
+ };
+
+ var finder = ModuleFinder{ .ctx = this, .pkg = package, .query = _query };
+
+ const modules = modulesIn(&this.bundle, package);
+ const module_id = std.sort.binarySearch(
+ Api.JavascriptBundledModule,
+ to_find,
+ modules,
+ finder,
+ ModuleFinder.cmpAsc,
+ ) orelse return null;
+ return &modules[module_id];
+ }
+
pub fn init(container: Api.JavascriptBundleContainer, allocator: *std.mem.Allocator) NodeModuleBundle {
return NodeModuleBundle{
.container = container,
.bundle = container.bundle.?,
.allocator = allocator,
+ .package_id_map = undefined,
+ .package_name_map = undefined,
+ .package_name_ids_ptr = undefined,
};
}
@@ -43,14 +241,19 @@ pub const NodeModuleBundle = struct {
var reader = schema.Reader.init(read_bytes, allocator);
var container = try Api.JavascriptBundleContainer.decode(&reader);
- return NodeModuleBundle{
+ var bundle = NodeModuleBundle{
.allocator = allocator,
.container = container,
.bundle = container.bundle.?,
.fd = stream.handle,
.bytes = read_bytes,
.bytes_ptr = file_bytes,
+ .package_id_map = undefined,
+ .package_name_map = undefined,
+ .package_name_ids_ptr = undefined,
};
+ try bundle.loadPackageMap();
+ return bundle;
}
pub fn str(bundle: *const NodeModuleBundle, pointer: Api.StringPointer) string {
@@ -58,7 +261,6 @@ pub const NodeModuleBundle = struct {
}
pub fn getPackageSize(this: *const NodeModuleBundle, pkg: Api.JavascriptBundledPackage) usize {
- const modules = this.bundle.modules[pkg.modules_offset .. pkg.modules_offset + pkg.modules_length];
+ const modules = modulesIn(&this.bundle, &pkg);
var size: usize = 0;
for (modules) |module| {
size += module.code.length;
@@ -86,10 +288,11 @@ pub const NodeModuleBundle = struct {
);
for (modules) |module| {
- const size_level = switch (module.code.length) {
- 0...5_000 => SizeLevel.good,
- 5_001...74_999 => SizeLevel.neutral,
- else => SizeLevel.bad,
+ const size_level: SizeLevel =
+ switch (module.code.length) {
+ 0...5_000 => .good,
+ 5_001...74_999 => .neutral,
+ else => .bad,
};
Output.print(indent, .{});
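
findModuleInPackage works because each package's modules are stored as a contiguous, sorted slice, so a byte-wise comparison (falling back to length) supports a plain binary search. A small TypeScript sketch of the same comparator and search over an array of relative paths:

    function compareModulePath(a: string, b: string): number {
      const n = Math.min(a.length, b.length);
      for (let i = 0; i < n; i++) {
        const d = a.charCodeAt(i) - b.charCodeAt(i);
        if (d !== 0) return d;
      }
      return a.length - b.length; // shorter path sorts first, mirroring cmpAsc
    }

    function findModuleIndex(sortedPaths: string[], query: string): number | null {
      let lo = 0;
      let hi = sortedPaths.length - 1;
      while (lo <= hi) {
        const mid = (lo + hi) >> 1;
        const cmp = compareModulePath(query, sortedPaths[mid]);
        if (cmp === 0) return mid;
        if (cmp < 0) hi = mid - 1;
        else lo = mid + 1;
      }
      return null;
    }

    // findModuleIndex(["index.js", "lib/a.js", "lib/b.js"], "lib/a.js") === 1
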
diff --git a/src/options.zig b/src/options.zig
index 954e01dfa..56c72ff1a 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -7,6 +7,7 @@ const api = @import("./api/schema.zig");
const Api = api.Api;
const defines = @import("./defines.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
+const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;
usingnamespace @import("global.zig");
@@ -96,7 +97,7 @@ pub const ExternalModules = struct {
.prefix = external[0..i],
.suffix = external[i + 1 .. external.len],
}) catch unreachable;
- } else if (resolver.Resolver.isPackagePath(external)) {
+ } else if (resolver.isPackagePath(external)) {
result.node_modules.insert(external) catch unreachable;
} else {
const normalized = validatePath(log, fs, cwd, external, allocator, "external path");
@@ -478,6 +479,60 @@ pub const DefaultUserDefines = struct {
};
};
+pub fn definesFromTransformOptions(allocator: *std.mem.Allocator, log: *logger.Log, _input_define: ?Api.StringMap) !*defines.Define {
+ var input_user_define = _input_define orelse std.mem.zeroes(Api.StringMap);
+
+ var user_defines = try stringHashMapFromArrays(
+ defines.RawDefines,
+ allocator,
+ input_user_define.keys,
+ input_user_define.values,
+ );
+ if (input_user_define.keys.len == 0) {
+ try user_defines.put(DefaultUserDefines.NodeEnv.Key, DefaultUserDefines.NodeEnv.Value);
+ }
+
+ var resolved_defines = try defines.DefineData.from_input(user_defines, log, allocator);
+ return try defines.Define.init(
+ allocator,
+ resolved_defines,
+ );
+}
+
+pub fn loadersFromTransformOptions(allocator: *std.mem.Allocator, _loaders: ?Api.LoaderMap) !std.StringHashMap(Loader) {
+ var input_loaders = _loaders orelse std.mem.zeroes(Api.LoaderMap);
+ var loader_values = try allocator.alloc(Loader, input_loaders.loaders.len);
+ for (loader_values) |_, i| {
+ const loader = switch (input_loaders.loaders[i]) {
+ .jsx => Loader.jsx,
+ .js => Loader.js,
+ .ts => Loader.ts,
+ .css => Loader.css,
+ .tsx => Loader.tsx,
+ .json => Loader.json,
+ else => unreachable,
+ };
+
+ loader_values[i] = loader;
+ }
+
+ var loaders = try stringHashMapFromArrays(
+ std.StringHashMap(Loader),
+ allocator,
+ input_loaders.extensions,
+ loader_values,
+ );
+ const default_loader_ext = comptime [_]string{ ".jsx", ".json", ".js", ".mjs", ".css", ".ts", ".tsx" };
+
+ inline for (default_loader_ext) |ext| {
+ if (!loaders.contains(ext)) {
+ try loaders.put(ext, defaultLoaders.get(ext).?);
+ }
+ }
+
+ return loaders;
+}
+
pub const BundleOptions = struct {
footer: string = "",
banner: string = "",
@@ -497,6 +552,7 @@ pub const BundleOptions = struct {
preserve_symlinks: bool = false,
preserve_extensions: bool = false,
timings: Timings = Timings{},
+ node_modules_bundle: ?*NodeModuleBundle = null,
append_package_version_in_query_string: bool = false,
@@ -511,6 +567,8 @@ pub const BundleOptions = struct {
out_extensions: std.StringHashMap(string),
import_path_format: ImportPathFormat = ImportPathFormat.relative,
+ pub fn asJavascriptBundleConfig(this: *const BundleOptions) Api.JavascriptBundleConfig {}
+
pub const ImportPathFormat = enum {
relative,
// omit file extension for Node.js packages
@@ -531,44 +589,12 @@ pub const BundleOptions = struct {
log: *logger.Log,
transform: Api.TransformOptions,
) !BundleOptions {
- var loader_values = try allocator.alloc(Loader, transform.loader_values.len);
- for (loader_values) |_, i| {
- const loader = switch (transform.loader_values[i]) {
- .jsx => Loader.jsx,
- .js => Loader.js,
- .ts => Loader.ts,
- .css => Loader.css,
- .tsx => Loader.tsx,
- .json => Loader.json,
- else => unreachable,
- };
-
- loader_values[i] = loader;
- }
-
- var loaders = try stringHashMapFromArrays(std.StringHashMap(Loader), allocator, transform.loader_keys, loader_values);
- const default_loader_ext = [_]string{ ".jsx", ".json", ".js", ".mjs", ".css", ".ts", ".tsx" };
- inline for (default_loader_ext) |ext| {
- if (!loaders.contains(ext)) {
- try loaders.put(ext, defaultLoaders.get(ext).?);
- }
- }
-
- var user_defines = try stringHashMapFromArrays(defines.RawDefines, allocator, transform.define_keys, transform.define_values);
- if (transform.define_keys.len == 0) {
- try user_defines.put(DefaultUserDefines.NodeEnv.Key, DefaultUserDefines.NodeEnv.Value);
- }
-
- var resolved_defines = try defines.DefineData.from_input(user_defines, log, allocator);
const output_dir_parts = [_]string{ try std.process.getCwdAlloc(allocator), transform.output_dir orelse "out" };
var opts: BundleOptions = BundleOptions{
.log = log,
.resolve_mode = transform.resolve orelse .dev,
- .define = try defines.Define.init(
- allocator,
- resolved_defines,
- ),
- .loaders = loaders,
+ .define = try definesFromTransformOptions(allocator, log, transform.define),
+ .loaders = try loadersFromTransformOptions(allocator, transform.loaders),
.output_dir = try fs.absAlloc(allocator, &output_dir_parts),
.platform = Platform.from(transform.platform),
.write = transform.write orelse false,
@@ -665,6 +691,46 @@ pub const BundleOptions = struct {
opts.output_dir_handle = try openOutputDir(opts.output_dir);
}
+ if (opts.resolve_mode == .lazy and !(transform.generate_node_module_bundle orelse false)) {
+ if (transform.node_modules_bundle_path) |bundle_path| {
+ if (bundle_path.len > 0) {
+ load_bundle: {
+ const pretty_path = fs.relativeTo(bundle_path);
+ var bundle_file = std.fs.openFileAbsolute(bundle_path, .{ .read = true, .write = true }) catch |err| {
+ Output.disableBuffering();
+ Output.prettyErrorln("<r>error opening <d>\"<r><b>{s}<r><d>\":<r> <b><red>{s}<r>", .{ pretty_path, @errorName(err) });
+ break :load_bundle;
+ };
+
+ const time_start = std.time.nanoTimestamp();
+ if (NodeModuleBundle.loadBundle(allocator, bundle_file)) |bundle| {
+ var node_module_bundle = try allocator.create(NodeModuleBundle);
+ node_module_bundle.* = bundle;
+ opts.node_modules_bundle = node_module_bundle;
+ const elapsed = @intToFloat(f64, (std.time.nanoTimestamp() - time_start)) / std.time.ns_per_ms;
+ Output.prettyErrorln(
+ "<r><b><d>\"{s}\"<r><d> - {d} modules, {d} packages <b>[{d:>.2}ms]<r>",
+ .{
+ pretty_path,
+ node_module_bundle.bundle.modules.len,
+ node_module_bundle.bundle.packages.len,
+ elapsed,
+ },
+ );
+ Output.flush();
+ } else |err| {
+ Output.disableBuffering();
+ Output.prettyErrorln(
+ "<r>error reading <d>\"<r><b>{s}<r><d>\":<r> <b><red>{s}<r>, <b>deleting it<r> so you don't keep seeing this message.",
+ .{ pretty_path, @errorName(err) },
+ );
+ bundle_file.close();
+ }
+ }
+ }
+ }
+ }
+
return opts;
}
};
@@ -673,12 +739,12 @@ pub fn openOutputDir(output_dir: string) !std.fs.Dir {
return std.fs.openDirAbsolute(output_dir, std.fs.Dir.OpenDirOptions{}) catch brk: {
std.fs.makeDirAbsolute(output_dir) catch |err| {
Output.printErrorln("error: Unable to mkdir \"{s}\": \"{s}\"", .{ output_dir, @errorName(err) });
- std.os.exit(1);
+ Global.crash();
};
var handle = std.fs.openDirAbsolute(output_dir, std.fs.Dir.OpenDirOptions{}) catch |err2| {
Output.printErrorln("error: Unable to open \"{s}\": \"{s}\"", .{ output_dir, @errorName(err2) });
- std.os.exit(1);
+ Global.crash();
};
break :brk handle;
};
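
loadersFromTransformOptions zips the API's extension/loader arrays into a map and then back-fills the built-in extensions the user did not override. The TypeScript sketch below mirrors that merge; the concrete default table shown is an assumption for illustration, while the real mapping lives in options.zig's defaultLoaders:

    type Loader = "jsx" | "js" | "ts" | "tsx" | "css" | "json";

    // Assumed default table for the example only.
    const defaultLoaders: Record<string, Loader> = {
      ".jsx": "jsx", ".json": "json", ".js": "js", ".mjs": "js",
      ".css": "css", ".ts": "ts", ".tsx": "tsx",
    };

    function loadersFromTransformOptions(
      extensions: string[],
      loaders: Loader[],
    ): Map<string, Loader> {
      const map = new Map<string, Loader>();
      extensions.forEach((ext, i) => map.set(ext, loaders[i])); // user-provided pairs
      for (const [ext, loader] of Object.entries(defaultLoaders)) {
        if (!map.has(ext)) map.set(ext, loader); // fill in missing defaults
      }
      return map;
    }
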
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 596a60c57..e8c8a98b5 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -11,13 +11,14 @@ const resolver = @import("./resolver.zig");
const MainFieldMap = std.StringHashMap(string);
const BrowserMap = std.StringHashMap(string);
-
+threadlocal var hashed_buf: [2048]u8 = undefined;
pub const PackageJSON = struct {
name: string = "",
source: logger.Source,
main_fields: MainFieldMap,
module_type: options.ModuleType,
version: string = "",
+ hash: u32 = 0,
// Present if the "browser" field is present. This field is intended to be
// used by bundlers and lets you redirect the paths of certain 3rd-party
@@ -46,7 +47,13 @@ pub const PackageJSON = struct {
//
browser_map: BrowserMap,
- pub fn parse(comptime ResolverType: type, r: *ResolverType, input_path: string, dirname_fd: StoredFileDescriptorType) ?PackageJSON {
+ pub fn parse(
+ comptime ResolverType: type,
+ r: *ResolverType,
+ input_path: string,
+ dirname_fd: StoredFileDescriptorType,
+ comptime generate_hash: bool,
+ ) ?PackageJSON {
const parts = [_]string{ input_path, "package.json" };
const package_json_path_ = r.fs.abs(&parts);
@@ -190,6 +197,13 @@ pub const PackageJSON = struct {
// TODO: side effects
// TODO: exports map
+ if (generate_hash) {
+ std.mem.copy(u8, &hashed_buf, package_json.name);
+ hashed_buf[package_json.name.len] = '@';
+ std.mem.copy(u8, hashed_buf[package_json.name.len + 1 ..], package_json.version);
+ package_json.hash = @truncate(u32, std.hash.Wyhash.hash(0, hashed_buf[0 .. package_json.name.len + 1 + package_json.version.len]));
+ }
+
return package_json;
}
};
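
The new package hash is a 32-bit truncation of a hash over the "name@version" key, which is what both the bundler and the linker use to detect version changes. The sketch below keeps the key construction but substitutes a dependency-free FNV-1a 32-bit hash for Wyhash, so the exact values differ from the real implementation:

    // Stand-in 32-bit hash; the real code uses Wyhash truncated to u32.
    function fnv1a32(input: string): number {
      let hash = 0x811c9dc5;
      for (let i = 0; i < input.length; i++) {
        hash ^= input.charCodeAt(i);
        hash = Math.imul(hash, 0x01000193) >>> 0;
      }
      return hash >>> 0;
    }

    function packageHash(name: string, version: string): number {
      return fnv1a32(`${name}@${version}`); // key is "name@version"
    }

    // packageHash("react", "17.0.2") is stable across runs, so it can key the
    // bundle's package list and flag a regenerate when the version changes.
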
diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig
index 8e3e5a718..2bbd83c55 100644
--- a/src/resolver/resolve_path.zig
+++ b/src/resolver/resolve_path.zig
@@ -149,6 +149,34 @@ pub fn longestCommonPathGeneric(strings: []const []const u8, comptime separator:
return strings[0][0 .. last_common_separator + 1];
}
+const sep_posix_str = &([_]u8{std.fs.path.sep_posix});
+const node_modules_root = "node_modules" ++ std.fs.path.sep_str;
+
+pub const PackageRelativePath = struct { base_path: string, package_name: string };
+pub fn packageRelative(absolute_path: string) ?PackageRelativePath {
+ if (std.Target.current.os.tag == .windows) {
+ @compileError("Not implemented in windows");
+ }
+
+ const node_modules_index = std.mem.lastIndexOf(u8, absolute_path, node_modules_root) orelse return null;
+ const current_path = absolute_path[node_modules_index + node_modules_root.len ..];
+ return packageRelativeFromNodeModulesFolder(current_path);
+}
+
+pub fn packageRelativeFromNodeModulesFolder(current_path: string) ?PackageRelativePath {
+ if (std.Target.current.os.tag == .windows) {
+ @compileError("Not implemented in windows");
+ }
+
+ const package_name_end = std.mem.indexOfScalar(u8, current_path, std.fs.path.sep) orelse return null;
+ const package_name = current_path[0..package_name_end];
+
+ return PackageRelativePath{
+ .base_path = current_path[package_name_end + 1 ..],
+ .package_name = package_name,
+ };
+}
+
pub fn longestCommonPath(strings: []const []const u8) []const u8 {
return longestCommonPathGeneric(strings, '/', isSepAny);
}
@@ -678,7 +706,7 @@ pub fn joinAbsStringBuf(_cwd: []const u8, buf: []u8, _parts: anytype, comptime _
return _cwd;
}
- if ((_platform == .loose or _platform == .posix) and parts.len == 1 and parts[0].len == 1 and parts[0] == std.fs.path.sep_posix) {
+ if ((_platform == .loose or _platform == .posix) and parts.len == 1 and parts[0].len == 1 and parts[0][0] == std.fs.path.sep_posix) {
return "/";
}
@@ -859,50 +887,50 @@ test "joinAbsStringPosix" {
var t = tester.Tester.t(std.heap.c_allocator);
defer t.report(@src());
const string = []const u8;
- const cwd = "/Users/jarredsumner/Code/app";
+ const cwd = "/Users/jarredsumner/Code/app/";
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/bar/file.js",
- joinAbsString(cwd, [_]string{ "foo", "bar", "file.js" }, .posix),
+ joinAbsString(cwd, &[_]string{ "foo", "bar", "file.js" }, .posix),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "foo", "bar", "../file.js" }, .posix),
+ joinAbsString(cwd, &[_]string{ "foo", "bar", "../file.js" }, .posix),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "foo", "./bar", "../file.js" }, .posix),
+ joinAbsString(cwd, &[_]string{ "foo", "./bar", "../file.js" }, .posix),
@src(),
);
_ = t.expect(
- "/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "", "../../file.js" }, .posix),
+ "/Users/jarredsumner/file.js",
+ joinAbsString(cwd, &[_]string{ "", "../../file.js" }, .posix),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "././././foo", "././././bar././././", "../file.js" }, .posix),
+ joinAbsString(cwd, &[_]string{ "././././foo", "././././bar././././", "../file.js" }, .posix),
@src(),
);
_ = t.expect(
"/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", "././././bar././././", "../file.js" }, .posix),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", "././././bar././././", "../file.js" }, .posix),
@src(),
);
_ = t.expect(
"/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", ".", "././././bar././././", ".", "../file.js" }, .posix),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", ".", "././././bar././././", ".", "../file.js" }, .posix),
@src(),
);
_ = t.expect(
"/Code/app/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", "..", "././././bar././././", ".", "../file.js" }, .posix),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", "..", "././././bar././././", ".", "../file.js" }, .posix),
@src(),
);
}
@@ -915,81 +943,81 @@ test "joinAbsStringLoose" {
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/bar/file.js",
- joinAbsString(cwd, [_]string{ "foo", "bar", "file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "foo", "bar", "file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "foo", "bar", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "foo", "bar", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "foo", "./bar", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "foo", "./bar", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "././././foo", "././././bar././././", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "././././foo", "././././bar././././", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", "././././bar././././", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", "././././bar././././", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", ".", "././././bar././././", ".", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", ".", "././././bar././././", ".", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Code/app/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", "..", "././././bar././././", ".", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", "..", "././././bar././././", ".", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/bar/file.js",
- joinAbsString(cwd, [_]string{ "foo", "bar", "file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "foo", "bar", "file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "foo", "bar", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "foo", "bar", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "foo", "./bar", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "foo", "./bar", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Users/jarredsumner/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ ".\\.\\.\\.\\foo", "././././bar././././", "..\\file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ ".\\.\\.\\.\\foo", "././././bar././././", "..\\file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", "././././bar././././", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", "././././bar././././", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Code/app/foo/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", ".", "././././bar././././", ".", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", ".", "././././bar././././", ".", "../file.js" }, .loose),
@src(),
);
_ = t.expect(
"/Code/app/file.js",
- joinAbsString(cwd, [_]string{ "/Code/app", "././././foo", "..", "././././bar././././", ".", "../file.js" }, .loose),
+ joinAbsString(cwd, &[_]string{ "/Code/app", "././././foo", "..", "././././bar././././", ".", "../file.js" }, .loose),
@src(),
);
}
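
packageRelative splits an absolute path on its last node_modules/ segment into the package name and the path inside that package (POSIX separators only, matching the @compileError guard for Windows). An illustrative TypeScript version:

    function packageRelative(absolutePath: string): { packageName: string; basePath: string } | null {
      const marker = "node_modules/";
      const idx = absolutePath.lastIndexOf(marker);
      if (idx < 0) return null;
      const current = absolutePath.slice(idx + marker.length);
      const end = current.indexOf("/");
      if (end < 0) return null;
      return { packageName: current.slice(0, end), basePath: current.slice(end + 1) };
    }

    // packageRelative("/app/node_modules/react/index.js")
    //   => { packageName: "react", basePath: "index.js" }
    // Note: scoped packages (@scope/name) would need a second split; the Zig
    // helper appears to share that limitation.
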
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 489dbbfed..6819fe3f0 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -19,6 +19,12 @@ const StringBoolMap = std.StringHashMap(bool);
const allocators = @import("../allocators.zig");
const Path = Fs.Path;
+const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
+
+pub fn isPackagePath(path: string) bool {
+ // this could probably be flattened into something more optimized
+ return path[0] != '/' and !strings.startsWith(path, "./") and !strings.startsWith(path, "../") and !strings.eql(path, ".") and !strings.eql(path, "..");
+}
pub const SideEffectsData = struct {
source: *logger.Source,
@@ -120,8 +126,7 @@ pub const Result = struct {
jsx: options.JSX.Pragma = options.JSX.Pragma{},
- package_json_version: ?string = null,
- package_json_name: ?string = null,
+ package_json: ?*PackageJSON = null,
is_external: bool = false,
@@ -281,8 +286,7 @@ pub const MatchResult = struct {
dirname_fd: StoredFileDescriptorType = 0,
file_fd: StoredFileDescriptorType = 0,
is_node_module: bool = false,
- package_json_version: ?string = null,
- package_json_name: ?string = null,
+ package_json: ?*PackageJSON = null,
diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
};
@@ -304,6 +308,7 @@ pub fn NewResolver(cache_files: bool) type {
fs: *Fs.FileSystem,
log: *logger.Log,
allocator: *std.mem.Allocator,
+ node_module_bundle: ?*NodeModuleBundle,
debug_logs: ?DebugLogs = null,
elapsed: i128 = 0, // tracing
@@ -367,6 +372,7 @@ pub fn NewResolver(cache_files: bool) type {
.caches = CacheSet.init(allocator),
.opts = opts,
.fs = _fs,
+ .node_module_bundle = opts.node_modules_bundle,
.log = log,
};
}
@@ -521,8 +527,7 @@ pub fn NewResolver(cache_files: bool) type {
.diff_case = res.diff_case,
.dirname_fd = dir_info.getFileDescriptor(),
.is_from_node_modules = res.is_node_module,
- .package_json_name = res.package_json_name,
- .package_json_version = res.package_json_version,
+ .package_json = res.package_json,
};
}
}
@@ -551,8 +556,7 @@ pub fn NewResolver(cache_files: bool) type {
.path_pair = entry.path_pair,
.diff_case = entry.diff_case,
.is_from_node_modules = entry.is_node_module,
- .package_json_name = entry.package_json_name,
- .package_json_version = entry.package_json_version,
+ .package_json = entry.package_json,
};
}
@@ -610,8 +614,7 @@ pub fn NewResolver(cache_files: bool) type {
.is_from_node_modules = _result.is_node_module,
.module_type = pkg.module_type,
.dirname_fd = _result.dirname_fd,
- .package_json_version = pkg.version,
- .package_json_name = pkg.name,
+ .package_json = pkg,
};
check_relative = false;
check_package = false;
@@ -629,8 +632,7 @@ pub fn NewResolver(cache_files: bool) type {
.diff_case = res.diff_case,
.is_from_node_modules = res.is_node_module,
.dirname_fd = res.dirname_fd,
- .package_json_version = res.package_json_version,
- .package_json_name = res.package_json_name,
+ .package_json = res.package_json,
};
} else if (!check_package) {
return null;
@@ -679,8 +681,7 @@ pub fn NewResolver(cache_files: bool) type {
.dirname_fd = node_module.dirname_fd,
.diff_case = node_module.diff_case,
.is_from_node_modules = true,
- .package_json_version = package_json.version,
- .package_json_name = package_json.name,
+ .package_json = package_json,
};
}
} else {
@@ -702,8 +703,7 @@ pub fn NewResolver(cache_files: bool) type {
.diff_case = res.diff_case,
.is_from_node_modules = res.is_node_module,
.dirname_fd = res.dirname_fd,
- .package_json_version = res.package_json_version,
- .package_json_name = res.package_json_name,
+ .package_json = res.package_json,
};
} else {
// Note: node's "self references" are not currently supported
@@ -719,8 +719,7 @@ pub fn NewResolver(cache_files: bool) type {
const pkg_json = dir_info.package_json orelse continue;
const rel_path = r.fs.relative(pkg_json.source.key_path.text, path.text);
result.module_type = pkg_json.module_type;
- result.package_json_version = if (result.package_json_version == null) pkg_json.version else result.package_json_version;
- result.package_json_name = if (result.package_json_name == null) pkg_json.name else result.package_json_name;
+ result.package_json = result.package_json orelse pkg_json;
if (r.checkBrowserMap(pkg_json, rel_path)) |remapped| {
if (remapped.len == 0) {
path.is_disabled = true;
@@ -888,17 +887,12 @@ pub fn NewResolver(cache_files: bool) type {
}
pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON {
- const pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd) orelse return null;
+ const pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, !cache_files) orelse return null;
var _pkg = try r.allocator.create(PackageJSON);
_pkg.* = pkg;
return _pkg;
}
- pub fn isPackagePath(path: string) bool {
- // this could probably be flattened into something more optimized
- return path[0] != '/' and !strings.startsWith(path, "./") and !strings.startsWith(path, "../") and !strings.eql(path, ".") and !strings.eql(path, "..");
- }
-
fn dirInfoCached(r: *ThisResolver, path: string) !?*DirInfo {
const top_result = try r.dir_cache.getOrPut(path);
if (top_result.status != .unknown) {
@@ -1322,8 +1316,7 @@ pub fn NewResolver(cache_files: bool) type {
.path_pair = PathPair{
.primary = _path,
},
- .package_json_version = browser_json.version,
- .package_json_name = browser_json.name,
+ .package_json = browser_json,
};
}
@@ -1339,8 +1332,7 @@ pub fn NewResolver(cache_files: bool) type {
if (dir_info.package_json) |package_json| {
return MatchResult{
.path_pair = PathPair{ .primary = Fs.Path.init(result.path) },
- .package_json_name = package_json.name,
- .package_json_version = package_json.version,
+ .package_json = package_json,
.dirname_fd = result.dirname_fd,
};
}
@@ -1383,8 +1375,7 @@ pub fn NewResolver(cache_files: bool) type {
return MatchResult{
.path_pair = .{ .primary = Path.init(out_buf) },
.diff_case = lookup.diff_case,
- .package_json_name = package_json.name,
- .package_json_version = package_json.version,
+ .package_json = package_json,
.dirname_fd = dir_info.getFileDescriptor(),
};
}
@@ -1423,8 +1414,7 @@ pub fn NewResolver(cache_files: bool) type {
.path_pair = PathPair{
.primary = _path,
},
- .package_json_version = browser_json.version,
- .package_json_name = browser_json.name,
+ .package_json = browser_json,
};
}
@@ -1469,8 +1459,7 @@ pub fn NewResolver(cache_files: bool) type {
.path_pair = .{ .primary = Path.init(file.path) },
.diff_case = file.diff_case,
.dirname_fd = file.dirname_fd,
- .package_json_name = package_json.name,
- .package_json_version = package_json.version,
+ .package_json = package_json,
};
}
}
@@ -1497,13 +1486,11 @@ pub fn NewResolver(cache_files: bool) type {
}
const dir_info = (r.dirInfoCached(path) catch null) orelse return null;
- var package_json_version: ?string = null;
- var package_json_name: ?string = null;
+ var package_json: ?*PackageJSON = null;
// Try using the main field(s) from "package.json"
if (dir_info.package_json) |pkg_json| {
- package_json_version = pkg_json.version;
- package_json_name = pkg_json.name;
+ package_json = pkg_json;
if (pkg_json.main_fields.count() > 0) {
const main_field_values = pkg_json.main_fields;
const main_field_keys = r.opts.main_fields;
@@ -1564,8 +1551,7 @@ pub fn NewResolver(cache_files: bool) type {
},
.diff_case = auto_main_result.diff_case,
.dirname_fd = auto_main_result.dirname_fd,
- .package_json_version = pkg_json.version,
- .package_json_name = pkg_json.name,
+ .package_json = package_json,
};
} else {
if (r.debug_logs) |*debug| {
@@ -1576,15 +1562,13 @@ pub fn NewResolver(cache_files: bool) type {
}) catch {};
}
var _auto_main_result = auto_main_result;
- _auto_main_result.package_json_version = pkg_json.version;
- _auto_main_result.package_json_name = pkg_json.name;
+ _auto_main_result.package_json = package_json;
return _auto_main_result;
}
}
}
- if (_result.package_json_version == null) _result.package_json_version = package_json_version;
- if (_result.package_json_name == null) _result.package_json_name = package_json_name;
+ _result.package_json = _result.package_json orelse package_json;
return _result;
}
}
@@ -1592,13 +1576,7 @@ pub fn NewResolver(cache_files: bool) type {
// Look for an "index" file with known extensions
if (r.loadAsIndexWithBrowserRemapping(dir_info, path, extension_order)) |*res| {
- if (res.package_json_version == null and package_json_version != null) {
- res.package_json_version = package_json_version;
- }
-
- if (res.package_json_name == null and package_json_name != null) {
- res.package_json_name = package_json_name;
- }
+ res.package_json = res.package_json orelse package_json;
return res.*;
}
@@ -1881,5 +1859,9 @@ pub fn NewResolver(cache_files: bool) type {
};
}
-pub const Resolver = NewResolver(true);
-pub const ResolverUncached = NewResolver(false);
+pub const Resolver = NewResolver(
+ true,
+);
+pub const ResolverUncached = NewResolver(
+ false,
+);
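
The resolver hunks above replace the duplicated `package_json_name`/`package_json_version` strings on `Result` and `MatchResult` with a single optional `*PackageJSON`, so the repeated "fill in only if still unset" logic collapses to one `orelse`. A minimal sketch of that pattern, using invented stand-in types rather than the repo's definitions:

```zig
const std = @import("std");

// Invented stand-ins for illustration only; not the repo's definitions.
const PackageJSON = struct {
    name: []const u8,
    version: []const u8,
};

const Result = struct {
    // One optional pointer replaces the old pair of optional strings.
    package_json: ?*const PackageJSON = null,
};

test "orelse keeps the first package.json that was found" {
    const pkg = PackageJSON{ .name = "left-pad", .version = "1.3.0" };
    const other = PackageJSON{ .name = "is-even", .version = "1.0.0" };
    var result = Result{};

    // Same effect as the old `if (x == null) x = ...` pair, done once.
    result.package_json = result.package_json orelse &pkg;
    // A later candidate never overwrites an already-populated field.
    result.package_json = result.package_json orelse &other;

    try std.testing.expectEqualStrings("left-pad", result.package_json.?.name);
    try std.testing.expectEqualStrings("1.3.0", result.package_json.?.version);
}
```

Carrying the whole pointer also means callers gain access to any other package.json data they need later without widening `Result` and `MatchResult` again.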
diff --git a/src/runtime.js b/src/runtime.js
index e00271303..5ac6d3a93 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -53,6 +53,10 @@ export var __commonJS =
var require_cache = new WeakMap();
+export var __SPEEDY_INTERNAL_DO_NOT_USE_OR_YOU_WILL_BE_FIRED = {
+ RequireFailedError: class RequireFailedError {},
+};
+
export var __require = (namespace) => {
var entry = require_cache.get(namespace);
if (typeof entry !== "undefined") {
@@ -65,6 +69,16 @@ export var __require = (namespace) => {
? namespace["default"]
: namespace;
+ if (typeof target !== "function") {
+ throw new __SPEEDY_INTERNAL_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.RequireFailedError(
+ `Couldn't find module "${
+ typeof namespace === "string"
+ ? namespace
+ : namespace.name || namespace.displayName || namespace.toString()
+ }"`
+ );
+ }
+
var exports = target();
require_cache.set(namespace, exports);
return exports;
@@ -81,3 +95,21 @@ export var __name = (target, name) => {
};
export const __esModule = true;
+
+// Used to implement ES6 exports to CommonJS
+export var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+
+export var __reExport = (target, module, desc) => {
+ if ((module && typeof module === "object") || typeof module === "function")
+ for (let key of __getOwnPropNames(module))
+ if (!__hasOwnProp.call(target, key) && key !== "default")
+ __defProp(target, key, {
+ get: () => module[key],
+ enumerable:
+ !(desc = __getOwnPropDesc(module, key)) || desc.enumerable,
+ });
+ return target;
+};
diff --git a/src/runtime.version b/src/runtime.version
index 9a0793a08..e8750c9a1 100644
--- a/src/runtime.version
+++ b/src/runtime.version
@@ -1 +1 @@
-6c20b700cd52b930 \ No newline at end of file
+e9b61815176778be \ No newline at end of file
diff --git a/src/runtime.zig b/src/runtime.zig
index 8f97718d8..6f53af6ad 100644
--- a/src/runtime.zig
+++ b/src/runtime.zig
@@ -19,8 +19,17 @@ pub const Runtime = struct {
__toModule: ?Ref = null,
__commonJS: ?Ref = null,
__require: ?Ref = null,
-
- pub const all = [_][]const u8{ "__name", "__toModule", "__require", "__commonJS" };
+ __export: ?Ref = null,
+ __reExport: ?Ref = null,
+
+ pub const all = [_][]const u8{
+ "__name",
+ "__toModule",
+ "__require",
+ "__commonJS",
+ "__export",
+ "__reExport",
+ };
pub const Name = "<RUNTIME";
pub const Iterator = struct {
@@ -58,6 +67,16 @@ pub const Runtime = struct {
return Entry{ .key = 3, .value = val };
}
},
+ 4 => {
+ if (@field(this.runtime_imports, all[4])) |val| {
+ return Entry{ .key = 4, .value = val };
+ }
+ },
+ 5 => {
+ if (@field(this.runtime_imports, all[5])) |val| {
+ return Entry{ .key = 5, .value = val };
+ }
+ },
else => {
return null;
},
@@ -106,6 +125,8 @@ pub const Runtime = struct {
1 => @field(imports, all[1]),
2 => @field(imports, all[2]),
3 => @field(imports, all[3]),
+ 4 => @field(imports, all[4]),
+ 5 => @field(imports, all[5]),
else => null,
};
}
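
runtime.zig registers the two new helpers (`__export`, `__reExport`) alongside the existing runtime imports and extends the hand-written index switch with arms 4 and 5. The same indexed lookup can be expressed generically with an `inline for` over the comptime name list; a hypothetical miniature of that alternative (current Zig syntax, not the repo's `Runtime.Imports`):

```zig
const std = @import("std");

// Hypothetical miniature of the Runtime.Imports pattern: optional entries
// plus a comptime list of their names, looked up by index without writing
// one switch arm per field.
const Imports = struct {
    __name: ?u32 = null,
    __toModule: ?u32 = null,
    __export: ?u32 = null,
    __reExport: ?u32 = null,

    pub const all = [_][]const u8{ "__name", "__toModule", "__export", "__reExport" };

    pub fn at(this: Imports, index: usize) ?u32 {
        // `name` is comptime-known inside the inline for, so @field works.
        inline for (all, 0..) |name, i| {
            if (i == index) return @field(this, name);
        }
        return null;
    }
};

test "indexed lookup over comptime field names" {
    var imports = Imports{};
    imports.__reExport = 42;
    try std.testing.expectEqual(@as(?u32, 42), imports.at(3));
    try std.testing.expectEqual(@as(?u32, null), imports.at(0));
}
```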