 .vscode/settings.json         |   4
 src/bundler.zig               | 174
 src/fs.zig                    |  17
 src/js_parser/js_parser.zig   | 399
 src/js_printer.zig            |   9
 src/resolver/dir_info.zig     |   8
 src/resolver/package_json.zig |   2
 src/resolver/resolver.zig     | 439
 src/runtime.js                |   2
 src/runtime.version           |   2
 src/runtime.zig               |  98
 src/string_immutable.zig      |   4
 src/string_types.zig          |  40
 13 files changed, 735 insertions(+), 463 deletions(-)
diff --git a/.vscode/settings.json b/.vscode/settings.json
index b1ac1fa32..689016444 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -118,5 +118,7 @@
"set": "cpp",
"__memory": "cpp",
"memory_resource": "cpp"
- }
+ },
+ "go.logging.level": "off",
+ "cmake.configureOnOpen": false
}
diff --git a/src/bundler.zig b/src/bundler.zig
index 153a29833..c73ce901d 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -835,6 +835,10 @@ pub fn NewBundler(cache_files: bool) type {
try this.pool.start(this);
try this.pool.wait(this);
+ // if (comptime !isRelease) {
+ // this.queue.checkDuplicatesSlow();
+ // }
+
if (this.log.errors > 0) {
tmpfile.close();
tmpdir.deleteFile(std.mem.span(tmpname)) catch {};
@@ -876,6 +880,33 @@ pub fn NewBundler(cache_files: bool) type {
this,
GenerateNodeModuleBundle.sortJavascriptModuleByPath,
);
+
+ if (comptime isDebug) {
+ const SeenHash = std.AutoHashMap(u64, void);
+ var map = SeenHash.init(this.allocator);
+ var ids = SeenHash.init(this.allocator);
+ try map.ensureTotalCapacity(@truncate(u32, this.module_list.items.len));
+ try ids.ensureTotalCapacity(@truncate(u32, this.module_list.items.len));
+
+ for (this.module_list.items) |a| {
+ const a_pkg: Api.JavascriptBundledPackage = this.package_list.items[a.package_id];
+ const a_name = this.metadataStringPointer(a_pkg.name);
+ const a_version = this.metadataStringPointer(a_pkg.version);
+ const a_path = this.metadataStringPointer(a.path);
+
+ std.debug.assert(a_name.len > 0);
+ std.debug.assert(a_version.len > 0);
+ std.debug.assert(a_path.len > 0);
+ var hash_print = std.mem.zeroes([4096]u8);
+ const hash = std.hash.Wyhash.hash(0, std.fmt.bufPrint(&hash_print, "{s}@{s}/{s}", .{ a_name, a_version, a_path }) catch unreachable);
+ var result1 = map.getOrPutAssumeCapacity(hash);
+ std.debug.assert(!result1.found_existing);
+
+ var result2 = ids.getOrPutAssumeCapacity(a.id);
+ std.debug.assert(!result2.found_existing);
+ }
+ }
+
var hasher = std.hash.Wyhash.init(0);
// We want to sort the packages as well as the files
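The debug-only block added above guards against duplicate bundle entries by hashing the composite key "name@version/path" and asserting that each hash (and each module id) is new. A minimal standalone sketch of that check, assuming a hypothetical flat list of keys instead of the bundler's real module and package lists:

    const std = @import("std");

    // Standalone sketch of the debug-only duplicate check, assuming a
    // hypothetical flat list of "name@version/path" keys.
    pub fn main() !void {
        const keys = [_][]const u8{
            "react@17.0.2/index.js",
            "react@17.0.2/cjs/react.development.js",
            "object-assign@4.1.1/index.js",
        };

        const SeenHash = std.AutoHashMap(u64, void);
        var seen = SeenHash.init(std.heap.page_allocator);
        defer seen.deinit();
        try seen.ensureTotalCapacity(@truncate(u32, keys.len));

        for (keys) |key| {
            // A collision here means either a duplicate module or a broken hash.
            const hash = std.hash.Wyhash.hash(0, key);
            const entry = seen.getOrPutAssumeCapacity(hash);
            std.debug.assert(!entry.found_existing);
        }
    }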
@@ -1004,36 +1035,6 @@ pub fn NewBundler(cache_files: bool) type {
// Since we trim the prefixes, we must also compare the package name and version
pub fn sortJavascriptModuleByPath(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledModule, b: Api.JavascriptBundledModule) bool {
- if (comptime isDebug) {
- const a_pkg: Api.JavascriptBundledPackage = ctx.package_list.items[a.package_id];
- const b_pkg: Api.JavascriptBundledPackage = ctx.package_list.items[b.package_id];
- const a_name = ctx.metadataStringPointer(a_pkg.name);
- const b_name = ctx.metadataStringPointer(b_pkg.name);
- const a_version = ctx.metadataStringPointer(a_pkg.version);
- const b_version = ctx.metadataStringPointer(b_pkg.version);
- const a_path = ctx.metadataStringPointer(a.path);
- const b_path = ctx.metadataStringPointer(b.path);
-
- std.debug.assert(a_name.len > 0);
- std.debug.assert(b_name.len > 0);
- std.debug.assert(a_version.len > 0);
- std.debug.assert(b_version.len > 0);
- std.debug.assert(a_path.len > 0);
- std.debug.assert(b_path.len > 0);
-
- if (strings.eql(a_name, b_name)) {
- if (strings.eql(a_version, b_version)) {
- std.debug.assert(a_pkg.hash == b_pkg.hash); // hash collision
- std.debug.assert(a.package_id == b.package_id); // duplicate package
- std.debug.assert(!strings.eql(a_path, b_path)); // duplicate module
- } else {
- std.debug.assert(a_pkg.hash != b_pkg.hash); // incorrectly generated hash
- }
- } else {
- std.debug.assert(a_pkg.hash != b_pkg.hash); // incorrectly generated hash
- }
- }
-
return switch (std.mem.order(
u8,
ctx.metadataStringPointer(
@@ -1138,6 +1139,57 @@ pub fn NewBundler(cache_files: bool) type {
};
var json_ast_symbols_list = std.mem.span(&json_ast_symbols);
threadlocal var override_file_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+
+ pub fn appendToModuleList(
+ this: *GenerateNodeModuleBundle,
+ package: *const PackageJSON,
+ module_id: u32,
+ code_offset: u32,
+ package_relative_path: string,
+ ) !void {
+ this.list_lock.lock();
+ defer this.list_lock.unlock();
+
+ const code_length = @atomicLoad(u32, &this.tmpfile_byte_offset, .SeqCst) - code_offset;
+
+ if (comptime isDebug) {
+ std.debug.assert(code_length > 0);
+ std.debug.assert(package.hash != 0);
+ std.debug.assert(package.version.len > 0);
+ std.debug.assert(package.name.len > 0);
+ std.debug.assert(module_id > 0);
+ }
+
+ var package_get_or_put_entry = try this.package_list_map.getOrPut(package.hash);
+
+ if (!package_get_or_put_entry.found_existing) {
+ package_get_or_put_entry.value_ptr.* = @truncate(u32, this.package_list.items.len);
+ try this.package_list.append(
+ Api.JavascriptBundledPackage{
+ .name = try this.appendHeaderString(package.name),
+ .version = try this.appendHeaderString(package.version),
+ .hash = package.hash,
+ },
+ );
+ this.has_jsx = this.has_jsx or strings.eql(package.name, this.bundler.options.jsx.package_name);
+ }
+
+ var path_extname_length = @truncate(u8, std.fs.path.extension(package_relative_path).len);
+ try this.module_list.append(
+ Api.JavascriptBundledModule{
+ .path = try this.appendHeaderString(
+ package_relative_path,
+ ),
+ .path_extname_length = path_extname_length,
+ .package_id = package_get_or_put_entry.value_ptr.*,
+ .id = module_id,
+ .code = Api.StringPointer{
+ .length = @truncate(u32, code_length),
+ .offset = @truncate(u32, code_offset),
+ },
+ },
+ );
+ }
pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, _resolve: _resolver.Result) !void {
const resolve = _resolve;
if (resolve.is_external) return;
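appendToModuleList (added above) interns each package once: package_list_map maps a package hash to an index into package_list, and every module records that index as its package_id. A standalone sketch of the same getOrPut pattern, using made-up "name@version" strings in place of real package.json data:

    const std = @import("std");

    // Sketch of the package-interning pattern: each unique package hash is
    // appended once, and later modules reuse its index as their package_id.
    pub fn main() !void {
        const allocator = std.heap.page_allocator;

        var package_list = std.ArrayList([]const u8).init(allocator);
        defer package_list.deinit();
        var package_list_map = std.AutoHashMap(u64, u32).init(allocator);
        defer package_list_map.deinit();

        const packages = [_][]const u8{ "react@17.0.2", "react@17.0.2", "object-assign@4.1.1" };

        for (packages) |name| {
            const hash = std.hash.Wyhash.hash(0, name);
            const entry = try package_list_map.getOrPut(hash);
            if (!entry.found_existing) {
                entry.value_ptr.* = @truncate(u32, package_list.items.len);
                try package_list.append(name);
            }
            // entry.value_ptr.* is the package_id the module would store.
        }

        std.debug.assert(package_list.items.len == 2);
    }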
@@ -1372,11 +1424,11 @@ pub fn NewBundler(cache_files: bool) type {
ast = js_ast.Ast.initTest(parts);
ast.runtime_imports = runtime.Runtime.Imports{};
- ast.runtime_imports.register = Ref{ .source_index = 0, .inner_index = 0 };
- ast.runtime_imports.__export = Ref{ .source_index = 0, .inner_index = 1 };
+ ast.runtime_imports.@"$$m" = .{ .ref = Ref{ .source_index = 0, .inner_index = 0 }, .primary = Ref.None, .backup = Ref.None };
+ ast.runtime_imports.__export = .{ .ref = Ref{ .source_index = 0, .inner_index = 1 }, .primary = Ref.None, .backup = Ref.None };
ast.symbols = json_ast_symbols_list;
ast.module_ref = Ref{ .source_index = 0, .inner_index = 2 };
- ast.exports_ref = ast.runtime_imports.__export;
+ ast.exports_ref = ast.runtime_imports.__export.?.ref;
ast.bundle_export_ref = Ref{ .source_index = 0, .inner_index = 3 };
} else {
var parts = &[_]js_ast.Part{};
@@ -1396,9 +1448,7 @@ pub fn NewBundler(cache_files: bool) type {
code_offset = try this.writeEmptyModule(module_data.package_path, module_id);
},
else => {
- // const load_from_symbol_ref = ast.runtime_imports.$$r.?;
- // const reexport_ref = ast.runtime_imports.__reExport.?;
- const register_ref = ast.runtime_imports.register.?;
+ const register_ref = ast.runtime_imports.@"$$m".?.ref;
const E = js_ast.E;
const Expr = js_ast.Expr;
const Stmt = js_ast.Stmt;
@@ -1480,7 +1530,7 @@ pub fn NewBundler(cache_files: bool) type {
};
var register_expr = Expr{ .loc = call_register.target.loc, .data = .{ .e_call = &call_register } };
var decls: [1]js_ast.G.Decl = undefined;
- var bundle_export_binding = js_ast.B.Identifier{ .ref = ast.bundle_export_ref.? };
+ var bundle_export_binding = js_ast.B.Identifier{ .ref = ast.runtime_imports.@"$$m".?.ref };
var binding = js_ast.Binding{
.loc = register_expr.loc,
.data = .{ .b_identifier = &bundle_export_binding },
@@ -1512,7 +1562,7 @@ pub fn NewBundler(cache_files: bool) type {
false,
js_printer.Options{
.to_module_ref = Ref.RuntimeRef,
- .bundle_export_ref = ast.bundle_export_ref.?,
+ .bundle_export_ref = ast.runtime_imports.@"$$m".?.ref,
.source_path = file_path,
.externals = ast.externals,
.indent = 0,
@@ -1528,10 +1578,10 @@ pub fn NewBundler(cache_files: bool) type {
std.fs.File,
this.tmpfile,
std.fs.File.getPos,
+ &this.tmpfile_byte_offset,
);
code_offset = write_result.off;
- this.tmpfile_byte_offset = write_result.end_off;
},
}
}
@@ -1542,47 +1592,11 @@ pub fn NewBundler(cache_files: bool) type {
std.debug.assert(package_relative_path.len > 0);
}
- this.list_lock.lock();
- defer this.list_lock.unlock();
-
- const code_length = this.tmpfile_byte_offset - code_offset;
-
- if (comptime isDebug) {
- std.debug.assert(code_length > 0);
- std.debug.assert(package.hash != 0);
- std.debug.assert(package.version.len > 0);
- std.debug.assert(package.name.len > 0);
- std.debug.assert(module_id > 0);
- }
-
- var package_get_or_put_entry = try this.package_list_map.getOrPut(package.hash);
-
- if (!package_get_or_put_entry.found_existing) {
- package_get_or_put_entry.value_ptr.* = @truncate(u32, this.package_list.items.len);
- try this.package_list.append(
- Api.JavascriptBundledPackage{
- .name = try this.appendHeaderString(package.name),
- .version = try this.appendHeaderString(package.version),
- .hash = package.hash,
- },
- );
- this.has_jsx = this.has_jsx or strings.eql(package.name, this.bundler.options.jsx.package_name);
- }
-
- var path_extname_length = @truncate(u8, std.fs.path.extension(package_relative_path).len);
- try this.module_list.append(
- Api.JavascriptBundledModule{
- .path = try this.appendHeaderString(
- package_relative_path,
- ),
- .path_extname_length = path_extname_length,
- .package_id = package_get_or_put_entry.value_ptr.*,
- .id = module_id,
- .code = Api.StringPointer{
- .length = @truncate(u32, code_length),
- .offset = @truncate(u32, code_offset),
- },
- },
+ try this.appendToModuleList(
+ package,
+ module_id,
+ code_offset,
+ package_relative_path,
);
} else {
// If it's app code, scan but do not fully parse.
diff --git a/src/fs.zig b/src/fs.zig
index a64cb7555..4676211d0 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -202,7 +202,7 @@ pub const FileSystem = struct {
// for each entry was a big performance issue for that package.
.need_stat = entry.kind == .SymLink,
.cache = Entry.Cache{
- .symlink = "",
+ .symlink = PathString.empty,
.kind = _kind,
},
},
@@ -327,6 +327,8 @@ pub const FileSystem = struct {
mutex: Mutex,
need_stat: bool = true,
+ abs_path: PathString = PathString.empty,
+
pub inline fn base(this: *const Entry) string {
return this.base_.slice();
}
@@ -350,12 +352,12 @@ pub const FileSystem = struct {
e.base_.deinit(allocator);
allocator.free(e.dir);
- allocator.free(e.cache.symlink);
+ allocator.free(e.cache.symlink.slice());
allocator.destroy(e);
}
pub const Cache = struct {
- symlink: string = "",
+ symlink: PathString = PathString.empty,
fd: StoredFileDescriptorType = 0,
kind: Kind = Kind.file,
};
@@ -378,7 +380,7 @@ pub const FileSystem = struct {
entry.need_stat = false;
entry.cache = fs.kind(entry.dir, entry.base(), entry.cache.fd) catch unreachable;
}
- return entry.cache.symlink;
+ return entry.cache.symlink.slice();
}
};
@@ -901,7 +903,10 @@ pub const FileSystem = struct {
var stat = try C.lstat_absolute(absolute_path_c);
const is_symlink = stat.kind == std.fs.File.Kind.SymLink;
var _kind = stat.kind;
- var cache = Entry.Cache{ .kind = Entry.Kind.file, .symlink = "" };
+ var cache = Entry.Cache{
+ .kind = Entry.Kind.file,
+ .symlink = PathString.empty,
+ };
var symlink: []const u8 = "";
if (is_symlink) {
@@ -930,7 +935,7 @@ pub const FileSystem = struct {
cache.kind = .file;
}
if (symlink.len > 0) {
- cache.symlink = try FilenameStore.instance.append([]const u8, symlink);
+ cache.symlink = PathString.init(try FilenameStore.instance.append([]const u8, symlink));
}
return cache;
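These fs.zig changes switch symlink targets from raw strings to PathString. The real type lives in src/string_types.zig, which is not shown in this excerpt; a minimal stand-in covering only the API used here (.empty, init(), and slice()) might look roughly like:

    const std = @import("std");

    // Minimal stand-in for PathString; the actual definition in
    // src/string_types.zig may differ.
    const PathString = struct {
        ptr: [*]const u8 = "",
        len: u32 = 0,

        pub const empty = PathString{};

        pub fn init(str: []const u8) PathString {
            return PathString{ .ptr = str.ptr, .len = @truncate(u32, str.len) };
        }

        pub fn slice(this: PathString) []const u8 {
            return this.ptr[0..this.len];
        }
    };

    pub fn main() void {
        const link = PathString.init("/usr/local/bin/node");
        std.debug.assert(std.mem.eql(u8, link.slice(), "/usr/local/bin/node"));
        std.debug.assert(PathString.empty.slice().len == 0);
    }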
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index cb79e0f77..e8935182c 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -59,6 +59,17 @@ pub fn locAfterOp(e: E.Binary) logger.Loc {
}
}
const ExportsStringName = "exports";
+
+// We must prevent collisions from generated names.
+// We want to avoid adding a pass over all the symbols in the file.
+// To do that:
+// For every generated symbol, we reserve two backup symbol names
+// If the preferred name is already used in the file, we fall back to the backup name
+// If the backup name is also used, we keep the internal name
+// We *assume* the internal name is never used.
+// In practice, it is possible. But, the internal names are so crazy long you'd have to be deliberately trying to use them.
+const GeneratedSymbol = @import("../runtime.zig").Runtime.GeneratedSymbol;
+
pub const ImportScanner = struct {
stmts: []Stmt = &([_]Stmt{}),
@@ -625,6 +636,47 @@ pub const ImportScanner = struct {
}
};
+const StaticSymbolName = struct {
+ internal: string,
+ primary: string,
+ backup: string,
+
+ pub const List = struct {
+ fn NewStaticSymbol(comptime basename: string) StaticSymbolName {
+ return comptime StaticSymbolName{
+ .internal = basename ++ "_" ++ std.fmt.comptimePrint("{x}", .{std.hash.Wyhash.hash(0, basename)}),
+ .primary = basename,
+ .backup = "_" ++ basename ++ "$",
+ };
+ }
+
+ pub const jsx = NewStaticSymbol("jsx");
+ pub const jsxs = NewStaticSymbol("jsxs");
+ pub const ImportSource = NewStaticSymbol("JSX");
+ pub const ClassicImportSource = NewStaticSymbol("JSXClassic");
+ pub const jsxFilename = NewStaticSymbol("jsxFilename");
+ pub const Factory = NewStaticSymbol("createElement");
+ pub const Refresher = NewStaticSymbol("Refresher");
+ pub const Fragment = NewStaticSymbol("Fragment");
+
+ pub const __name = NewStaticSymbol("__name");
+ pub const __toModule = NewStaticSymbol("__toModule");
+ pub const __require = NewStaticSymbol("__require");
+ pub const __cJS2eSM = NewStaticSymbol("__cJS2eSM");
+ pub const __export = NewStaticSymbol("__export");
+ pub const __reExport = NewStaticSymbol("__reExport");
+ pub const __load = NewStaticSymbol("__load");
+ pub const @"$$lzy" = NewStaticSymbol("$$lzy");
+ pub const __HMRModule = NewStaticSymbol("__HMRModule");
+ pub const __HMRClient = NewStaticSymbol("__HMRClient");
+ pub const __FastRefreshModule = NewStaticSymbol("__FastRefreshModule");
+
+ pub const @"$$m" = NewStaticSymbol("$$m");
+
+ pub const hmr = NewStaticSymbol("hmr");
+ };
+};
+
pub const SideEffects = enum(u2) {
could_have_side_effects,
no_side_effects,
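NewStaticSymbol above builds all three spellings at comptime: the preferred name, a backup ("_" ++ name ++ "$"), and a collision-proof internal name suffixed with the Wyhash of the base name. A standalone sketch of the same scheme (the struct and the demo name "jsx" are illustrative):

    const std = @import("std");

    const StaticName = struct {
        internal: []const u8,
        primary: []const u8,
        backup: []const u8,
    };

    // Mirrors the comptime name generation shown in the patch.
    fn newStaticName(comptime basename: []const u8) StaticName {
        return comptime StaticName{
            .internal = basename ++ "_" ++ std.fmt.comptimePrint("{x}", .{std.hash.Wyhash.hash(0, basename)}),
            .primary = basename,
            .backup = "_" ++ basename ++ "$",
        };
    }

    pub fn main() void {
        const jsx = newStaticName("jsx");
        std.debug.print("internal={s} primary={s} backup={s}\n", .{ jsx.internal, jsx.primary, jsx.backup });
    }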
@@ -1919,11 +1971,11 @@ pub const Parser = struct {
// Auto-import JSX
if (p.options.jsx.parse) {
- const jsx_symbol: *const Symbol = &p.symbols.items[p.jsx_runtime_ref.inner_index];
- const jsx_static_symbol: *const Symbol = &p.symbols.items[p.jsxs_runtime_ref.inner_index];
- const jsx_fragment_symbol: *const Symbol = &p.symbols.items[p.jsx_fragment_ref.inner_index];
- const jsx_factory_symbol: *const Symbol = &p.symbols.items[p.jsx_factory_ref.inner_index];
- const jsx_filename_symbol: *const Symbol = &p.symbols.items[p.jsx_filename_ref.inner_index];
+ const jsx_symbol: *const Symbol = &p.symbols.items[p.jsx_runtime.ref.inner_index];
+ const jsx_static_symbol: *const Symbol = &p.symbols.items[p.jsxs_runtime.ref.inner_index];
+ const jsx_fragment_symbol: *const Symbol = &p.symbols.items[p.jsx_fragment.ref.inner_index];
+ const jsx_factory_symbol: *const Symbol = &p.symbols.items[p.jsx_factory.ref.inner_index];
+ const jsx_filename_symbol: *const Symbol = &p.symbols.items[p.jsx_filename.ref.inner_index];
// Currently, React (and most node_modules) ship a CJS version or a UMD version
// but we should assume that it'll pretty much always be CJS
@@ -1933,20 +1985,20 @@ pub const Parser = struct {
// This is kind of a broken way of doing it because it wouldn't work if it was more than one level deep
if (FeatureFlags.jsx_runtime_is_cjs) {
if (jsx_symbol.use_count_estimate > 0 or jsx_static_symbol.use_count_estimate > 0) {
- p.recordUsage(p.jsx_automatic_ref);
+ p.recordUsage(p.jsx_automatic.ref);
}
if (jsx_fragment_symbol.use_count_estimate > 0) {
- p.recordUsage(p.jsx_classic_ref);
+ p.recordUsage(p.jsx_classic.ref);
}
if (jsx_factory_symbol.use_count_estimate > 0) {
- p.recordUsage(p.jsx_classic_ref);
+ p.recordUsage(p.jsx_classic.ref);
}
}
- const jsx_classic_symbol: Symbol = p.symbols.items[p.jsx_classic_ref.inner_index];
- const jsx_automatic_symbol: Symbol = p.symbols.items[p.jsx_automatic_ref.inner_index];
+ const jsx_classic_symbol: *const Symbol = &p.symbols.items[p.jsx_classic.ref.inner_index];
+ const jsx_automatic_symbol: *const Symbol = &p.symbols.items[p.jsx_automatic.ref.inner_index];
// JSX auto-imports
// The classic runtime is a different import than the main import
@@ -1955,8 +2007,9 @@ pub const Parser = struct {
// 2. If you use a React.Fragment
// So we have to support both.
if (jsx_classic_symbol.use_count_estimate > 0 or jsx_automatic_symbol.use_count_estimate > 0) {
- const classic_namespace_ref = p.jsx_classic_ref;
- const automatic_namespace_ref = p.jsx_automatic_ref;
+ p.resolveStaticJSXSymbols();
+ const classic_namespace_ref = p.jsx_classic.ref;
+ const automatic_namespace_ref = p.jsx_automatic.ref;
const decls_count: u32 =
@intCast(u32, @boolToInt(jsx_symbol.use_count_estimate > 0)) * 2 + @intCast(u32, @boolToInt(jsx_static_symbol.use_count_estimate > 0)) * 2 +
@@ -1987,8 +2040,8 @@ pub const Parser = struct {
if (jsx_symbol.use_count_estimate > 0 or jsx_static_symbol.use_count_estimate > 0) {
if (jsx_automatic_symbol.use_count_estimate > 0) {
if (jsx_automatic_symbol.link != null) {
- p.symbols.items[p.jsx_automatic_ref.inner_index].link = null;
- p.symbols.items[p.jsx_automatic_ref.inner_index].original_name = try std.fmt.allocPrint(
+ p.symbols.items[p.jsx_automatic.ref.inner_index].link = null;
+ p.symbols.items[p.jsx_automatic.ref.inner_index].original_name = try std.fmt.allocPrint(
p.allocator,
"jsxImport{x}",
.{
@@ -2013,24 +2066,13 @@ pub const Parser = struct {
};
if (jsx_symbol.use_count_estimate > 0) {
- if (jsx_symbol.link != null) {
- p.symbols.items[p.jsx_runtime_ref.inner_index].link = null;
- p.symbols.items[p.jsx_runtime_ref.inner_index].original_name = try std.fmt.allocPrint(
- p.allocator,
- "jsx_{x}",
- .{
- @truncate(u16, std.hash.Wyhash.hash(0, p.options.jsx.import_source)),
- },
- );
- }
-
- declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_runtime_ref, .is_top_level = true };
+ declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_runtime.ref, .is_top_level = true };
declared_symbols_i += 1;
decls[decl_i] = G.Decl{
.binding = p.b(
B.Identifier{
- .ref = p.jsx_runtime_ref,
+ .ref = p.jsx_runtime.ref,
},
loc,
),
@@ -2048,24 +2090,13 @@ pub const Parser = struct {
}
if (jsx_static_symbol.use_count_estimate > 0) {
- if (jsx_static_symbol.link != null) {
- p.symbols.items[p.jsxs_runtime_ref.inner_index].link = null;
- p.symbols.items[p.jsxs_runtime_ref.inner_index].original_name = try std.fmt.allocPrint(
- p.allocator,
- "jsxs_{x}",
- .{
- @truncate(u16, std.hash.Wyhash.hash(0, p.options.jsx.import_source)),
- },
- );
- }
-
- declared_symbols[declared_symbols_i] = .{ .ref = p.jsxs_runtime_ref, .is_top_level = true };
+ declared_symbols[declared_symbols_i] = .{ .ref = p.jsxs_runtime.ref, .is_top_level = true };
declared_symbols_i += 1;
decls[decl_i] = G.Decl{
.binding = p.b(
B.Identifier{
- .ref = p.jsxs_runtime_ref,
+ .ref = p.jsxs_runtime.ref,
},
loc,
),
@@ -2084,21 +2115,12 @@ pub const Parser = struct {
}
if (jsx_filename_symbol.use_count_estimate > 0) {
- if (jsx_filename_symbol.link != null) {
- p.symbols.items[p.jsx_filename_ref.inner_index].link = null;
- p.symbols.items[p.jsx_filename_ref.inner_index].original_name = try std.fmt.allocPrint(
- p.allocator,
- "jsxFilename_{x}",
- .{@truncate(u16, std.hash.Wyhash.hash(0, p.options.jsx.import_source))},
- );
- }
-
- declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_filename_ref, .is_top_level = true };
+ declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_filename.ref, .is_top_level = true };
declared_symbols_i += 1;
decls[decl_i] = G.Decl{
.binding = p.b(
B.Identifier{
- .ref = p.jsx_filename_ref,
+ .ref = p.jsx_filename.ref,
},
loc,
),
@@ -2137,14 +2159,6 @@ pub const Parser = struct {
}
if (jsx_classic_symbol.use_count_estimate > 0) {
- if (jsx_classic_symbol.link != null) {
- p.symbols.items[p.jsx_classic_ref.inner_index].link = null;
- p.symbols.items[p.jsx_classic_ref.inner_index].original_name = try std.fmt.allocPrint(p.allocator, "{s}${x}", .{
- p.symbols.items[p.jsx_classic_ref.inner_index].original_name,
- jsx_classic_symbol.use_count_estimate,
- });
- }
-
const classic_identifier = p.e(E.Identifier{ .ref = classic_namespace_ref }, loc);
const dot_call_target = brk: {
@@ -2160,20 +2174,12 @@ pub const Parser = struct {
};
if (jsx_factory_symbol.use_count_estimate > 0) {
- if (jsx_factory_symbol.link != null) {
- p.symbols.items[p.jsx_factory_ref.inner_index].link = null;
- p.symbols.items[p.jsx_factory_ref.inner_index].original_name = try std.fmt.allocPrint(p.allocator, "{s}${x}", .{
- p.symbols.items[p.jsx_factory_ref.inner_index].original_name,
- jsx_factory_symbol.use_count_estimate,
- });
- }
-
- declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_factory_ref, .is_top_level = true };
+ declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_factory.ref, .is_top_level = true };
declared_symbols_i += 1;
decls[decl_i] = G.Decl{
.binding = p.b(
B.Identifier{
- .ref = p.jsx_factory_ref,
+ .ref = p.jsx_factory.ref,
},
loc,
),
@@ -2191,20 +2197,12 @@ pub const Parser = struct {
}
if (jsx_fragment_symbol.use_count_estimate > 0) {
- if (jsx_fragment_symbol.link != null) {
- p.symbols.items[p.jsx_fragment_ref.inner_index].link = null;
- p.symbols.items[p.jsx_fragment_ref.inner_index].original_name = try std.fmt.allocPrint(p.allocator, "{s}${x}", .{
- p.symbols.items[p.jsx_fragment_ref.inner_index].original_name,
- jsx_fragment_symbol.use_count_estimate,
- });
- }
-
- declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_fragment_ref, .is_top_level = true };
+ declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_fragment.ref, .is_top_level = true };
declared_symbols_i += 1;
decls[decl_i] = G.Decl{
.binding = p.b(
B.Identifier{
- .ref = p.jsx_fragment_ref,
+ .ref = p.jsx_fragment.ref,
},
loc,
),
@@ -2246,21 +2244,15 @@ pub const Parser = struct {
if (p.options.features.react_fast_refresh) {
defer did_import_fast_refresh = true;
- const refresh_runtime_symbol: *const Symbol = &p.symbols.items[p.jsx_refresh_runtime_ref.inner_index];
- if (refresh_runtime_symbol.link != null) {
- p.symbols.items[p.jsx_refresh_runtime_ref.inner_index].link = null;
- p.symbols.items[p.jsx_refresh_runtime_ref.inner_index].original_name = try std.fmt.allocPrint(p.allocator, "{s}${x}", .{
- p.symbols.items[p.jsx_refresh_runtime_ref.inner_index].original_name,
- refresh_runtime_symbol.use_count_estimate,
- });
- }
+ p.resolveGeneratedSymbol(&p.jsx_refresh_runtime);
+ const refresh_runtime_symbol: *const Symbol = &p.symbols.items[p.jsx_refresh_runtime.ref.inner_index];
- declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_refresh_runtime_ref, .is_top_level = true };
+ declared_symbols[declared_symbols_i] = .{ .ref = p.jsx_refresh_runtime.ref, .is_top_level = true };
declared_symbols_i += 1;
const import_record_id = p.addImportRecord(.require, loc, p.options.jsx.refresh_runtime);
jsx_part_stmts[stmt_i] = p.s(S.Import{
- .namespace_ref = p.jsx_refresh_runtime_ref,
+ .namespace_ref = p.jsx_refresh_runtime.ref,
.star_name_loc = loc,
.is_single_line = true,
.import_record_index = import_record_id,
@@ -2268,18 +2260,18 @@ pub const Parser = struct {
stmt_i += 1;
p.named_imports.put(
- p.jsx_refresh_runtime_ref,
+ p.jsx_refresh_runtime.ref,
js_ast.NamedImport{
.alias = refresh_runtime_symbol.original_name,
.alias_is_star = true,
.alias_loc = loc,
- .namespace_ref = p.jsx_refresh_runtime_ref,
+ .namespace_ref = p.jsx_refresh_runtime.ref,
.import_record_index = import_record_id,
},
) catch unreachable;
- p.is_import_item.put(p.jsx_refresh_runtime_ref, true) catch unreachable;
+ p.is_import_item.put(p.jsx_refresh_runtime.ref, true) catch unreachable;
import_records[import_record_i] = import_record_id;
- p.recordUsage(p.jsx_refresh_runtime_ref);
+ p.recordUsage(p.jsx_refresh_runtime.ref);
}
jsx_part_stmts[stmt_i] = p.s(S.Local{ .kind = .k_var, .decls = decls[0..decl_i] }, loc);
@@ -2296,41 +2288,36 @@ pub const Parser = struct {
}
if (!did_import_fast_refresh and p.options.features.react_fast_refresh) {
+ p.resolveGeneratedSymbol(&p.jsx_refresh_runtime);
+
std.debug.assert(!p.options.enable_bundling);
var declared_symbols = try p.allocator.alloc(js_ast.DeclaredSymbol, 1);
const loc = logger.Loc.Empty;
const import_record_id = p.addImportRecord(.require, loc, p.options.jsx.refresh_runtime);
var import_stmt = p.s(S.Import{
- .namespace_ref = p.jsx_refresh_runtime_ref,
+ .namespace_ref = p.jsx_refresh_runtime.ref,
.star_name_loc = loc,
.is_single_line = true,
.import_record_index = import_record_id,
}, loc);
- p.recordUsage(p.jsx_refresh_runtime_ref);
- const refresh_runtime_symbol: *const Symbol = &p.symbols.items[p.jsx_refresh_runtime_ref.inner_index];
- if (refresh_runtime_symbol.link != null) {
- p.symbols.items[p.jsx_refresh_runtime_ref.inner_index].link = null;
- p.symbols.items[p.jsx_refresh_runtime_ref.inner_index].original_name = try std.fmt.allocPrint(p.allocator, "{s}${x}", .{
- p.symbols.items[p.jsx_refresh_runtime_ref.inner_index].original_name,
- refresh_runtime_symbol.use_count_estimate,
- });
- }
+ p.recordUsage(p.jsx_refresh_runtime.ref);
+ const refresh_runtime_symbol: *const Symbol = &p.symbols.items[p.jsx_refresh_runtime.ref.inner_index];
p.named_imports.put(
- p.jsx_refresh_runtime_ref,
+ p.jsx_refresh_runtime.ref,
js_ast.NamedImport{
.alias = refresh_runtime_symbol.original_name,
.alias_is_star = true,
.alias_loc = loc,
- .namespace_ref = p.jsx_refresh_runtime_ref,
+ .namespace_ref = p.jsx_refresh_runtime.ref,
.import_record_index = import_record_id,
},
) catch unreachable;
- p.is_import_item.put(p.jsx_refresh_runtime_ref, true) catch unreachable;
+ p.is_import_item.put(p.jsx_refresh_runtime.ref, true) catch unreachable;
var import_records = try p.allocator.alloc(@TypeOf(import_record_id), 1);
import_records[0] = import_record_id;
- declared_symbols[0] = .{ .ref = p.jsx_refresh_runtime_ref, .is_top_level = true };
+ declared_symbols[0] = .{ .ref = p.jsx_refresh_runtime.ref, .is_top_level = true };
var part_stmts = try p.allocator.alloc(Stmt, 1);
part_stmts[0] = import_stmt;
@@ -2342,6 +2329,8 @@ pub const Parser = struct {
}) catch unreachable;
}
+ if (p.options.enable_bundling) p.resolveBundlingSymbols();
+
var runtime_imports_iter = p.runtime_imports.iter();
const has_cjs_imports = p.cjs_import_stmts.items.len > 0 and p.options.transform_require_to_import;
// - don't import runtime if we're bundling, it's already included
@@ -2349,6 +2338,8 @@ pub const Parser = struct {
// - when HMR is not enabled, we only need any runtime imports if we're importing require()
if (!p.options.enable_bundling and (p.has_called_runtime or p.options.features.hot_module_reloading or has_cjs_imports)) {
const before_start = before.items.len;
+ if (p.options.features.hot_module_reloading) p.resolveHMRSymbols();
+
while (runtime_imports_iter.next()) |entry| {
const imports = [_]u16{entry.key};
p.generateImportStmt(
@@ -2682,18 +2673,12 @@ pub fn NewParser(
scopes_in_order_visitor_index: usize = 0,
has_classic_runtime_warned: bool = false,
- hmr_module_ref: js_ast.Ref = js_ast.Ref.None,
- hmr_activate_ref: js_ast.Ref = js_ast.Ref.None,
- hmr_client_ref: js_ast.Ref = js_ast.Ref.None,
- hmr_module_class_ref: js_ast.Ref = js_ast.Ref.None,
- hmr_exports_list: std.ArrayList(js_ast.ClauseItem),
+ hmr_module: GeneratedSymbol = GeneratedSymbol{ .primary = Ref.None, .backup = Ref.None, .ref = Ref.None },
has_called_runtime: bool = false,
cjs_import_stmts: std.ArrayList(Stmt),
- bundle_export_ref: ?Ref = null,
-
injected_define_symbols: List(Ref),
symbol_uses: SymbolUseMap,
declared_symbols: List(js_ast.DeclaredSymbol),
@@ -2734,18 +2719,15 @@ pub fn NewParser(
// "visit" pass.
enclosing_namespace_arg_ref: ?js_ast.Ref = null,
- jsx_filename_ref: js_ast.Ref = Ref.None,
- jsx_runtime_ref: js_ast.Ref = Ref.None,
- jsx_factory_ref: js_ast.Ref = Ref.None,
- jsx_fragment_ref: js_ast.Ref = Ref.None,
- jsx_automatic_ref: js_ast.Ref = Ref.None,
- jsxs_runtime_ref: js_ast.Ref = Ref.None,
- jsx_classic_ref: js_ast.Ref = Ref.None,
-
+ jsx_filename: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
+ jsx_runtime: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
+ jsx_factory: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
+ jsx_fragment: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
+ jsx_automatic: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
+ jsxs_runtime: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
+ jsx_classic: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
// only applicable when is_react_fast_refresh_enabled
- jsx_refresh_runtime_ref: js_ast.Ref = Ref.None,
-
- jsx_source_list_ref: js_ast.Ref = Ref.None,
+ jsx_refresh_runtime: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None },
// Imports (both ES6 and CommonJS) are tracked at the top level
import_records: ImportRecordList,
@@ -2965,15 +2947,21 @@ pub fn NewParser(
return p.e(E.Require{ .import_record_index = import_record_index }, arg.loc);
}
- const suffix = "_module";
- var base_identifier_name = fs.PathName.init(original_name).nonUniqueNameString(p.allocator) catch unreachable;
- var cjs_import_name = p.allocator.alloc(u8, base_identifier_name.len + suffix.len) catch unreachable;
- std.mem.copy(
- u8,
- cjs_import_name,
- base_identifier_name,
- );
- std.mem.copy(u8, cjs_import_name[base_identifier_name.len..], suffix);
+ const cjs_import_name = std.fmt.allocPrint(
+ p.allocator,
+ "{s}_{x}_{d}",
+ .{
+ original_name,
+ @truncate(
+ u16,
+ std.hash.Wyhash.hash(
+ 0,
+ p.import_records.items[import_record_index].path.text,
+ ),
+ ),
+ p.cjs_import_stmts.items.len,
+ },
+ ) catch unreachable;
const namespace_ref = p.declareSymbol(.hoisted, arg.loc, cjs_import_name) catch unreachable;
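The new CommonJS import names are formatted as "{name}_{hash}_{index}", where the hash is a truncated Wyhash of the import record's path and the index is the number of CJS import statements emitted so far. A standalone sketch with illustrative inputs:

    const std = @import("std");

    // Sketch of the CJS import-name scheme; the inputs are made up.
    pub fn main() !void {
        const allocator = std.heap.page_allocator;

        const original_name: []const u8 = "react";
        const import_path: []const u8 = "node_modules/react/index.js";
        const cjs_stmt_count: usize = 0;

        const cjs_import_name = try std.fmt.allocPrint(
            allocator,
            "{s}_{x}_{d}",
            .{
                original_name,
                @truncate(u16, std.hash.Wyhash.hash(0, import_path)),
                cjs_stmt_count,
            },
        );
        defer allocator.free(cjs_import_name);

        // Prints something like "react_<hash>_0".
        std.debug.print("{s}\n", .{cjs_import_name});
    }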
@@ -3511,76 +3499,100 @@ pub fn NewParser(
if (p.options.jsx.development) generated_symbols_count += 1;
}
- try p.module_scope.generated.ensureUnusedCapacity(generated_symbols_count);
+ try p.module_scope.generated.ensureUnusedCapacity(generated_symbols_count * 3);
+ try p.module_scope.members.ensureCapacity(generated_symbols_count * 3 + p.module_scope.members.count());
p.exports_ref = try p.declareCommonJSSymbol(.unbound, "exports");
p.module_ref = try p.declareCommonJSSymbol(.unbound, "module");
p.require_ref = try p.declareCommonJSSymbol(.unbound, "require");
if (p.options.enable_bundling) {
- p.bundle_export_ref = try p.declareGeneratedSymbol(.other, "IF_YOU_SEE_THIS_ITS_A_BUNDLER_BUG_PLEASE_FILE_AN_ISSUE_THX");
p.runtime_imports.__reExport = try p.declareGeneratedSymbol(.other, "__reExport");
- p.runtime_imports.register = try p.declareGeneratedSymbol(.other, "$$m");
- p.runtime_imports.lazy_export = try p.declareGeneratedSymbol(.other, "$$lzy");
+ p.runtime_imports.@"$$m" = try p.declareGeneratedSymbol(.other, "$$m");
+ p.recordUsage(p.runtime_imports.@"$$m".?.ref);
+ p.runtime_imports.@"$$lzy" = try p.declareGeneratedSymbol(.other, "$$lzy");
- p.runtime_imports.__export = p.exports_ref;
+ p.runtime_imports.__export = GeneratedSymbol{ .ref = p.exports_ref, .primary = Ref.None, .backup = Ref.None };
}
if (p.options.features.hot_module_reloading) {
- p.hmr_module_ref = try p.declareGeneratedSymbol(.other, "__hmrModule");
+ p.hmr_module = try p.declareGeneratedSymbol(.other, "hmr");
if (is_react_fast_refresh_enabled) {
- p.jsx_refresh_runtime_ref = try p.declareGeneratedSymbol(.other, "__RefreshRuntime");
+ p.jsx_refresh_runtime = try p.declareGeneratedSymbol(.other, "Refresher");
p.runtime_imports.__FastRefreshModule = try p.declareGeneratedSymbol(.other, "__FastRefreshModule");
- p.recordUsage(p.runtime_imports.__FastRefreshModule.?);
+ p.recordUsage(p.runtime_imports.__FastRefreshModule.?.ref);
} else {
p.runtime_imports.__HMRModule = try p.declareGeneratedSymbol(.other, "__HMRModule");
- p.recordUsage(p.runtime_imports.__HMRModule.?);
+ p.recordUsage(p.runtime_imports.__HMRModule.?.ref);
}
p.runtime_imports.__HMRClient = try p.declareGeneratedSymbol(.other, "__HMRClient");
- p.recordUsage(p.hmr_module_ref);
- p.recordUsage(p.runtime_imports.__HMRClient.?);
+ p.recordUsage(p.hmr_module.ref);
+ p.recordUsage(p.runtime_imports.__HMRClient.?.ref);
} else {
- p.runtime_imports.__export = p.exports_ref;
- p.runtime_imports.__require = p.require_ref;
+ p.runtime_imports.__export = GeneratedSymbol{ .ref = p.exports_ref, .primary = Ref.None, .backup = Ref.None };
+ p.runtime_imports.__require = GeneratedSymbol{ .ref = p.require_ref, .primary = Ref.None, .backup = Ref.None };
}
if (is_jsx_enabled) {
if (p.options.jsx.development) {
- p.jsx_filename_ref = p.declareGeneratedSymbol(.other, Prefill.Runtime.JSXFilename) catch unreachable;
+ p.jsx_filename = p.declareGeneratedSymbol(.other, "jsxFilename") catch unreachable;
}
- p.jsx_fragment_ref = p.declareGeneratedSymbol(.other, p.options.jsx.fragment[p.options.jsx.fragment.len - 1]) catch unreachable;
- p.jsx_runtime_ref = p.declareGeneratedSymbol(.other, Prefill.Runtime.JSXShortname) catch unreachable;
- p.jsxs_runtime_ref = p.declareGeneratedSymbol(.other, p.options.jsx.jsx_static) catch unreachable;
- p.jsx_factory_ref = p.declareGeneratedSymbol(.other, p.options.jsx.factory[p.options.jsx.factory.len - 1]) catch unreachable;
+ p.jsx_fragment = p.declareGeneratedSymbol(.other, "Fragment") catch unreachable;
+ p.jsx_runtime = p.declareGeneratedSymbol(.other, "jsx") catch unreachable;
+ p.jsxs_runtime = p.declareGeneratedSymbol(.other, "jsxs") catch unreachable;
+ p.jsx_factory = p.declareGeneratedSymbol(.other, "Factory") catch unreachable;
if (p.options.jsx.factory.len > 1 or FeatureFlags.jsx_runtime_is_cjs) {
- p.jsx_classic_ref = p.declareGeneratedSymbol(.other, JSXFactoryName) catch unreachable;
+ p.jsx_classic = p.declareGeneratedSymbol(.other, "ClassicImportSource") catch unreachable;
}
if (p.options.jsx.import_source.len > 0) {
- var jsx_symbol_name: string = JSXAutomaticName;
- // try to do:
- // var jsx = react.jsxDEV;
- // var jsx = emotion.jsxDEV;
- if (isPackagePath(p.options.jsx.import_source)) {
- var basename = p.options.jsx.import_source;
- basename = if (basename[0] == '@') basename[1..] else basename;
- if (strings.indexOfChar(basename, '/')) |path_end| {
- if (basename[0..path_end].len > 0) {
- if (js_lexer.isIdentifier(basename[0..path_end])) {
- jsx_symbol_name = basename[0..path_end];
- }
- }
- }
- }
-
- p.jsx_automatic_ref = p.declareGeneratedSymbol(.other, jsx_symbol_name) catch unreachable;
+ p.jsx_automatic = p.declareGeneratedSymbol(.other, "ImportSource") catch unreachable;
}
}
}
+ // This won't work for adversarial cases
+ pub fn resolveGeneratedSymbol(p: *P, generated_symbol: *GeneratedSymbol) void {
+ if (generated_symbol.ref.isNull()) return;
+
+ if (p.symbols.items[generated_symbol.ref.inner_index].use_count_estimate == 0) {
+ return;
+ }
+
+ if (p.symbols.items[generated_symbol.primary.inner_index].use_count_estimate == 0 and p.symbols.items[generated_symbol.primary.inner_index].link == null) {
+            p.symbols.items[generated_symbol.ref.inner_index].original_name = p.symbols.items[generated_symbol.primary.inner_index].original_name;
+            return;
+        }
+
+ if (p.symbols.items[generated_symbol.backup.inner_index].use_count_estimate == 0 and p.symbols.items[generated_symbol.backup.inner_index].link == null) {
+ p.symbols.items[generated_symbol.ref.inner_index].original_name = p.symbols.items[generated_symbol.backup.inner_index].original_name;
+ }
+ }
+
+ pub fn resolveBundlingSymbols(p: *P) void {
+ p.resolveGeneratedSymbol(&p.runtime_imports.__reExport.?);
+ p.resolveGeneratedSymbol(&p.runtime_imports.@"$$m".?);
+ p.resolveGeneratedSymbol(&p.runtime_imports.@"$$lzy".?);
+ }
+
+ pub fn resolveHMRSymbols(p: *P) void {
+ p.resolveGeneratedSymbol(&p.hmr_module);
+ if (p.runtime_imports.__FastRefreshModule != null) p.resolveGeneratedSymbol(&p.runtime_imports.__FastRefreshModule.?);
+ if (p.runtime_imports.__HMRModule != null) p.resolveGeneratedSymbol(&p.runtime_imports.__HMRModule.?);
+ if (p.runtime_imports.__HMRClient != null) p.resolveGeneratedSymbol(&p.runtime_imports.__HMRClient.?);
+ }
+
+ pub fn resolveStaticJSXSymbols(p: *P) void {
+ p.resolveGeneratedSymbol(&p.jsx_runtime);
+ p.resolveGeneratedSymbol(&p.jsxs_runtime);
+ p.resolveGeneratedSymbol(&p.jsx_factory);
+ p.resolveGeneratedSymbol(&p.jsx_classic);
+ p.resolveGeneratedSymbol(&p.jsx_automatic);
+ p.resolveGeneratedSymbol(&p.jsx_filename);
+ }
+
fn hoistSymbols(p: *P, scope: *js_ast.Scope) void {
if (!scope.kindStopsHoisting()) {
var iter = scope.members.iterator();
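resolveGeneratedSymbol above renames a generated symbol to the nicest spelling that is still free: the preferred name if nothing in the file uses it, otherwise the backup, otherwise the internal name. A minimal sketch of that preference order, assuming plain use counts instead of the parser's symbol table (the demo spellings are illustrative):

    const std = @import("std");

    // Preference order: primary if unused, else backup, else internal.
    fn chooseName(
        internal: []const u8,
        primary: []const u8,
        backup: []const u8,
        primary_uses: u32,
        backup_uses: u32,
    ) []const u8 {
        if (primary_uses == 0) return primary;
        if (backup_uses == 0) return backup;
        return internal;
    }

    pub fn main() void {
        std.debug.assert(std.mem.eql(u8, chooseName("jsx_deadbeef", "jsx", "_jsx$", 0, 0), "jsx"));
        std.debug.assert(std.mem.eql(u8, chooseName("jsx_deadbeef", "jsx", "_jsx$", 2, 0), "_jsx$"));
        std.debug.assert(std.mem.eql(u8, chooseName("jsx_deadbeef", "jsx", "_jsx$", 2, 1), "jsx_deadbeef"));
    }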
@@ -7444,8 +7456,13 @@ pub fn NewParser(
return ref;
}
- fn declareGeneratedSymbol(p: *P, kind: Symbol.Kind, name: string) !Ref {
- return try declareSymbolMaybeGenerated(p, kind, logger.Loc.Empty, name, true);
+ fn declareGeneratedSymbol(p: *P, kind: Symbol.Kind, comptime name: string) !GeneratedSymbol {
+ const static = @field(StaticSymbolName.List, name);
+ return GeneratedSymbol{
+ .ref = try declareSymbolMaybeGenerated(p, kind, logger.Loc.Empty, static.internal, true),
+ .primary = try declareSymbolMaybeGenerated(p, .hoisted, logger.Loc.Empty, static.primary, true),
+ .backup = try declareSymbolMaybeGenerated(p, .hoisted, logger.Loc.Empty, static.backup, true),
+ };
}
fn declareSymbol(p: *P, kind: Symbol.Kind, loc: logger.Loc, name: string) !Ref {
@@ -10787,7 +10804,7 @@ pub fn NewParser(
if (e_.tag) |_tag| {
break :tagger p.visitExpr(_tag);
} else {
- break :tagger p.jsxStringsToMemberExpression(expr.loc, p.jsx_fragment_ref);
+ break :tagger p.jsxStringsToMemberExpression(expr.loc, p.jsx_fragment.ref);
}
};
@@ -10810,9 +10827,7 @@ pub fn NewParser(
const is_childless_tag = FeatureFlags.react_specific_warnings and children_count > 0 and tag.data == .e_string and tag.data.e_string.isUTF8() and js_lexer.ChildlessJSXTags.has(tag.data.e_string.utf8);
- if (is_childless_tag) {
- children_count = 0;
- }
+ children_count = if (is_childless_tag) 0 else children_count;
if (children_count != e_.children.len) {
// Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`.
@@ -10863,7 +10878,7 @@ pub fn NewParser(
// Call createElement()
return p.e(E.Call{
- .target = p.jsxStringsToMemberExpression(expr.loc, p.jsx_factory_ref),
+ .target = p.jsxStringsToMemberExpression(expr.loc, p.jsx_factory.ref),
.args = args[0..i],
// Enable tree shaking
.can_be_unwrapped_if_unused = !p.options.ignore_dce_annotations,
@@ -10953,10 +10968,10 @@ pub fn NewParser(
};
var source = p.allocator.alloc(G.Property, 2) catch unreachable;
- p.recordUsage(p.jsx_filename_ref);
+ p.recordUsage(p.jsx_filename.ref);
source[0] = G.Property{
.key = Expr{ .loc = expr.loc, .data = Prefill.Data.Filename },
- .value = p.e(E.Identifier{ .ref = p.jsx_filename_ref }, expr.loc),
+ .value = p.e(E.Identifier{ .ref = p.jsx_filename.ref }, expr.loc),
};
source[1] = G.Property{
@@ -12112,7 +12127,7 @@ pub fn NewParser(
}
fn jsxStringsToMemberExpressionAutomatic(p: *P, loc: logger.Loc, is_static: bool) Expr {
- return p.jsxStringsToMemberExpression(loc, if (is_static and !p.options.jsx.development) p.jsxs_runtime_ref else p.jsx_runtime_ref);
+ return p.jsxStringsToMemberExpression(loc, if (is_static and !p.options.jsx.development) p.jsxs_runtime.ref else p.jsx_runtime.ref);
}
// If we are currently in a hoisted child of the module scope, relocate these
@@ -13724,9 +13739,10 @@ pub fn NewParser(
p.has_called_runtime = true;
if (!p.runtime_imports.contains(name)) {
- ref = p.newSymbol(.other, name) catch unreachable;
+ const generated_symbol = p.declareGeneratedSymbol(.other, name) catch unreachable;
+ ref = generated_symbol.ref;
p.module_scope.generated.append(ref) catch unreachable;
- p.runtime_imports.put(name, ref);
+ p.runtime_imports.put(name, generated_symbol);
} else {
ref = p.runtime_imports.at(name).?;
}
@@ -14286,20 +14302,20 @@ pub fn NewParser(
const new_call_args_count: usize = comptime if (is_react_fast_refresh_enabled) 3 else 2;
var call_args = try p.allocator.alloc(Expr, new_call_args_count + 1);
var new_call_args = call_args[0..new_call_args_count];
- var hmr_module_ident = p.e(E.Identifier{ .ref = p.hmr_module_ref }, logger.Loc.Empty);
+ var hmr_module_ident = p.e(E.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty);
new_call_args[0] = p.e(E.Number{ .value = @intToFloat(f64, p.options.filepath_hash_for_hmr) }, logger.Loc.Empty);
// This helps us provide better error messages
new_call_args[1] = p.e(E.String{ .utf8 = p.source.path.pretty }, logger.Loc.Empty);
if (is_react_fast_refresh_enabled) {
- new_call_args[2] = p.e(E.Identifier{ .ref = p.jsx_refresh_runtime_ref }, logger.Loc.Empty);
+ new_call_args[2] = p.e(E.Identifier{ .ref = p.jsx_refresh_runtime.ref }, logger.Loc.Empty);
}
var exports_dot = p.e(E.Dot{
.target = hmr_module_ident,
.name = ExportsStringName,
.name_loc = logger.Loc.Empty,
}, logger.Loc.Empty);
- var hmr_module_class_ident = p.e(E.Identifier{ .ref = p.runtime_imports.__HMRClient.? }, logger.Loc.Empty);
+ var hmr_module_class_ident = p.e(E.Identifier{ .ref = p.runtime_imports.__HMRClient.?.ref }, logger.Loc.Empty);
var toplevel_stmts_i: u8 = 0;
// HMRClient.activate(true)
toplevel_stmts[toplevel_stmts_i] = p.s(
@@ -14321,15 +14337,18 @@ pub fn NewParser(
var first_decl = decls[0..2];
// We cannot rely on import.meta.url because if we import it within a blob: url, it will be nonsensical
// var __hmrModule = new HMRModule(123123124, "/index.js"), __exports = __hmrModule.exports;
- const hmr_import_ref = if (comptime is_react_fast_refresh_enabled) p.runtime_imports.__FastRefreshModule else p.runtime_imports.__HMRModule;
+ const hmr_import_ref = (if (comptime is_react_fast_refresh_enabled)
+ p.runtime_imports.__FastRefreshModule.?
+ else
+ p.runtime_imports.__HMRModule.?).ref;
first_decl[0] = G.Decl{
- .binding = p.b(B.Identifier{ .ref = p.hmr_module_ref }, logger.Loc.Empty),
+ .binding = p.b(B.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty),
.value = p.e(E.New{
.args = new_call_args,
.target = p.e(
E.Identifier{
- .ref = hmr_import_ref.?,
+ .ref = hmr_import_ref,
},
logger.Loc.Empty,
),
@@ -14338,7 +14357,7 @@ pub fn NewParser(
first_decl[1] = G.Decl{
.binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty),
.value = p.e(E.Dot{
- .target = p.e(E.Identifier{ .ref = p.hmr_module_ref }, logger.Loc.Empty),
+ .target = p.e(E.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty),
.name = "exports",
.name_loc = logger.Loc.Empty,
}, logger.Loc.Empty),
@@ -14632,7 +14651,6 @@ pub fn NewParser(
.named_exports = p.named_exports,
.import_keyword = p.es6_import_keyword,
.export_keyword = p.es6_export_keyword,
- .bundle_export_ref = p.bundle_export_ref,
.require_ref = p.require_ref,
.uses_module_ref = (p.symbols.items[p.module_ref.inner_index].use_count_estimate > 0),
@@ -14714,7 +14732,6 @@ pub fn NewParser(
.needs_jsx_import = if (comptime only_scan_imports_and_do_not_visit) false else NeedsJSXType{},
.lexer = lexer,
- .hmr_exports_list = @TypeOf(this.hmr_exports_list).init(allocator),
};
if (comptime !only_scan_imports_and_do_not_visit) {
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 7e59a9bfa..706b52503 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -2437,7 +2437,7 @@ pub fn NewPrinter(
return;
// module.exports = $react();
} else {
- p.printSymbol(p.options.runtime_imports.__reExport.?);
+ p.printSymbol(p.options.runtime_imports.__reExport.?.ref);
p.print("(");
p.printModuleExportSymbol();
p.print(",");
@@ -2572,7 +2572,7 @@ pub fn NewPrinter(
// $$lz(export, $React(), {default: "React"});
if (s.items.len == 1) {
const item = s.items[0];
- p.printSymbol(p.options.runtime_imports.lazy_export.?);
+ p.printSymbol(p.options.runtime_imports.@"$$lzy".?.ref);
p.print("(");
p.printModuleExportSymbol();
p.print(",");
@@ -2588,7 +2588,7 @@ pub fn NewPrinter(
p.printSemicolonAfterStatement();
// $$lz(export, $React(), {createElement: "React"});
} else {
- p.printSymbol(p.options.runtime_imports.lazy_export.?);
+ p.printSymbol(p.options.runtime_imports.@"$$lzy".?.ref);
p.print("(");
p.printModuleExportSymbol();
p.print(",");
@@ -4142,6 +4142,7 @@ pub fn printCommonJSThreaded(
comptime GetPosType: type,
getter: GetPosType,
comptime getPos: fn (ctx: GetPosType) anyerror!u64,
+ end_off_ptr: *u32,
) !WriteResult {
const PrinterType = NewPrinter(false, Writer, LinkerType, true, false);
var writer = _writer;
@@ -4191,7 +4192,9 @@ pub fn printCommonJSThreaded(
}
}
try printer.writer.done();
+ @fence(.SeqCst);
result.end_off = @truncate(u32, try getPos(getter));
+ @atomicStore(u32, end_off_ptr, result.end_off, .SeqCst);
}
result.len = @intCast(usize, std.math.max(printer.writer.written, 0));
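printCommonJSThreaded now publishes the end offset through an atomic store so that the bundler code calling appendToModuleList can read tmpfile_byte_offset with @atomicLoad instead of racing on a plain field. A standalone sketch of that hand-off (the names here are hypothetical, not the bundler's own):

    const std = @import("std");

    var shared_end_offset: u32 = 0;

    fn publishEndOffset(bytes_written: u32) void {
        // Make sure everything written before this point is visible first.
        @fence(.SeqCst);
        @atomicStore(u32, &shared_end_offset, bytes_written, .SeqCst);
    }

    fn codeLength(code_offset: u32) u32 {
        const end = @atomicLoad(u32, &shared_end_offset, .SeqCst);
        return end - code_offset;
    }

    pub fn main() void {
        publishEndOffset(4096);
        std.debug.assert(codeLength(1024) == 3072);
    }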
diff --git a/src/resolver/dir_info.zig b/src/resolver/dir_info.zig
index 5d3c53f96..ba108b1b9 100644
--- a/src/resolver/dir_info.zig
+++ b/src/resolver/dir_info.zig
@@ -15,14 +15,22 @@ parent: Index = allocators.NotFound,
// A pointer to the enclosing dirInfo with a valid "browser" field in
// package.json. We need this to remap paths after they have been resolved.
enclosing_browser_scope: Index = allocators.NotFound,
+enclosing_package_json: ?*const PackageJSON = null,
+enclosing_tsconfig_json: ?*const TSConfigJSON = null,
abs_path: string = "",
entries: Index = undefined,
has_node_modules: bool = false, // Is there a "node_modules" subdirectory?
+is_node_modules: bool = false, // Is this a "node_modules" directory?
package_json: ?*PackageJSON = null, // Is there a "package.json" file?
tsconfig_json: ?*TSConfigJSON = null, // Is there a "tsconfig.json" file in this directory or a parent directory?
abs_real_path: string = "", // If non-empty, this is the real absolute path resolving any symlinks
+pub fn hasParentPackage(this: *const DirInfo) bool {
+ const parent = this.getParent() orelse return false;
+ return !parent.is_node_modules;
+}
+
pub fn getFileDescriptor(dirinfo: *const DirInfo) StoredFileDescriptorType {
if (!FeatureFlags.store_file_descriptors) {
return 0;
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 972feb06c..00b736eba 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -12,7 +12,7 @@ const resolver = @import("./resolver.zig");
// Assume they're not going to have hundreds of main fields or browser map
// so use an array-backed hash table instead of bucketed
const MainFieldMap = std.StringArrayHashMap(string);
-const BrowserMap = std.StringArrayHashMap(string);
+pub const BrowserMap = std.StringArrayHashMap(string);
threadlocal var hashy: [2048]u8 = undefined;
pub const PackageJSON = struct {
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 3502b29cf..2c9de730f 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -8,11 +8,13 @@ const cache = @import("../cache.zig");
const sync = @import("../sync.zig");
const TSConfigJSON = @import("./tsconfig_json.zig").TSConfigJSON;
const PackageJSON = @import("./package_json.zig").PackageJSON;
+const BrowserMap = @import("./package_json.zig").BrowserMap;
+
usingnamespace @import("./data_url.zig");
pub const DirInfo = @import("./dir_info.zig");
const HTTPWatcher = @import("../http.zig").Watcher;
const Wyhash = std.hash.Wyhash;
-
+const ResolvePath = @import("./resolve_path.zig");
const NodeFallbackModules = @import("../node_fallbacks.zig");
const Mutex = @import("../lock.zig").Lock;
@@ -191,6 +193,7 @@ pub const DirEntryResolveQueueItem = struct {
safe_path: string = "",
fd: StoredFileDescriptorType = 0,
};
+
threadlocal var _dir_entry_paths_to_resolve: [256]DirEntryResolveQueueItem = undefined;
threadlocal var _open_dirs: [256]std.fs.Dir = undefined;
threadlocal var resolve_without_remapping_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
@@ -203,6 +206,10 @@ threadlocal var load_as_file_or_directory_via_tsconfig_base_path: [std.fs.MAX_PA
threadlocal var node_modules_check_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var field_abs_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
threadlocal var tsconfig_path_abs_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var check_browser_map_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var remap_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var load_as_file_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var remap_path_trailing_slash: [std.fs.MAX_PATH_BYTES]u8 = undefined;
pub const DebugLogs = struct {
what: string = "",
@@ -291,6 +298,7 @@ pub const MatchResult = struct {
is_node_module: bool = false,
package_json: ?*PackageJSON = null,
diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
+ dir_info: ?*DirInfo = null,
};
pub const LoadResult = struct {
@@ -298,6 +306,7 @@ pub const LoadResult = struct {
diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase,
dirname_fd: StoredFileDescriptorType = 0,
file_fd: StoredFileDescriptorType = 0,
+ dir_info: ?*DirInfo = null,
};
// This is a global so even if multiple resolvers are created, the mutex will still work
@@ -715,7 +724,7 @@ pub fn NewResolver(cache_files: bool) type {
if (r.debug_logs) |*debug| {
debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text }) catch {};
}
- query.entry.cache.symlink = symlink;
+ query.entry.cache.symlink = PathString.init(symlink);
if (result.file_fd == 0) result.file_fd = query.entry.cache.fd;
path.setRealpath(symlink);
@@ -755,7 +764,7 @@ pub fn NewResolver(cache_files: bool) type {
// First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
if ((r.dirInfoCached(source_dir) catch null)) |_dir_info| {
const dir_info: *DirInfo = _dir_info;
- if (dir_info.tsconfig_json) |tsconfig| {
+ if (dir_info.enclosing_tsconfig_json) |tsconfig| {
if (tsconfig.paths.count() > 0) {
if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
@@ -826,36 +835,37 @@ pub fn NewResolver(cache_files: bool) type {
};
}
- // Check the "browser" map for the first time (1 out of 2)
+ // Check the "browser" map
if (r.dirInfoCached(std.fs.path.dirname(abs_path) orelse unreachable) catch null) |_import_dir_info| {
if (_import_dir_info.getEnclosingBrowserScope()) |import_dir_info| {
- if (import_dir_info.package_json) |pkg| {
- const pkg_json_dir = std.fs.path.dirname(pkg.source.key_path.text) orelse unreachable;
-
- const rel_path = r.fs.relative(pkg_json_dir, abs_path);
- if (r.checkBrowserMap(pkg, rel_path)) |remap| {
- // Is the path disabled?
- if (remap.len == 0) {
- var _path = Path.init(r.fs.dirname_store.append(string, abs_path) catch unreachable);
- _path.is_disabled = true;
- return Result{
- .path_pair = PathPair{
- .primary = _path,
- },
- };
- }
+ const pkg = import_dir_info.package_json.?;
+ if (r.checkBrowserMap(
+ import_dir_info,
+ abs_path,
+ .AbsolutePath,
+ )) |remap| {
+
+ // Is the path disabled?
+ if (remap.len == 0) {
+ var _path = Path.init(r.fs.dirname_store.append(string, abs_path) catch unreachable);
+ _path.is_disabled = true;
+ return Result{
+ .path_pair = PathPair{
+ .primary = _path,
+ },
+ };
+ }
- if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| {
- result = Result{
- .path_pair = _result.path_pair,
- .diff_case = _result.diff_case,
- .module_type = pkg.module_type,
- .dirname_fd = _result.dirname_fd,
- .package_json = pkg,
- };
- check_relative = false;
- check_package = false;
- }
+ if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| {
+ result = Result{
+ .path_pair = _result.path_pair,
+ .diff_case = _result.diff_case,
+ .module_type = pkg.module_type,
+ .dirname_fd = _result.dirname_fd,
+ .package_json = pkg,
+ };
+ check_relative = false;
+ check_package = false;
}
}
}
@@ -893,7 +903,15 @@ pub fn NewResolver(cache_files: bool) type {
result.package_json = @intToPtr(*PackageJSON, @ptrToInt(fallback_module.package_json));
result.is_from_node_modules = true;
return result;
- } else if (had_node_prefix) {
+            // "node:*"
+ // "fs"
+ // "fs/*"
+ // These are disabled!
+ } else if (had_node_prefix or
+ (import_path_without_node_prefix.len >= 2 and strings.eqlComptimeIgnoreLen(import_path_without_node_prefix[0..2], "fs") and
+ (import_path_without_node_prefix.len == 2 or
+                import_path_without_node_prefix[2] == '/')))
+ {
result.path_pair.primary.namespace = "node";
result.path_pair.primary.text = import_path_without_node_prefix;
result.path_pair.primary.name = Fs.PathName.init(import_path_without_node_prefix);
@@ -930,7 +948,11 @@ pub fn NewResolver(cache_files: bool) type {
// Support remapping one package path to another via the "browser" field
if (source_dir_info.getEnclosingBrowserScope()) |browser_scope| {
if (browser_scope.package_json) |package_json| {
- if (r.checkBrowserMap(package_json, import_path)) |remapped| {
+ if (r.checkBrowserMap(
+ browser_scope,
+ import_path,
+ .PackagePath,
+ )) |remapped| {
if (remapped.len == 0) {
// "browser": {"module": false}
if (r.loadNodeModules(import_path, kind, source_dir_info)) |node_module| {
@@ -960,42 +982,45 @@ pub fn NewResolver(cache_files: bool) type {
}
if (r.resolveWithoutRemapping(source_dir_info, import_path, kind)) |res| {
- result = Result{
- .path_pair = res.path_pair,
- .diff_case = res.diff_case,
- .dirname_fd = res.dirname_fd,
- .package_json = res.package_json,
- };
- } else {
- // Note: node's "self references" are not currently supported
- return null;
- }
- }
+ result.path_pair = res.path_pair;
+ result.dirname_fd = res.dirname_fd;
+ result.file_fd = res.file_fd;
+ result.package_json = res.package_json;
+ result.diff_case = res.diff_case;
- var iter = result.path_pair.iter();
- while (iter.next()) |path| {
- const dirname = std.fs.path.dirname(path.text) orelse continue;
- const base_dir_info = ((r.dirInfoCached(dirname) catch null)) orelse continue;
- const dir_info = base_dir_info.getEnclosingBrowserScope() orelse continue;
- const pkg_json = dir_info.package_json orelse continue;
- const rel_path = r.fs.relative(pkg_json.source.path.name.dirWithTrailingSlash(), path.text);
- result.module_type = pkg_json.module_type;
- result.package_json = result.package_json orelse pkg_json;
- if (r.checkBrowserMap(pkg_json, rel_path)) |remapped| {
- if (remapped.len == 0) {
- path.is_disabled = true;
- } else if (r.resolveWithoutRemapping(dir_info, remapped, kind)) |remapped_result| {
- // iter.index is the next one, not the prev
- switch (iter.index - 1) {
- 0 => {
- result.path_pair.primary = remapped_result.path_pair.primary;
- result.dirname_fd = remapped_result.dirname_fd;
- },
- else => {
- result.path_pair.secondary = remapped_result.path_pair.primary;
- },
+ if (res.path_pair.primary.is_disabled and res.path_pair.secondary == null) {
+ return result;
+ }
+
+ if (res.package_json) |pkg| {
+ var base_dir_info = res.dir_info orelse (r.readDirInfo(res.path_pair.primary.name.dir) catch null) orelse return result;
+ if (base_dir_info.getEnclosingBrowserScope()) |browser_scope| {
+ if (r.checkBrowserMap(
+ browser_scope,
+ res.path_pair.primary.text,
+ .AbsolutePath,
+ )) |remap| {
+ if (remap.len == 0) {
+ result.path_pair.primary.is_disabled = true;
+ result.path_pair.primary = Fs.Path.initWithNamespace(remap, "file");
+ } else {
+ if (r.resolveWithoutRemapping(base_dir_info, remap, kind)) |remapped| {
+ result.path_pair = remapped.path_pair;
+ result.dirname_fd = remapped.dirname_fd;
+ result.file_fd = remapped.file_fd;
+ result.package_json = remapped.package_json;
+ result.diff_case = remapped.diff_case;
+ return result;
+ }
+ }
+ }
}
}
+
+ return result;
+ } else {
+ // Note: node's "self references" are not currently supported
+ return null;
}
}
@@ -1090,6 +1115,7 @@ pub fn NewResolver(cache_files: bool) type {
pub fn loadNodeModules(r: *ThisResolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult {
var res = _loadNodeModules(r, import_path, kind, _dir_info) orelse return null;
res.is_node_module = true;
+
return res;
}
@@ -1108,7 +1134,7 @@ pub fn NewResolver(cache_files: bool) type {
// First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
- if (dir_info.tsconfig_json) |tsconfig| {
+ if (dir_info.enclosing_tsconfig_json) |tsconfig| {
// Try path substitutions first
if (tsconfig.paths.count() > 0) {
if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
@@ -1182,7 +1208,7 @@ pub fn NewResolver(cache_files: bool) type {
const key_path = Path.init(file);
const source = logger.Source.initPathString(key_path.text, entry.contents);
- const file_dir = std.fs.path.dirname(file) orelse return null;
+ const file_dir = source.path.sourceDir();
var result = (try TSConfigJSON.parse(r.allocator, r.log, source, @TypeOf(r.caches.json), &r.caches.json)) orelse return null;
@@ -1516,7 +1542,7 @@ pub fn NewResolver(cache_files: bool) type {
// This closely follows the behavior of "tryLoadModuleUsingPaths()" in the
// official TypeScript compiler
- pub fn matchTSConfigPaths(r: *ThisResolver, tsconfig: *TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult {
+ pub fn matchTSConfigPaths(r: *ThisResolver, tsconfig: *const TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult {
if (r.debug_logs) |*debug| {
debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path }) catch unreachable;
}
@@ -1628,55 +1654,163 @@ pub fn NewResolver(cache_files: bool) type {
return null;
}
- pub fn checkBrowserMap(r: *ThisResolver, pkg: *PackageJSON, input_path: string) ?string {
- // Normalize the path so we can compare against it without getting confused by "./"
- var cleaned = r.fs.normalize(input_path);
- const original_cleaned = cleaned;
+ const BrowserMapPath = struct {
+ remapped: string = "",
+ cleaned: string = "",
+ input_path: string = "",
+ extension_order: []const string,
+ map: BrowserMap,
- if (cleaned.len == 1 and cleaned[0] == '.') {
- // No bundler supports remapping ".", so we don't either
- return null;
- }
+ pub threadlocal var abs_to_rel_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for \"{s}\" in the \"browser\" map in \"{s}\"", .{ input_path, pkg.source.path.text }) catch {};
- }
+ pub const Kind = enum { PackagePath, AbsolutePath };
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for \"{s}\" ", .{cleaned}) catch {};
- }
- var remapped = pkg.browser_map.get(cleaned);
- if (remapped == null) {
- for (r.extension_order) |ext| {
- std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned);
+ pub fn checkPath(
+ this: *BrowserMapPath,
+ path_to_check: string,
+ ) bool {
+ const map = this.map;
+
+ const cleaned = this.cleaned;
+ // Check for equality
+ if (this.map.get(path_to_check)) |result| {
+ this.remapped = result;
+ this.input_path = path_to_check;
+ return true;
+ }
+
+ std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned);
+
+ // If that failed, try adding implicit extensions
+ for (this.extension_order) |ext| {
std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. cleaned.len + ext.len], ext);
const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. cleaned.len + ext.len];
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
- }
- if (pkg.browser_map.get(new_path)) |_remapped| {
- remapped = _remapped;
- cleaned = new_path;
- break;
+ // if (r.debug_logs) |*debug| {
+ // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
+ // }
+ if (map.get(new_path)) |_remapped| {
+ this.remapped = _remapped;
+ this.cleaned = new_path;
+ this.input_path = new_path;
+ return true;
}
}
- }
- if (remapped) |remap| {
- // "" == disabled, {"browser": { "file.js": false }}
- if (remap.len == 0 or (remap.len == 1 and remap[0] == '.')) {
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found \"{s}\" marked as disabled", .{remap}) catch {};
+ // If that failed, try assuming this is a directory and looking for an "index" file
+
+ var index_path: string = "";
+ {
+ var parts = [_]string{ std.mem.trimRight(u8, path_to_check, std.fs.path.sep_str), std.fs.path.sep_str ++ "index" };
+ index_path = ResolvePath.joinStringBuf(&tsconfig_base_url_buf, &parts, .auto);
+ }
+
+ if (map.get(index_path)) |_remapped| {
+ this.remapped = _remapped;
+ this.input_path = index_path;
+ return true;
+ }
+
+ std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, index_path);
+
+ for (this.extension_order) |ext| {
+ std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[index_path.len .. index_path.len + ext.len], ext);
+ const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. index_path.len + ext.len];
+ // if (r.debug_logs) |*debug| {
+ // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
+ // }
+ if (map.get(new_path)) |_remapped| {
+ this.remapped = _remapped;
+ this.cleaned = new_path;
+ this.input_path = new_path;
+ return true;
}
- return remap;
}
- if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found \"{s}\" remapped to \"{s}\"", .{ original_cleaned, remap }) catch {};
+ return false;
+ }
+ };
+
+ pub fn checkBrowserMap(
+ r: *ThisResolver,
+ dir_info: *const DirInfo,
+ input_path_: string,
+ comptime kind: BrowserMapPath.Kind,
+ ) ?string {
+ const package_json = dir_info.package_json orelse return null;
+ const browser_map = package_json.browser_map;
+
+ if (browser_map.count() == 0) return null;
+
+ var input_path = input_path_;
+
+ if (comptime kind == .AbsolutePath) {
+ const abs_path = dir_info.abs_path;
+ // Turn absolute paths into paths relative to the "browser" map location
+ if (!strings.startsWith(input_path, abs_path)) {
+ return null;
}
- // Only allocate on successful remapping.
- return r.allocator.dupe(u8, remap) catch unreachable;
+ input_path = input_path[abs_path.len..];
+ }
+
+ if (input_path.len == 0 or (input_path.len == 1 and (input_path[0] == '.' or input_path[0] == std.fs.path.sep))) {
+ // No bundler supports remapping ".", so we don't either
+ return null;
+ }
+
+ // Normalize the path so we can compare against it without getting confused by "./"
+ var cleaned = r.fs.normalizeBuf(&check_browser_map_buf, input_path);
+
+ if (cleaned.len == 1 and cleaned[0] == '.') {
+ // No bundler supports remapping ".", so we don't either
+ return null;
+ }
+
+ var checker = BrowserMapPath{
+ .remapped = "",
+ .cleaned = cleaned,
+ .input_path = input_path,
+ .extension_order = r.extension_order,
+ .map = package_json.browser_map,
+ };
+
+ if (checker.checkPath(input_path)) {
+ return checker.remapped;
+ }
+
+ // First try the import path as a package path
+ if (isPackagePath(checker.input_path)) {
+ switch (comptime kind) {
+ .AbsolutePath => {
+ BrowserMapPath.abs_to_rel_buf[0..2].* = "./".*;
+ std.mem.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path);
+ if (checker.checkPath(BrowserMapPath.abs_to_rel_buf[0 .. checker.input_path.len + 2])) {
+ return checker.remapped;
+ }
+ },
+ .PackagePath => {
+ // Browserify allows a browser map entry of "./pkg" to override a package
+ // path of "require('pkg')". This is weird, and arguably a bug. But we
+ // replicate this bug for compatibility. However, Browserify only allows
+ // this within the same package. It does not allow such an entry in a
+ // parent package to override this in a child package. So this behavior
+ // is disallowed if there is a "node_modules" folder in between the child
+ // package and the parent package.
+ const isInSamePackage = brk: {
+ const parent = dir_info.getParent() orelse break :brk true;
+ break :brk !parent.is_node_modules;
+ };
+
+ if (isInSamePackage) {
+ BrowserMapPath.abs_to_rel_buf[0..2].* = "./".*;
+ std.mem.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path);
+
+ if (checker.checkPath(BrowserMapPath.abs_to_rel_buf[0 .. checker.input_path.len + 2])) {
+ return checker.remapped;
+ }
+ }
+ },
+ }
}
return null;
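
For reference, the lookup order that BrowserMapPath.checkPath implements above reduces to the following self-contained sketch (separate from the patch itself; the extension list, buffer handling, and helper name are simplified stand-ins for the resolver's real ones):

const std = @import("std");

// Simplified "browser" map lookup: exact match first, then implicit extensions,
// then "<path>/index" bare and with the same extensions.
fn checkBrowserMapSketch(map: std.StringHashMap([]const u8), cleaned: []const u8, buf: []u8) ?[]const u8 {
    if (map.get(cleaned)) |remapped| return remapped;

    const exts = [_][]const u8{ ".js", ".json" };
    for (exts) |ext| {
        const with_ext = std.fmt.bufPrint(buf, "{s}{s}", .{ cleaned, ext }) catch continue;
        if (map.get(with_ext)) |remapped| return remapped;
    }

    const index_base = std.fmt.bufPrint(buf, "{s}/index", .{cleaned}) catch return null;
    if (map.get(index_base)) |remapped| return remapped;

    for (exts) |ext| {
        const index_with_ext = std.fmt.bufPrint(buf, "{s}/index{s}", .{ cleaned, ext }) catch continue;
        if (map.get(index_with_ext)) |remapped| return remapped;
    }
    return null;
}

pub fn main() !void {
    var map = std.StringHashMap([]const u8).init(std.heap.page_allocator);
    defer map.deinit();
    try map.put("./util.js", "./util-browser.js");

    var buf: [1024]u8 = undefined;
    // "./util" only matches once the implicit ".js" extension is tried.
    std.debug.print("{s}\n", .{checkBrowserMapSketch(map, "./util", &buf).?});
}
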
@@ -1699,7 +1833,11 @@ pub fn NewResolver(cache_files: bool) type {
// Potentially remap using the "browser" field
if (dir_info.getEnclosingBrowserScope()) |browser_scope| {
if (browser_scope.package_json) |browser_json| {
- if (r.checkBrowserMap(browser_json, field_rel_path)) |remap| {
+ if (r.checkBrowserMap(
+ browser_scope,
+ field_rel_path,
+ .AbsolutePath,
+ )) |remap| {
// Is the path disabled?
if (remap.len == 0) {
const paths = [_]string{ path, field_rel_path };
@@ -1759,9 +1897,16 @@ pub fn NewResolver(cache_files: bool) type {
if (dir_info.getEntries()) |entries| {
if (entries.get(base)) |lookup| {
if (lookup.entry.kind(rfs) == .file) {
- const parts = [_]string{ path, base };
- const out_buf_ = r.fs.absBuf(&parts, &index_buf);
- const out_buf = r.fs.dirname_store.append(@TypeOf(out_buf_), out_buf_) catch unreachable;
+ const out_buf = brk: {
+ if (lookup.entry.abs_path.isEmpty()) {
+ const parts = [_]string{ path, base };
+ const out_buf_ = r.fs.absBuf(&parts, &index_buf);
+ lookup.entry.abs_path =
+ PathString.init(r.fs.dirname_store.append(@TypeOf(out_buf_), out_buf_) catch unreachable);
+ }
+ break :brk lookup.entry.abs_path.slice();
+ };
+
if (r.debug_logs) |*debug| {
debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable;
}
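
The hunk above, like the similar hunks further down that cache query.entry.abs_path, switches to a lazily-populated cache: build the absolute path once, store it on the entry as a PathString, and hand out the cached slice on later hits. A reduced sketch of that pattern, separate from the patch, with Entry and the join as stand-ins for the real resolver types:

const std = @import("std");

const Entry = struct {
    dir: []const u8,
    base: []const u8,
    abs_path: []const u8 = "", // empty means "not joined yet"
};

// Join dir + base on first use, cache the result on the entry, and reuse it afterwards.
// `storage` stands in for the resolver's long-lived dirname_store.
fn cachedAbsPath(entry: *Entry, storage: []u8) []const u8 {
    const out = brk: {
        if (entry.abs_path.len == 0) {
            entry.abs_path = std.fmt.bufPrint(storage, "{s}/{s}", .{ entry.dir, entry.base }) catch unreachable;
        }
        break :brk entry.abs_path;
    };
    return out;
}

pub fn main() void {
    var storage: [1024]u8 = undefined;
    var entry = Entry{ .dir = "/app/node_modules/pkg", .base = "index.js" };
    // The second call hits the cache and does no formatting work.
    std.debug.print("{s}\n{s}\n", .{ cachedAbsPath(&entry, &storage), cachedAbsPath(&entry, &storage) });
}
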
@@ -1793,16 +1938,30 @@ pub fn NewResolver(cache_files: bool) type {
return null;
}
- pub fn loadAsIndexWithBrowserRemapping(r: *ThisResolver, dir_info: *DirInfo, path: string, extension_order: []const string) ?MatchResult {
+ pub fn loadAsIndexWithBrowserRemapping(r: *ThisResolver, dir_info: *DirInfo, path_: string, extension_order: []const string) ?MatchResult {
+ // In order for our path handling logic to be correct, it must end with a trailing slash.
+ var path = path_;
+ if (!strings.endsWithChar(path_, std.fs.path.sep)) {
+ std.mem.copy(u8, &remap_path_trailing_slash, path);
+ remap_path_trailing_slash[path.len] = std.fs.path.sep;
+ remap_path_trailing_slash[path.len + 1] = 0;
+ path = remap_path_trailing_slash[0 .. path.len + 1];
+ }
+
if (dir_info.getEnclosingBrowserScope()) |browser_scope| {
const field_rel_path = comptime "index";
+
if (browser_scope.package_json) |browser_json| {
- if (r.checkBrowserMap(browser_json, field_rel_path)) |remap| {
+ if (r.checkBrowserMap(
+ browser_scope,
+ field_rel_path,
+ .AbsolutePath,
+ )) |remap| {
+
// Is the path disabled?
- // This doesn't really make sense to me.
if (remap.len == 0) {
const paths = [_]string{ path, field_rel_path };
- const new_path = r.fs.absAlloc(r.allocator, &paths) catch unreachable;
+ const new_path = r.fs.absBuf(&paths, &remap_path_buf);
var _path = Path.init(new_path);
_path.is_disabled = true;
return MatchResult{
@@ -1814,7 +1973,7 @@ pub fn NewResolver(cache_files: bool) type {
}
const new_paths = [_]string{ path, remap };
- const remapped_abs = r.fs.absAlloc(r.allocator, &new_paths) catch unreachable;
+ const remapped_abs = r.fs.absBuf(&new_paths, &remap_path_buf);
// Is this a file
if (r.loadAsFile(remapped_abs, extension_order)) |file_result| {
@@ -1833,7 +1992,7 @@ pub fn NewResolver(cache_files: bool) type {
}
}
- return r.loadAsIndex(dir_info, path, extension_order);
+ return r.loadAsIndex(dir_info, path_, extension_order);
}
pub fn loadAsFileOrDirectory(r: *ThisResolver, path: string, kind: ast.ImportKind) ?MatchResult {
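
The trailing-slash handling added to loadAsIndexWithBrowserRemapping above boils down to appending exactly one separator into a reusable buffer when the path does not already end with one. A minimal standalone version of that helper, separate from the patch; the buffer here is caller-owned rather than the file-scope remap_path_trailing_slash:

const std = @import("std");

// Return `path` unchanged if it already ends with the platform separator,
// otherwise copy it into `buf` and append one.
fn withTrailingSlash(path: []const u8, buf: []u8) []const u8 {
    if (path.len > 0 and path[path.len - 1] == std.fs.path.sep) return path;
    std.mem.copy(u8, buf, path);
    buf[path.len] = std.fs.path.sep;
    return buf[0 .. path.len + 1];
}

pub fn main() void {
    var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
    std.debug.print("{s}\n", .{withTrailingSlash("/app/node_modules/pkg", &buf)});
}
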
@@ -2037,8 +2196,15 @@ pub fn NewResolver(cache_files: bool) type {
if (r.debug_logs) |*debug| {
debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {};
}
- const abs_path_parts = [_]string{ query.entry.dir, query.entry.base() };
- const abs_path = r.fs.dirname_store.append(string, r.fs.absBuf(&abs_path_parts, &TemporaryBuffer.ExtensionPathBuf)) catch unreachable;
+
+ const abs_path = brk: {
+ if (query.entry.abs_path.isEmpty()) {
+ const abs_path_parts = [_]string{ query.entry.dir, query.entry.base() };
+ query.entry.abs_path = PathString.init(r.fs.dirname_store.append(string, r.fs.absBuf(&abs_path_parts, &load_as_file_buf)) catch unreachable);
+ }
+
+ break :brk query.entry.abs_path.slice();
+ };
return LoadResult{
.path = abs_path,
@@ -2050,9 +2216,9 @@ pub fn NewResolver(cache_files: bool) type {
}
// Try the path with extensions
- std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, path);
+ std.mem.copy(u8, &load_as_file_buf, path);
for (r.extension_order) |ext| {
- var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. path.len + ext.len];
+ var buffer = load_as_file_buf[0 .. path.len + ext.len];
std.mem.copy(u8, buffer[path.len..buffer.len], ext);
const file_name = buffer[path.len - base.len .. buffer.len];
@@ -2068,7 +2234,14 @@ pub fn NewResolver(cache_files: bool) type {
// now that we've found it, we allocate it.
return LoadResult{
- .path = r.fs.dirname_store.append(@TypeOf(buffer), buffer) catch unreachable,
+ .path = brk: {
+ query.entry.abs_path = if (query.entry.abs_path.isEmpty())
+ PathString.init(r.fs.dirname_store.append(@TypeOf(buffer), buffer) catch unreachable)
+ else
+ query.entry.abs_path;
+
+ break :brk query.entry.abs_path.slice();
+ },
.diff_case = query.diff_case,
.dirname_fd = entries.fd,
.file_fd = query.entry.cache.fd,
@@ -2096,12 +2269,12 @@ pub fn NewResolver(cache_files: bool) type {
const ext = base[last_dot..base.len];
if (strings.eql(ext, ".js") or strings.eql(ext, ".jsx")) {
const segment = base[0..last_dot];
- std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, segment);
+ std.mem.copy(u8, &load_as_file_buf, segment);
const exts = comptime [_]string{ ".ts", ".tsx" };
for (exts) |ext_to_replace| {
- var buffer = TemporaryBuffer.ExtensionPathBuf[0 .. segment.len + ext_to_replace.len];
+ var buffer = load_as_file_buf[0 .. segment.len + ext_to_replace.len];
std.mem.copy(u8, buffer[segment.len..buffer.len], ext_to_replace);
if (entries.get(buffer)) |query| {
@@ -2111,7 +2284,14 @@ pub fn NewResolver(cache_files: bool) type {
}
return LoadResult{
- .path = r.fs.dirname_store.append(@TypeOf(buffer), buffer) catch unreachable,
+ .path = brk: {
+ query.entry.abs_path = if (query.entry.abs_path.isEmpty())
+ PathString.init(r.fs.dirname_store.append(@TypeOf(buffer), buffer) catch unreachable)
+ else
+ query.entry.abs_path;
+
+ break :brk query.entry.abs_path.slice();
+ },
.diff_case = query.diff_case,
.dirname_fd = entries.fd,
.file_fd = query.entry.cache.fd,
@@ -2169,8 +2349,10 @@ pub fn NewResolver(cache_files: bool) type {
// base must not end with a trailing slash
if (base.len > 1 and base[base.len - 1] == std.fs.path.sep) base = base[0 .. base.len - 1];
+ info.is_node_modules = strings.eqlComptime(base, "node_modules");
+
// if (entries != null) {
- if (!strings.eqlComptime(base, "node_modules")) {
+ if (!info.is_node_modules) {
if (entries.getComptimeQuery("node_modules")) |entry| {
info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
}
@@ -2181,6 +2363,8 @@ pub fn NewResolver(cache_files: bool) type {
// Propagate the browser scope into child directories
info.enclosing_browser_scope = parent.?.enclosing_browser_scope;
+ info.enclosing_package_json = parent.?.enclosing_package_json;
+ info.enclosing_tsconfig_json = parent.?.enclosing_tsconfig_json;
// Make sure "absRealPath" is the real path of the directory (resolving any symlinks)
if (!r.opts.preserve_symlinks) {
@@ -2198,11 +2382,12 @@ pub fn NewResolver(cache_files: bool) type {
} else if (parent.?.abs_real_path.len > 0) {
// this might leak a little i'm not sure
const parts = [_]string{ parent.?.abs_real_path, base };
- symlink = r.fs.dirname_store.append(string, r.fs.joinBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
+ symlink = r.fs.dirname_store.append(string, r.fs.absBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
if (r.debug_logs) |*logs| {
try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
}
+ lookup.entry.cache.symlink = PathString.init(symlink);
info.abs_real_path = symlink;
}
}
@@ -2219,6 +2404,7 @@ pub fn NewResolver(cache_files: bool) type {
if (info.package_json) |pkg| {
if (pkg.browser_map.count() > 0) {
info.enclosing_browser_scope = result.index;
+ info.enclosing_package_json = pkg;
}
if (r.debug_logs) |*logs| {
@@ -2269,12 +2455,9 @@ pub fn NewResolver(cache_files: bool) type {
}
break :brk null;
};
+ info.enclosing_tsconfig_json = info.tsconfig_json;
}
}
-
- if (info.tsconfig_json == null and parent != null) {
- info.tsconfig_json = parent.?.tsconfig_json;
- }
}
};
}
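
Taken together, the dir_info changes above make each cached directory inherit its parent's enclosing package.json and tsconfig.json, overriding them only when the directory defines its own. A reduced model of that propagation, separate from the patch, with DirInfo trimmed down to the two fields involved:

const std = @import("std");

const DirInfo = struct {
    enclosing_package_json: ?[]const u8 = null, // nearest package.json with a "browser" map
    enclosing_tsconfig_json: ?[]const u8 = null, // nearest tsconfig.json

    // Mirrors the propagation order above: copy from the parent first,
    // then let the directory's own files win.
    fn link(info: *DirInfo, parent: ?*const DirInfo, own_package_json: ?[]const u8, own_tsconfig: ?[]const u8) void {
        if (parent) |p| {
            info.enclosing_package_json = p.enclosing_package_json;
            info.enclosing_tsconfig_json = p.enclosing_tsconfig_json;
        }
        if (own_package_json) |pkg| info.enclosing_package_json = pkg;
        if (own_tsconfig) |tsconfig| info.enclosing_tsconfig_json = tsconfig;
    }
};

pub fn main() void {
    var root = DirInfo{};
    root.link(null, "/app/package.json", "/app/tsconfig.json");

    var child = DirInfo{};
    child.link(&root, null, null);

    // The child defines neither file itself, so it sees the root's.
    std.debug.assert(std.mem.eql(u8, child.enclosing_package_json.?, "/app/package.json"));
    std.debug.assert(std.mem.eql(u8, child.enclosing_tsconfig_json.?, "/app/tsconfig.json"));
}
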
diff --git a/src/runtime.js b/src/runtime.js
index 2bcbdacc5..e2fc4e681 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -127,7 +127,7 @@ export var __require = (namespace) => {
return namespace;
// // is it an ESM module record?
// if (namespaceType === "object") return namespace;
- // // is it a CommonJS module?
+ // // is it a CommonJS module?
// // i have no idea what it is so i'm just going to try stuff and pray
// var entry = require_cache.get(namespace);
diff --git a/src/runtime.version b/src/runtime.version
index 50cc16396..2c1fbc202 100644
--- a/src/runtime.version
+++ b/src/runtime.version
@@ -1 +1 @@
-56f0fd66cd90913c \ No newline at end of file
+510583ac26030943 \ No newline at end of file
diff --git a/src/runtime.zig b/src/runtime.zig
index dcb84d7e8..78f0776f3 100644
--- a/src/runtime.zig
+++ b/src/runtime.zig
@@ -208,21 +208,26 @@ pub const Runtime = struct {
pub const ActivateFunction = "activate";
};
+ pub const GeneratedSymbol = struct {
+ primary: Ref,
+ backup: Ref,
+ ref: Ref,
+ };
+
// If you change this, remember to update "runtime.footer.js" and rebuild the runtime.js
pub const Imports = struct {
- __name: ?Ref = null,
- __toModule: ?Ref = null,
- __cJS2eSM: ?Ref = null,
- __require: ?Ref = null,
- __export: ?Ref = null,
- __reExport: ?Ref = null,
- __load: ?Ref = null,
- load_from_bundle: ?Ref = null,
- register: ?Ref = null,
- lazy_export: ?Ref = null,
- __HMRModule: ?Ref = null,
- __HMRClient: ?Ref = null,
- __FastRefreshModule: ?Ref = null,
+ __name: ?GeneratedSymbol = null,
+ __toModule: ?GeneratedSymbol = null,
+ __cJS2eSM: ?GeneratedSymbol = null,
+ __require: ?GeneratedSymbol = null,
+ __export: ?GeneratedSymbol = null,
+ __reExport: ?GeneratedSymbol = null,
+ __load: ?GeneratedSymbol = null,
+ @"$$m": ?GeneratedSymbol = null,
+ @"$$lzy": ?GeneratedSymbol = null,
+ __HMRModule: ?GeneratedSymbol = null,
+ __HMRClient: ?GeneratedSymbol = null,
+ __FastRefreshModule: ?GeneratedSymbol = null,
pub const all = [_][]const u8{
"__name",
@@ -232,11 +237,8 @@ pub const Runtime = struct {
"__export",
"__reExport",
"__load",
- // require
- "load_from_bundle",
- //
- "register",
- "lazy_export",
+ "$$m",
+ "$$lzy",
"__HMRModule",
"__HMRClient",
"__FastRefreshModule",
@@ -260,67 +262,62 @@ pub const Runtime = struct {
switch (this.i) {
0 => {
if (@field(this.runtime_imports, all[0])) |val| {
- return Entry{ .key = 0, .value = val };
+ return Entry{ .key = 0, .value = val.ref };
}
},
1 => {
if (@field(this.runtime_imports, all[1])) |val| {
- return Entry{ .key = 1, .value = val };
+ return Entry{ .key = 1, .value = val.ref };
}
},
2 => {
if (@field(this.runtime_imports, all[2])) |val| {
- return Entry{ .key = 2, .value = val };
+ return Entry{ .key = 2, .value = val.ref };
}
},
3 => {
if (@field(this.runtime_imports, all[3])) |val| {
- return Entry{ .key = 3, .value = val };
+ return Entry{ .key = 3, .value = val.ref };
}
},
4 => {
if (@field(this.runtime_imports, all[4])) |val| {
- return Entry{ .key = 4, .value = val };
+ return Entry{ .key = 4, .value = val.ref };
}
},
5 => {
if (@field(this.runtime_imports, all[5])) |val| {
- return Entry{ .key = 5, .value = val };
+ return Entry{ .key = 5, .value = val.ref };
}
},
6 => {
if (@field(this.runtime_imports, all[6])) |val| {
- return Entry{ .key = 6, .value = val };
+ return Entry{ .key = 6, .value = val.ref };
}
},
7 => {
if (@field(this.runtime_imports, all[7])) |val| {
- return Entry{ .key = 7, .value = val };
+ return Entry{ .key = 7, .value = val.ref };
}
},
8 => {
if (@field(this.runtime_imports, all[8])) |val| {
- return Entry{ .key = 8, .value = val };
+ return Entry{ .key = 8, .value = val.ref };
}
},
9 => {
if (@field(this.runtime_imports, all[9])) |val| {
- return Entry{ .key = 9, .value = val };
+ return Entry{ .key = 9, .value = val.ref };
}
},
10 => {
if (@field(this.runtime_imports, all[10])) |val| {
- return Entry{ .key = 10, .value = val };
+ return Entry{ .key = 10, .value = val.ref };
}
},
11 => {
if (@field(this.runtime_imports, all[11])) |val| {
- return Entry{ .key = 11, .value = val };
- }
- },
- 12 => {
- if (@field(this.runtime_imports, all[12])) |val| {
- return Entry{ .key = 12, .value = val };
+ return Entry{ .key = 11, .value = val.ref };
}
},
@@ -352,15 +349,15 @@ pub const Runtime = struct {
return false;
}
- pub fn put(imports: *Imports, comptime key: string, ref: Ref) void {
- @field(imports, key) = ref;
+ pub fn put(imports: *Imports, comptime key: string, generated_symbol: GeneratedSymbol) void {
+ @field(imports, key) = generated_symbol;
}
pub fn at(
imports: *Imports,
comptime key: string,
) ?Ref {
- return @field(imports, key);
+ return (@field(imports, key) orelse return null).ref;
}
pub fn get(
@@ -368,19 +365,18 @@ pub const Runtime = struct {
key: anytype,
) ?Ref {
return switch (key) {
- 0 => @field(imports, all[0]),
- 1 => @field(imports, all[1]),
- 2 => @field(imports, all[2]),
- 3 => @field(imports, all[3]),
- 4 => @field(imports, all[4]),
- 5 => @field(imports, all[5]),
- 6 => @field(imports, all[6]),
- 7 => @field(imports, all[7]),
- 8 => @field(imports, all[8]),
- 9 => @field(imports, all[9]),
- 10 => @field(imports, all[10]),
- 11 => @field(imports, all[11]),
- 12 => @field(imports, all[12]),
+ 0 => (@field(imports, all[0]) orelse return null).ref,
+ 1 => (@field(imports, all[1]) orelse return null).ref,
+ 2 => (@field(imports, all[2]) orelse return null).ref,
+ 3 => (@field(imports, all[3]) orelse return null).ref,
+ 4 => (@field(imports, all[4]) orelse return null).ref,
+ 5 => (@field(imports, all[5]) orelse return null).ref,
+ 6 => (@field(imports, all[6]) orelse return null).ref,
+ 7 => (@field(imports, all[7]) orelse return null).ref,
+ 8 => (@field(imports, all[8]) orelse return null).ref,
+ 9 => (@field(imports, all[9]) orelse return null).ref,
+ 10 => (@field(imports, all[10]) orelse return null).ref,
+ 11 => (@field(imports, all[11]) orelse return null).ref,
else => null,
};
}
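
The net effect of the runtime.zig changes is that each runtime import now carries a GeneratedSymbol (primary, backup, and the final ref) instead of a bare Ref, and every lookup unwraps the optional and hands back only the .ref field. A self-contained illustration of that unwrap pattern, separate from the patch; Ref is a stand-in here and the struct is trimmed to one field:

const std = @import("std");

const Ref = u32; // stand-in for the AST's Ref type
const GeneratedSymbol = struct { primary: Ref, backup: Ref, ref: Ref };

const Imports = struct {
    __toModule: ?GeneratedSymbol = null,

    pub fn put(imports: *Imports, comptime key: []const u8, generated_symbol: GeneratedSymbol) void {
        @field(imports, key) = generated_symbol;
    }

    pub fn at(imports: *Imports, comptime key: []const u8) ?Ref {
        // "orelse return null" unwraps the optional before reaching into .ref,
        // the same shape as the (@field(imports, all[i]) orelse return null).ref lookups above.
        return (@field(imports, key) orelse return null).ref;
    }
};

pub fn main() void {
    var imports = Imports{};
    std.debug.assert(imports.at("__toModule") == null);

    imports.put("__toModule", .{ .primary = 1, .backup = 2, .ref = 3 });
    std.debug.assert(imports.at("__toModule").? == 3);
}
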
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index 923d860cc..83719571b 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -206,6 +206,10 @@ pub inline fn endsWith(self: string, str: string) bool {
return str.len == 0 or @call(.{ .modifier = .always_inline }, std.mem.endsWith, .{ u8, self, str });
}
+pub inline fn endsWithChar(self: string, char: u8) bool {
+ return self.len > 0 and self[self.len - 1] == char;
+}
+
pub fn endsWithAny(self: string, str: string) bool {
const end = self[self.len - 1];
for (str) |char| {
diff --git a/src/string_types.zig b/src/string_types.zig
index 549e7cf2d..02f692629 100644
--- a/src/string_types.zig
+++ b/src/string_types.zig
@@ -1,3 +1,43 @@
+const std = @import("std");
pub const string = []const u8;
pub const stringMutable = []u8;
pub const CodePoint = i32;
+
+// macOS caps file paths at 1024 bytes, so a path length fits in 11 bits.
+// On x86-64, user-space pointers only use the low 48 bits of the 64-bit address space,
+// so we can safely pack the entire path slice (pointer + length) into a single u64.
+pub const PathString = packed struct {
+ const PathIntLen = std.math.IntFittingRange(0, std.fs.MAX_PATH_BYTES);
+ pub const use_small_path_string = @bitSizeOf(usize) - @bitSizeOf(PathIntLen) >= 53;
+ pub const PathInt = if (use_small_path_string) PathIntLen else usize;
+ pub const PointerIntType = if (use_small_path_string) u53 else usize;
+ ptr: PointerIntType,
+ len: PathInt,
+
+ pub inline fn slice(this: PathString) string {
+ @setRuntimeSafety(false); // "cast causes pointer to be null" is fine here. if it is null, the len will be 0.
+ return @intToPtr([*]u8, @intCast(usize, this.ptr))[0..this.len];
+ }
+
+ pub inline fn init(str: string) PathString {
+ @setRuntimeSafety(false); // "cast causes pointer to be null" is fine here. if it is null, the len will be 0.
+
+ return PathString{
+ .ptr = @truncate(PointerIntType, @ptrToInt(str.ptr)),
+ .len = @truncate(PathInt, str.len),
+ };
+ }
+
+ pub inline fn isEmpty(this: PathString) bool {
+ return this.len == 0;
+ }
+
+ pub const empty = PathString{ .ptr = 0, .len = 0 };
+ comptime {
+ if (use_small_path_string and @bitSizeOf(PathString) != 64) {
+ @compileError("PathString must be 64 bits");
+ } else if (!use_small_path_string and @bitSizeOf(PathString) != 128) {
+ @compileError("PathString must be 128 bits");
+ }
+ }
+};
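
As a usage sketch (separate from the patch, and assuming it sits next to string_types.zig and builds with the same Zig version as the rest of the tree), packing and unpacking a path through PathString looks like this:

const std = @import("std");
const PathString = @import("./string_types.zig").PathString;

pub fn main() void {
    const path: []const u8 = "/usr/local/lib/node_modules";
    const packed_path = PathString.init(path);

    // The original slice is recoverable from the packed pointer + length.
    std.debug.assert(!packed_path.isEmpty());
    std.debug.assert(std.mem.eql(u8, packed_path.slice(), path));

    // The zero value is the documented "empty" sentinel.
    std.debug.assert(PathString.empty.isEmpty());
}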