 src/allocators.zig          | 24
 src/ast/base.zig            | 27
 src/cache.zig               |  6
 src/defines.zig             | 46
 src/fs.zig                  | 12
 src/js_parser/js_parser.zig |  3
 src/options.zig             | 16
 src/resolver/resolver.zig   | 54
 src/string_immutable.zig    |  4
 9 files changed, 126 insertions(+), 66 deletions(-)
diff --git a/src/allocators.zig b/src/allocators.zig
index 1bee408a0..f7ba9ff87 100644
--- a/src/allocators.zig
+++ b/src/allocators.zig
@@ -70,7 +70,15 @@ pub const IndexType = packed struct {
 };
 const HashKeyType = u64;
-const IndexMap = std.HashMapUnmanaged(HashKeyType, IndexType, hash_hashFn, hash_eqlFn, 80);
+const IndexMap = std.HashMapUnmanaged(HashKeyType, IndexType, struct {
+    pub fn hash(ctx: @This(), key: HashKeyType) HashKeyType {
+        return key;
+    }
+
+    pub fn eql(ctx: @This(), a: HashKeyType, b: HashKeyType) bool {
+        return a == b;
+    }
+}, 80);
 pub const Result = struct {
     hash: HashKeyType,
     index: IndexType,
@@ -97,14 +105,6 @@ pub const Unassigned = IndexType{
     .index = std.math.maxInt(u31) - 1,
 };
 
-pub fn hash_hashFn(key: HashKeyType) HashKeyType {
-    return key;
-}
-
-pub fn hash_eqlFn(a: HashKeyType, b: HashKeyType) bool {
-    return a == b;
-}
-
 pub const ItemStatus = enum(u3) {
     unknown,
     exists,
@@ -389,15 +389,15 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, store_keys: boo
         if (index.found_existing) {
             return Result{
                 .hash = _key,
-                .index = index.entry.value,
-                .status = switch (index.entry.value.index) {
+                .index = index.value_ptr.*,
+                .status = switch (index.value_ptr.index) {
                     NotFound.index => .not_found,
                     Unassigned.index => .unknown,
                     else => .exists,
                 },
             };
         }
-        index.entry.value = Unassigned;
+        index.value_ptr.* = Unassigned;
 
         return Result{
             .hash = _key,
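Note on the allocators.zig hunk: Zig's std.HashMapUnmanaged no longer takes bare hash/eql functions; it takes a context type whose hash and eql methods the table calls, with the max load percentage (80 here) still passed last. A minimal sketch of the new shape against the post-0.8 std API; the IdentityContext name and u32 payload are illustrative, not from this commit:

    const std = @import("std");

    // Identity hashing is fine when the keys are already well-mixed
    // 64-bit hashes, as they are for IndexMap.
    const IdentityContext = struct {
        pub fn hash(self: @This(), key: u64) u64 {
            return key;
        }
        pub fn eql(self: @This(), a: u64, b: u64) bool {
            return a == b;
        }
    };

    const Map = std.HashMapUnmanaged(u64, u32, IdentityContext, 80);

    test "context-based hash map" {
        var map = Map{};
        defer map.deinit(std.testing.allocator);
        try map.put(std.testing.allocator, 0xdeadbeef, 1);
        std.debug.assert(map.get(0xdeadbeef).? == 1);
    }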
diff --git a/src/ast/base.zig b/src/ast/base.zig
index 59d7c6c12..f1877018c 100644
--- a/src/ast/base.zig
+++ b/src/ast/base.zig
@@ -22,15 +22,22 @@ pub const NodeIndexNone = 4294967293;
 // be an array of arrays indexed first by source index, then by inner index.
 // The maps can be merged quickly by creating a single outer array containing
 // all inner arrays from all parsed files.
+
+pub const RefHashCtx = struct {
+    pub fn hash(ctx: @This(), key: Ref) u32 {
+        return @truncate(u32, std.hash.Wyhash.hash(0, std.mem.asBytes(&key)));
+    }
+
+    pub fn eql(ctx: @This(), ref: Ref, b: Ref) bool {
+        return std.mem.readIntNative(u64, std.mem.asBytes(&ref)) == std.mem.readIntNative(u64, std.mem.asBytes(&b));
+    }
+};
+
 pub const Ref = packed struct {
     source_index: Int = std.math.maxInt(Ref.Int),
     inner_index: Int = 0,
     is_source_contents_slice: bool = false,
 
-    pub fn hash(key: Ref) u32 {
-        return @truncate(u32, std.hash.Wyhash.hash(0, std.mem.asBytes(&key)));
-    }
-
     // 2 bits of padding for whatever is the parent
     pub const Int = u30;
     pub const None = Ref{
@@ -44,6 +51,14 @@ pub const Ref = packed struct {
     pub fn toInt(int: anytype) Int {
         return @intCast(Int, int);
     }
+
+    pub fn hash(key: Ref) u32 {
+        return @truncate(u32, std.hash.Wyhash.hash(0, std.mem.asBytes(&key)));
+    }
+
+    pub fn eql(ref: Ref, b: Ref) bool {
+        return std.mem.readIntNative(u64, std.mem.asBytes(&ref)) == std.mem.readIntNative(u64, std.mem.asBytes(&b));
+    }
     pub fn isNull(self: *const Ref) bool {
         return self.source_index == std.math.maxInt(Ref.Int) and self.inner_index == std.math.maxInt(Ref.Int);
     }
@@ -56,10 +71,6 @@ pub const Ref = packed struct {
         return int == std.math.maxInt(Ref.Int);
     }
 
-    pub fn eql(ref: Ref, b: Ref) bool {
-        return std.mem.readIntNative(u64, std.mem.asBytes(&ref)) == std.mem.readIntNative(u64, std.mem.asBytes(&b));
-    }
-
     pub fn jsonStringify(self: *const Ref, options: anytype, writer: anytype) !void {
         return try std.json.stringify([2]u32{ self.source_index, self.inner_index }, options, writer);
     }
diff --git a/src/cache.zig b/src/cache.zig
index 996f09588..c28312fbf 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -141,10 +141,10 @@ pub fn NewCache(comptime cache_files: bool) type {
                 var res = c.entries.getOrPut(path) catch unreachable;
 
                 if (res.found_existing) {
-                    res.entry.value.deinit(c.entries.allocator);
+                    res.value_ptr.*.deinit(c.entries.allocator);
                 }
-                res.entry.value = entry;
-                return res.entry.value;
+                res.value_ptr.* = entry;
+                return res.value_ptr.*;
             } else {
                 return entry;
             }
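The new RefHashCtx packages the same two operations as a context for Ref-keyed maps. Its hash returns u32, the shape std.ArrayHashMap-style contexts expect (plain std.HashMap contexts return u64), and both methods read the packed Ref through std.mem.asBytes as its raw 64-bit representation. A sketch of wiring it up; the import path, the RefMap alias, and the u32 payload are assumptions for illustration:

    const std = @import("std");
    const base = @import("ast/base.zig"); // path assumed from this repo's layout
    const Ref = base.Ref;

    // ArrayHashMap calls hash/eql on the context rather than on the key
    // type, so Ref itself stays a plain packed struct.
    const RefMap = std.ArrayHashMapUnmanaged(Ref, u32, base.RefHashCtx, false);

    test "Ref-keyed map via RefHashCtx" {
        var map = RefMap{};
        defer map.deinit(std.testing.allocator);
        try map.put(std.testing.allocator, Ref{ .source_index = 0, .inner_index = 1 }, 42);
        std.debug.assert(map.get(Ref{ .source_index = 0, .inner_index = 1 }).? == 42);
    }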
diff --git a/src/defines.zig b/src/defines.zig
index dab384600..58764c241 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -65,39 +65,39 @@ pub const DefineData = struct {
         var iter = defines.iterator();
         while (iter.next()) |entry| {
-            var splitter = std.mem.split(entry.key, ".");
+            var splitter = std.mem.split(entry.key_ptr.*, ".");
             while (splitter.next()) |part| {
                 if (!js_lexer.isIdentifier(part)) {
-                    if (strings.eql(part, entry.key)) {
-                        try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
+                    if (strings.eql(part, entry.key_ptr)) {
+                        try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key_ptr});
                     } else {
-                        try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
+                        try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key_ptr });
                     }
                     break;
                 }
             }
 
-            if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
+            if (js_lexer.isIdentifier(entry.value_ptr.*) and !js_lexer.Keywords.has(entry.value_ptr.*)) {
                 var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
                 ident.ref = Ref.None;
                 ident.can_be_removed_if_unused = true;
 
                 user_defines.putAssumeCapacity(
-                    entry.key,
+                    entry.key_ptr.*,
                     DefineData{
                         .value = js_ast.Expr.Data{ .e_identifier = ident },
-                        .original_name = entry.value,
+                        .original_name = entry.value_ptr.*,
                         .can_be_removed_if_unused = true,
                     },
                 );
                 // user_defines.putAssumeCapacity(
-                //     entry.key,
+                //     entry.key_ptr,
                 //     DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
                 // );
                 continue;
             }
 
             var _log = log;
             var source = logger.Source{
-                .contents = entry.value,
+                .contents = entry.value_ptr.*,
                 .path = defines_path,
                 .identifier_name = "defines",
                 .key_path = fs.Path.initWithNamespace("defines", "internal"),
@@ -110,9 +110,8 @@
                 },
                 // We must copy so we don't recycle
                 .e_string => {
-                    const e_string = try expr.data.e_string.clone(allocator);
-                    expr.data.e_string.* = e_string;
-                    data = expr.data;
+                    data = .{ .e_string = try allocator.create(js_ast.E.String) };
+                    data.e_string.* = try expr.data.e_string.clone(allocator);
                 },
                 .e_null, .e_boolean, .e_number => {
                     data = expr.data;
@@ -134,7 +133,7 @@
                 },
             }
 
-            user_defines.putAssumeCapacity(entry.key, DefineData{
+            user_defines.putAssumeCapacity(entry.key_ptr.*, DefineData{
                 .value = data,
             });
         }
@@ -198,8 +197,8 @@ pub const Define = struct {
             // TODO: move this to comptime
             // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
             if (define.dots.getEntry(key)) |entry| {
-                var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
-                list.appendSliceAssumeCapacity(entry.value);
+                var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value_ptr.*.len + 1);
+                list.appendSliceAssumeCapacity(entry.value_ptr.*);
                 list.appendAssumeCapacity(DotDefine{
                     .parts = global[0..global.len],
                     .data = value_define,
@@ -232,11 +231,12 @@ pub const Define = struct {
         if (_user_defines) |user_defines| {
             var iter = user_defines.iterator();
             while (iter.next()) |user_define| {
+                const user_define_key = user_define.key_ptr.*;
                 // If it has a dot, then it's a DotDefine.
                 // e.g. process.env.NODE_ENV
-                if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
-                    const tail = user_define.key[last_dot + 1 .. user_define.key.len];
-                    const remainder = user_define.key[0..last_dot];
+                if (strings.lastIndexOfChar(user_define_key, '.')) |last_dot| {
+                    const tail = user_define_key[last_dot + 1 .. user_define_key.len];
+                    const remainder = user_define_key[0..last_dot];
                     const count = std.mem.count(u8, remainder, ".") + 1;
                     var parts = try allocator.alloc(string, count + 1);
                     var splitter = std.mem.split(remainder, ".");
@@ -250,16 +250,16 @@
                     // "NODE_ENV"
                     if (define.dots.getEntry(tail)) |entry| {
-                        for (entry.value) |*part| {
+                        for (entry.value_ptr.*) |*part| {
                             // ["process", "env"] === ["process", "env"] (if that actually worked)
                             if (arePartsEqual(part.parts, parts)) {
-                                part.data = part.data.merge(user_define.value);
+                                part.data = part.data.merge(user_define.value_ptr.*);
                                 didFind = true;
                                 break;
                             }
                         }
 
-                        initial_values = entry.value;
+                        initial_values = entry.value_ptr.*;
                     }
 
                     if (!didFind) {
@@ -269,7 +269,7 @@
                         }
 
                         list.appendAssumeCapacity(DotDefine{
-                            .data = user_define.value,
+                            .data = user_define.value_ptr.*,
                             // TODO: do we need to allocate this?
                             .parts = parts,
                        });
@@ -277,7 +277,7 @@
                    }
                } else {
                    // e.g. IS_BROWSER
-                    try define.identifiers.put(user_define.key, user_define.value);
+                    try define.identifiers.put(user_define_key, user_define.value_ptr.*);
                }
            }
        }
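Most of the defines.zig churn is the same mechanical migration seen above: iterator entries and getOrPut results now expose key_ptr/value_ptr pointers into the table rather than key/value copies, so reads dereference and in-place updates assign through the pointer. A standalone sketch of the new iteration idiom (not code from the commit):

    const std = @import("std");

    test "iterator entries expose pointers into the table" {
        var map = std.StringHashMap(u32).init(std.testing.allocator);
        defer map.deinit();
        try map.put("NODE_ENV", 1);

        var iter = map.iterator();
        while (iter.next()) |entry| {
            // entry.key_ptr.* reads the key; assigning through
            // entry.value_ptr mutates the stored value in place.
            std.debug.assert(std.mem.eql(u8, entry.key_ptr.*, "NODE_ENV"));
            entry.value_ptr.* += 1;
        }
        std.debug.assert(map.get("NODE_ENV").? == 2);
    }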
diff --git a/src/fs.zig b/src/fs.zig
index 7dbcc4f40..a6a1584f4 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -154,7 +154,7 @@ pub const FileSystem = struct {
             var iter = i.data.iterator();
             i.dir = dir;
             while (iter.next()) |entry| {
-                entry.value.dir = dir;
+                entry.value_ptr.dir = dir;
             }
         }
@@ -504,7 +504,7 @@ pub const FileSystem = struct {
             }
 
             var entry = watcher.getOrPutValue(path, WatchData{ .state = state }) catch unreachable;
-            entry.value.state = state;
+            entry.value_ptr.state = state;
         }
     }
@@ -519,7 +519,7 @@ pub const FileSystem = struct {
             defer fs.watcher_mutex.unlock();
 
             var entry = watcher.getOrPutValue(path, WatchData{ .state = .file_has_mod_key, .mod_key = key }) catch unreachable;
-            entry.value.mod_key = key;
+            entry.value_ptr.mod_key = key;
         }
 
         return key;
@@ -725,7 +725,7 @@ pub const FileSystem = struct {
             fs.watcher_mutex.lock();
             defer fs.watcher_mutex.unlock();
             var res = watcher.getOrPutValue(path, WatchData{ .state = .file_missing }) catch unreachable;
-            res.value.state = .file_missing;
+            res.value_ptr.state = .file_missing;
         }
     }
@@ -776,8 +776,8 @@ pub const FileSystem = struct {
             fs.watcher_mutex.lock();
             defer fs.watcher_mutex.unlock();
             var res = watcher.getOrPutValue(path, WatchData{}) catch unreachable;
-            res.value.state = .file_need_mod_key;
-            res.value.file_contents = file_contents;
+            res.value_ptr.state = .file_need_mod_key;
+            res.value_ptr.file_contents = file_contents;
         }
 
         return File{ .path = Path.init(path), .contents = file_contents };
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 546ac3f58..1802d7168 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -1472,7 +1472,6 @@ pub const Parser = struct {
     pub const Options = struct {
         jsx: options.JSX.Pragma,
         ts: bool = false,
-        ascii_only: bool = true,
         keep_names: bool = true,
         omit_runtime_for_tests: bool = false,
         ignore_dce_annotations: bool = true,
@@ -13039,7 +13038,7 @@ pub fn NewParser(
                     .s_export_clause => |clause| {
                         for (clause.items) |item| {
                             if (p.named_imports.getEntry(item.name.ref.?)) |_import| {
-                                _import.value.is_exported = true;
+                                _import.value_ptr.is_exported = true;
                             }
                         }
                     },
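The fs.zig watcher paths all follow one pattern: getOrPutValue inserts the supplied default only when the key is absent and returns an entry either way, so the write through value_ptr is what actually records the new state when the key already existed. A sketch with a trimmed-down stand-in for WatchData (the names are mine, not from fs.zig):

    const std = @import("std");

    const WatchState = enum { unknown, file_missing };
    const WatchData = struct { state: WatchState = .unknown };

    test "getOrPutValue then write through value_ptr" {
        var watcher = std.StringHashMap(WatchData).init(std.testing.allocator);
        defer watcher.deinit();

        var entry = try watcher.getOrPutValue("/tmp/a.js", WatchData{ .state = .file_missing });
        // For a pre-existing key the default above is ignored, so this
        // assignment is still required to update the stored state.
        entry.value_ptr.state = .file_missing;
        std.debug.assert(watcher.get("/tmp/a.js").?.state == .file_missing);
    }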
diff --git a/src/options.zig b/src/options.zig
index fd44c9d0a..954e01dfa 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -74,7 +74,7 @@ pub const ExternalModules = struct {
             // TODO: fix this stupid copy
             result.node_modules.hash_map.ensureCapacity(NodeBuiltinPatterns.len) catch unreachable;
             for (NodeBuiltinPatterns) |pattern| {
-                result.node_modules.put(pattern) catch unreachable;
+                result.node_modules.insert(pattern) catch unreachable;
             }
         }
@@ -97,12 +97,12 @@
                         .suffix = external[i + 1 .. external.len],
                     }) catch unreachable;
                 } else if (resolver.Resolver.isPackagePath(external)) {
-                    result.node_modules.put(external) catch unreachable;
+                    result.node_modules.insert(external) catch unreachable;
                 } else {
                     const normalized = validatePath(log, fs, cwd, external, allocator, "external path");
 
                     if (normalized.len > 0) {
-                        result.abs_paths.put(normalized) catch unreachable;
+                        result.abs_paths.insert(normalized) catch unreachable;
                     }
                 }
             }
@@ -470,6 +470,14 @@ pub const Timings = struct {
     read_file: i128 = 0,
 };
 
+pub const DefaultUserDefines = struct {
+    // This must be globally scoped so it doesn't disappear
+    pub const NodeEnv = struct {
+        pub const Key = "process.env.NODE_ENV";
+        pub const Value = "\"development\"";
+    };
+};
+
 pub const BundleOptions = struct {
     footer: string = "",
     banner: string = "",
@@ -548,7 +556,7 @@ pub const BundleOptions = struct {
         var user_defines = try stringHashMapFromArrays(defines.RawDefines, allocator, transform.define_keys, transform.define_values);
 
         if (transform.define_keys.len == 0) {
-            try user_defines.put("process.env.NODE_ENV", "\"development\"");
+            try user_defines.put(DefaultUserDefines.NodeEnv.Key, DefaultUserDefines.NodeEnv.Value);
         }
 
         var resolved_defines = try defines.DefineData.from_input(user_defines, log, allocator);
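The put → insert renames here, and the exists → contains renames in resolver.zig below, track the renamed std set API; node_modules and abs_paths appear to be std.BufSet-like string sets (note ensureCapacity going through the wrapped hash_map). A sketch assuming std.BufSet semantics:

    const std = @import("std");

    test "BufSet uses insert/contains" {
        var set = std.BufSet.init(std.testing.allocator);
        defer set.deinit();
        try set.insert("fs"); // copies the string into the set
        std.debug.assert(set.contains("fs"));
        std.debug.assert(!set.contains("left-pad"));
    }

DefaultUserDefines, meanwhile, gives the default process.env.NODE_ENV define a single named, container-scoped home that BundleOptions references instead of repeating the literals inline.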
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 3f421d6ff..489dbbfed 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -529,7 +529,7 @@ pub fn NewResolver(cache_files: bool) type {
             }
         }
 
-        if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.exists(import_path)) {
+        if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.contains(import_path)) {
             // If the string literal in the source text is an absolute path and has
             // been marked as an external module, mark it as *not* an absolute path.
             // That way we preserve the literal text in the output and don't generate
@@ -569,7 +569,7 @@ pub fn NewResolver(cache_files: bool) type {
             const parts = [_]string{ source_dir, import_path };
             const abs_path = r.fs.absBuf(&parts, &relative_abs_path_buf);
 
-            if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.exists(abs_path)) {
+            if (r.opts.external.abs_paths.count() > 0 and r.opts.external.abs_paths.contains(abs_path)) {
                 // If the string literal in the source text is an absolute path and has
                 // been marked as an external module, mark it as *not* an absolute path.
                 // That way we preserve the literal text in the output and don't generate
@@ -643,7 +643,7 @@ pub fn NewResolver(cache_files: bool) type {
             if (r.opts.external.node_modules.count() > 0) {
                 var query = import_path;
                 while (true) {
-                    if (r.opts.external.node_modules.exists(query)) {
+                    if (r.opts.external.node_modules.contains(query)) {
                         if (r.debug_logs) |*debug| {
                             debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query}) catch {};
                         }
@@ -743,6 +743,27 @@ pub fn NewResolver(cache_files: bool) type {
             return result;
         }
 
+        // This is a fallback, hopefully not called often. It should be relatively quick because everything should be in the cache.
+        pub fn packageJSONForResolvedNodeModule(r: *ThisResolver, result: *const Result) ?*const PackageJSON {
+            var current_dir = std.fs.path.dirname(result.path_pair.primary.text);
+            while (current_dir != null) {
+                var dir_info = (r.dirInfoCached(current_dir orelse unreachable) catch null) orelse return null;
+
+                if (dir_info.package_json) |pkg| {
+                    // if it doesn't have a name, assume it's something just for adjusting the main fields (react-bootstrap does this)
+                    // In that case, we really would like the top-level package that you download from NPM
+                    // so we ignore any unnamed packages
+                    if (pkg.name.len > 0) {
+                        return pkg;
+                    }
+                }
+
+                current_dir = std.fs.path.dirname(current_dir.?);
+            }
+
+            return null;
+        }
+
         pub fn loadNodeModules(r: *ThisResolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult {
             var res = _loadNodeModules(r, import_path, kind, _dir_info) orelse return null;
             res.is_node_module = true;
@@ -1111,10 +1132,10 @@ pub fn NewResolver(cache_files: bool) type {
             {
                 var iter = tsconfig.paths.iterator();
                 while (iter.next()) |entry| {
-                    const key = entry.key;
+                    const key = entry.key_ptr.*;
 
                     if (strings.eql(key, path)) {
-                        for (entry.value) |original_path| {
+                        for (entry.value_ptr.*) |original_path| {
                             var absolute_original_path = original_path;
                             var was_alloc = false;
@@ -1148,8 +1169,8 @@
             var iter = tsconfig.paths.iterator();
             while (iter.next()) |entry| {
-                const key = entry.key;
-                const original_paths = entry.value;
+                const key = entry.key_ptr.*;
+                const original_paths = entry.value_ptr.*;
 
                 if (strings.indexOfChar(key, '*')) |star_index| {
                     const prefix = key[0..star_index];
@@ -1320,11 +1341,14 @@
                     .path_pair = PathPair{ .primary = Fs.Path.init(result.path) },
                     .package_json_name = package_json.name,
                     .package_json_version = package_json.version,
+                    .dirname_fd = result.dirname_fd,
                 };
             }
 
             return MatchResult{
                 .path_pair = PathPair{ .primary = Fs.Path.init(result.path) },
+                .dirname_fd = result.dirname_fd,
+                .diff_case = result.diff_case,
             };
         }
@@ -1355,9 +1379,20 @@
                 debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable;
             }
 
+            if (dir_info.package_json) |package_json| {
+                return MatchResult{
+                    .path_pair = .{ .primary = Path.init(out_buf) },
+                    .diff_case = lookup.diff_case,
+                    .package_json_name = package_json.name,
+                    .package_json_version = package_json.version,
+                    .dirname_fd = dir_info.getFileDescriptor(),
+                };
+            }
+
             return MatchResult{
                 .path_pair = .{ .primary = Path.init(out_buf) },
                 .diff_case = lookup.diff_case,
+                .dirname_fd = dir_info.getFileDescriptor(),
             };
         }
@@ -1442,6 +1477,7 @@
                 }
             }
         }
+
         return MatchResult{
             .path_pair = .{ .primary = Path.init(file.path) },
             .diff_case = file.diff_case,
@@ -1486,7 +1522,7 @@
                 continue;
             };
 
-            const _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue;
+            var _result = r.loadFromMainField(path, dir_info, field_rel_path, key, extension_order) orelse continue;
 
             // If the user did not manually configure a "main" field order, then
             // use a special per-module automatic algorithm to decide whether to
@@ -1547,6 +1583,8 @@
                 }
             }
 
+            if (_result.package_json_version == null) _result.package_json_version = package_json_version;
+            if (_result.package_json_name == null) _result.package_json_name = package_json_name;
             return _result;
         }
     }
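The new packageJSONForResolvedNodeModule fallback climbs from the resolved file toward the filesystem root and returns the first package.json carrying a non-empty name, skipping unnamed main-field shims; std.fs.path.dirname returns null at the root, which is what terminates the walk. The loop shape in isolation (the path is made up for illustration):

    const std = @import("std");

    test "dirname walk terminates at the root" {
        var current: ?[]const u8 = std.fs.path.dirname("/app/node_modules/react/index.js");
        var visited: usize = 0;
        while (current) |dir| : (current = std.fs.path.dirname(dir)) {
            visited += 1; // a real caller would check dir's package.json here
        }
        std.debug.assert(visited == 4); // react/, node_modules/, app/, and /
    }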
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index 2e07f04ba..c0b72d178 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -97,6 +97,10 @@ pub fn endsWithAnyComptime(self: string, comptime str: string) bool {
 pub fn eql(self: string, other: anytype) bool {
     if (self.len != other.len) return false;
+    if (comptime @TypeOf(other) == *string) {
+        return eql(self, other.*);
+    }
+
     for (self) |c, i| {
         if (other[i] != c) return false;
     }
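This eql branch is what lets call sites such as defines.zig pass entry.key_ptr (a *string) straight through: with other typed anytype, the @TypeOf check is a compile-time decision, so pointer arguments are unwrapped before the byte comparison. A self-contained version of the same function, with string aliased locally (the real one comes from this file's imports) and the length check auto-dereferencing through the pointer:

    const std = @import("std");
    const string = []const u8;

    pub fn eql(self: string, other: anytype) bool {
        if (self.len != other.len) return false;
        if (comptime @TypeOf(other) == *string) {
            return eql(self, other.*);
        }
        for (self) |c, i| {
            if (other[i] != c) return false;
        }
        return true;
    }

    test "eql accepts slices and pointers to slices" {
        var key: string = "NODE_ENV";
        std.debug.assert(eql("NODE_ENV", key));
        std.debug.assert(eql("NODE_ENV", &key));
    }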