-rw-r--r--   src/ast/base.zig               |  2
-rw-r--r--   src/bundler.zig                | 42
-rw-r--r--   src/fs.zig                     | 24
-rw-r--r--   src/js_ast.zig                 | 10
-rw-r--r--   src/js_parser/js_parser.zig    | 12
-rw-r--r--   src/resolver/resolver.zig      |  8
-rw-r--r--   src/thread_safe_hash_map.zig   | 19
7 files changed, 92 insertions, 25 deletions
diff --git a/src/ast/base.zig b/src/ast/base.zig
index 3f10beb37..361339f7f 100644
--- a/src/ast/base.zig
+++ b/src/ast/base.zig
@@ -57,7 +57,7 @@ pub const Ref = packed struct {
     }
 
     pub fn eql(ref: Ref, b: Ref) bool {
-        return ref.inner_index == b.inner_index and ref.source_index == b.source_index;
+        return std.mem.readIntNative(u64, std.mem.asBytes(&ref)) == std.mem.readIntNative(u64, std.mem.asBytes(&b));
     }
 
     pub fn jsonStringify(self: *const Ref, options: anytype, writer: anytype) !void {
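The Ref.eql change above collapses a two-field comparison into a single 64-bit integer comparison by reinterpreting the packed struct's bytes. A minimal sketch of the same trick, with a hypothetical PackedId standing in for the real Ref and assuming the struct occupies exactly 8 bytes:

const std = @import("std");

// Hypothetical 64-bit packed id; the real Ref has different fields.
const PackedId = packed struct {
    source_index: u32,
    inner_index: u32,

    pub fn eql(a: PackedId, b: PackedId) bool {
        // Reinterpret both values as raw bytes and compare them as one u64
        // instead of comparing each field separately.
        return std.mem.readIntNative(u64, std.mem.asBytes(&a)) ==
            std.mem.readIntNative(u64, std.mem.asBytes(&b));
    }
};

test "packed struct compared as a single u64" {
    const a = PackedId{ .source_index = 1, .inner_index = 2 };
    const b = PackedId{ .source_index = 1, .inner_index = 2 };
    const c = PackedId{ .source_index = 1, .inner_index = 3 };
    std.debug.assert(PackedId.eql(a, b));
    std.debug.assert(!PackedId.eql(a, c));
}

The payoff is one compare-and-branch instead of two, with no short-circuit logic.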
diff --git a/src/bundler.zig b/src/bundler.zig
index 4d88b27f2..42b515678 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -52,6 +52,7 @@ pub const ServeResult = struct {
 // const BundleMap =
 const ResolveResults = ThreadSafeHashMap.ThreadSafeStringHashMap(Resolver.Resolver.Result);
+const ResolveQueue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic);
 pub const Bundler = struct {
     options: options.BundleOptions,
     log: *logger.Log,
@@ -62,7 +63,7 @@ pub const Bundler = struct {
     //  thread_pool: *ThreadPool,
     output_files: std.ArrayList(options.OutputFile),
     resolve_results: *ResolveResults,
-    resolve_queue: std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic),
+    resolve_queue: ResolveQueue,
     elapsed: i128 = 0,
     needs_runtime: bool = false,
@@ -96,7 +97,7 @@ pub const Bundler = struct {
             //  .thread_pool = pool,
             .result = options.TransformResult{ .outbase = bundle_options.output_dir },
             .resolve_results = try ResolveResults.init(allocator),
-            .resolve_queue = std.fifo.LinearFifo(Resolver.Resolver.Result, std.fifo.LinearFifoBufferType.Dynamic).init(allocator),
+            .resolve_queue = ResolveQueue.init(allocator),
             .output_files = std.ArrayList(options.OutputFile).init(allocator),
         };
     }
@@ -171,20 +172,32 @@ pub const Bundler = struct {
                 // Run the resolver
                 // Don't parse/print automatically.
                 if (bundler.options.resolve_mode != .lazy) {
-                    var hash_key = resolve_result.path_pair.primary.text;
+                    try bundler.enqueueResolveResult(&resolve_result);
+                }
-                    // Shorter hash key is faster to hash
-                    if (strings.startsWith(resolve_result.path_pair.primary.text, bundler.fs.top_level_dir)) {
-                        hash_key = resolve_result.path_pair.primary.text[bundler.fs.top_level_dir.len..];
-                    }
+                import_record.path = try bundler.generateImportPath(source_dir, resolve_result.path_pair.primary.text);
+            }
-                    if (!bundler.resolve_results.contains(hash_key)) {
-                        try bundler.resolve_results.put(hash_key, resolve_result);
-                        try bundler.resolve_queue.writeItem(resolve_result);
-                    }
+            pub fn resolveResultHashKey(bundler: *Bundler, resolve_result: *Resolver.Resolver.Result) string {
+                var hash_key = resolve_result.path_pair.primary.text;
+
+                // Shorter hash key is faster to hash
+                if (strings.startsWith(resolve_result.path_pair.primary.text, bundler.fs.top_level_dir)) {
+                    hash_key = resolve_result.path_pair.primary.text[bundler.fs.top_level_dir.len..];
                 }
-                import_record.path = try bundler.generateImportPath(source_dir, resolve_result.path_pair.primary.text);
+                return hash_key;
+            }
+
+            pub fn enqueueResolveResult(bundler: *Bundler, resolve_result: *Resolver.Resolver.Result) !void {
+                const hash_key = bundler.resolveResultHashKey(resolve_result);
+
+                const get_or_put_entry = try bundler.resolve_results.backing.getOrPut(hash_key);
+
+                if (!get_or_put_entry.found_existing) {
+                    get_or_put_entry.entry.value = resolve_result.*;
+                    try bundler.resolve_queue.writeItem(resolve_result.*);
+                }
             }
 
     pub fn buildWithResolveResult(bundler: *Bundler, resolve_result: Resolver.Resolver.Result) !?options.OutputFile {
@@ -608,7 +621,10 @@
         if (enableTracing) {
             Output.print(
                 "\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n",
-                .{ bundler.resolver.elapsed, bundler.elapsed },
+                .{
+                    bundler.resolver.elapsed,
+                    bundler.elapsed,
+                },
             );
         }
diff --git a/src/fs.zig b/src/fs.zig
index d35eacff3..f13ee0495 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -9,6 +9,7 @@
 const Semaphore = sync.Semaphore;
 const path_handler = @import("./resolver/resolve_path.zig");
 const allocators = @import("./allocators.zig");
+const hash_map = @import("hash_map.zig");
 
 // pub const FilesystemImplementation = @import("fs_impl.zig");
 
@@ -81,7 +82,7 @@ pub const FileSystem = struct {
     }
 
     pub const DirEntry = struct {
-        pub const EntryMap = std.StringHashMap(EntryStore.ListIndex);
+        pub const EntryMap = hash_map.StringHashMap(EntryStore.ListIndex);
         pub const EntryStore = allocators.BSSList(Entry, Preallocate.Counts.files);
         dir: string,
         fd: StoredFileDescriptorType = 0,
@@ -187,6 +188,27 @@ pub const FileSystem = struct {
             return Entry.Lookup{ .entry = result, .diff_case = null };
         }
+
+        pub fn getComptimeQuery(entry: *const DirEntry, comptime query_str: anytype) ?Entry.Lookup {
+            comptime var query: [query_str.len]u8 = undefined;
+            comptime for (query_str) |c, i| {
+                query[i] = std.ascii.toLower(c);
+            };
+
+            const query_hashed = DirEntry.EntryMap.getHash(&query);
+
+            const result_index = entry.data.getWithHash(&query, query_hashed) orelse return null;
+            const result = EntryStore.instance.at(result_index) orelse return null;
+            if (!strings.eql(result.base, query)) {
+                return Entry.Lookup{ .entry = result, .diff_case = Entry.Lookup.DifferentCase{
+                    .dir = entry.dir,
+                    .query = &query,
+                    .actual = result.base,
+                } };
+            }
+
+            return Entry.Lookup{ .entry = result, .diff_case = null };
+        }
     };
 
     pub const Entry = struct {
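getComptimeQuery above lowercases a comptime-known filename at compile time and hashes it once, so repeated directory lookups for fixed names like "package.json" skip the per-call normalization. A small sketch of just the comptime-lowering step, mirroring the hunk's loop for the same Zig version the diff targets; comptimeLower is a hypothetical helper, not part of the repository:

const std = @import("std");

// Hypothetical helper: lower-case a comptime-known string at compile time,
// so the runtime path only pays for the hashed lookup, not the normalization.
fn comptimeLower(comptime query_str: anytype) [query_str.len]u8 {
    comptime var query: [query_str.len]u8 = undefined;
    comptime for (query_str) |c, i| {
        query[i] = std.ascii.toLower(c);
    };
    return query;
}

test "query is normalized before the program runs" {
    const q = comptime comptimeLower("Package.JSON");
    std.debug.assert(std.mem.eql(u8, &q, "package.json"));
}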
diff --git a/src/js_ast.zig b/src/js_ast.zig
index efd2374e9..5b3f1a7c1 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -4333,7 +4333,15 @@ pub const Scope = struct {
 
     // Do not make this a packed struct
     // Two hours of debugging time lost to that.
-    pub const Member = struct { ref: Ref, loc: logger.Loc };
+    // It causes a crash due to undefined memory
+    pub const Member = struct {
+        ref: Ref,
+        loc: logger.Loc,
+
+        pub fn eql(a: Member, b: Member) bool {
+            return @call(.{ .modifier = .always_inline }, Ref.eql, .{ a.ref, b.ref }) and a.loc.start == b.loc.start;
+        }
+    };
 
     pub const Kind = enum(u8) {
         block,
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 27789be9a..ffa401bb7 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -2088,7 +2088,7 @@ pub const P = struct {
     pub fn findSymbol(p: *P, loc: logger.Loc, name: string) !FindSymbolResult {
         var declare_loc: logger.Loc = undefined;
         var is_inside_with_scope = false;
-        const hash = p.module_scope.members.getHash(name);
+        const hash = @TypeOf(p.module_scope.members).getHash(name);
 
         const ref: Ref = brk: {
             var _scope: ?*Scope = p.current_scope;
@@ -2486,6 +2486,11 @@ pub const P = struct {
             // Check for collisions that would prevent to hoisting "var" symbols up to the enclosing function scope
             var __scope = scope.parent;
 
+            var hash: u64 = undefined;
+            if (__scope) |_scope| {
+                hash = @TypeOf(_scope.members).getHash(symbol.original_name);
+            }
+
             while (__scope) |_scope| {
                 // Variable declarations hoisted past a "with" statement may actually end
                 // up overwriting a property on the target of the "with" statement instead
@@ -2501,7 +2506,7 @@ pub const P = struct {
                     symbol.must_not_be_renamed = true;
                 }
 
-                if (_scope.members.getEntry(symbol.original_name)) |existing_member_entry| {
+                if (_scope.members.getEntryWithHash(symbol.original_name, hash)) |existing_member_entry| {
                     const existing_member = existing_member_entry.value;
                     const existing_symbol: Symbol = p.symbols.items[existing_member.ref.inner_index];
@@ -2518,13 +2523,12 @@ pub const P = struct {
                     {
                         // Silently merge this symbol into the existing symbol
                         symbol.link = existing_member.ref;
-                        _scope.members.put(symbol.original_name, existing_member) catch unreachable;
                         continue :nextMember;
                     }
                 }
 
                 if (_scope.kindStopsHoisting()) {
-                    _scope.members.put(symbol.original_name, res.value) catch unreachable;
+                    _scope.members.putWithHash(symbol.original_name, hash, res.value) catch unreachable;
                     break;
                 }
                 __scope = _scope.parent;
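The findSymbol and hoisting hunks above hash the symbol name once, then reuse that hash for every scope in the chain via getEntryWithHash and putWithHash, rather than rehashing the same string per scope. A self-contained sketch of the pattern; MiniScope and Entry are hypothetical illustrations, not the parser's real structures:

const std = @import("std");

// Hypothetical stand-ins for the parser's scope members; the point is only
// that the key is hashed once and the precomputed hash is reused per scope.
const Entry = struct { hash: u64, name: []const u8, ref: u32 };

const MiniScope = struct {
    members: []const Entry,

    fn getWithHash(scope: MiniScope, name: []const u8, hash: u64) ?u32 {
        for (scope.members) |member| {
            // Cheap integer comparison first; only fall back to the string
            // comparison when the hashes match.
            if (member.hash == hash and std.mem.eql(u8, member.name, name)) {
                return member.ref;
            }
        }
        return null;
    }
};

test "one hash, many scope probes" {
    const name = "original_name";
    const hash = std.hash.Wyhash.hash(0, name);

    const inner_members = [_]Entry{.{ .hash = hash, .name = name, .ref = 7 }};
    const scopes = [_]MiniScope{
        .{ .members = &[_]Entry{} }, // an empty outer scope
        .{ .members = &inner_members },
    };

    var found: ?u32 = null;
    for (scopes) |scope| {
        found = scope.getWithHash(name, hash) orelse continue;
        break;
    }
    std.debug.assert(found.? == 7);
}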
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 929fdb27c..15d411596 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -1635,7 +1635,7 @@ pub const Resolver = struct {
         var base = std.fs.path.basename(path);
         // if (entries != null) {
         if (!strings.eqlComptime(base, "node_modules")) {
-            if (entries.get("node_modules")) |entry| {
+            if (entries.getComptimeQuery("node_modules")) |entry| {
                 // the catch might be wrong!
                 info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
             }
         }
@@ -1675,7 +1675,7 @@ pub const Resolver = struct {
         }
 
         // Record if this directory has a package.json file
-        if (entries.get("package.json")) |lookup| {
+        if (entries.getComptimeQuery("package.json")) |lookup| {
             const entry = lookup.entry;
             if (entry.kind(rfs) == .file) {
                 info.package_json = r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0) catch null;
@@ -1698,7 +1698,7 @@ pub const Resolver = struct {
         {
             var tsconfig_path: ?string = null;
             if (r.opts.tsconfig_override == null) {
-                if (entries.get("tsconfig.json")) |lookup| {
+                if (entries.getComptimeQuery("tsconfig.json")) |lookup| {
                     const entry = lookup.entry;
                     if (entry.kind(rfs) == .file) {
                         const parts = [_]string{ path, "tsconfig.json" };
@@ -1707,7 +1707,7 @@ pub const Resolver = struct {
                 }
             }
             if (tsconfig_path == null) {
-                if (entries.get("jsconfig.json")) |lookup| {
+                if (entries.getComptimeQuery("jsconfig.json")) |lookup| {
                     const entry = lookup.entry;
                     if (entry.kind(rfs) == .file) {
                         const parts = [_]string{ path, "jsconfig.json" };
diff --git a/src/thread_safe_hash_map.zig b/src/thread_safe_hash_map.zig
index 40aeff7d6..5c3a8cf51 100644
--- a/src/thread_safe_hash_map.zig
+++ b/src/thread_safe_hash_map.zig
@@ -1,9 +1,10 @@
 const std = @import("std");
 const sync = @import("sync.zig");
 usingnamespace @import("global.zig");
+const hash_map = @import("hash_map.zig");
 
 pub fn ThreadSafeStringHashMap(comptime Value: type) type {
-    const HashMapType = std.StringHashMap(Value);
+    const HashMapType = hash_map.StringHashMap(Value);
     return struct {
         backing: HashMapType,
         lock: sync.RwLock,
@@ -22,12 +23,22 @@ pub fn ThreadSafeStringHashMap(comptime Value: type) type {
             return self.backing.get(key);
         }
 
+        pub fn getHash(key: string) u64 {
+            return HashMapType.getHash(key);
+        }
+
         pub fn contains(self: *HashMap, str: string) bool {
            self.lock.lockShared();
            defer self.lock.unlockShared();
            return self.backing.contains(str);
         }
 
+        pub fn containsHash(self: *HashMap, hash: u64) bool {
+            self.lock.lockShared();
+            defer self.lock.unlockShared();
+            return self.backing.contains(str);
+        }
+
         pub fn deinit(self: *HashMap, allocator: *std.mem.Allocator) void {
             self.backing.deinit();
         }
@@ -37,5 +48,11 @@ pub fn ThreadSafeStringHashMap(comptime Value: type) type {
             defer self.lock.unlock();
             try self.backing.put(key, value);
         }
+
+        pub fn putWithHash(self: *HashMap, key: string, hash: u64, value: Value) !void {
+            self.lock.lock();
+            defer self.lock.unlock();
+            try self.backing.put(key, value);
+        }
     };
 }
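A hedged usage sketch of the wrapper above, showing what the new getHash/putWithHash pair is for: hash the key once and pass the precomputed hash into the locked call. Assumptions are marked in comments; in particular, Value is u32 only for illustration (the bundler stores Resolver.Resolver.Result), and init is assumed to heap-allocate the map, as the `resolve_results: *ResolveResults` field in bundler.zig suggests.

const std = @import("std");
const ThreadSafeHashMap = @import("./thread_safe_hash_map.zig");

// Hypothetical value type; the bundler's real map holds resolve results.
const PathCache = ThreadSafeHashMap.ThreadSafeStringHashMap(u32);

pub fn example(allocator: *std.mem.Allocator) !void {
    // Assumed signature: init returns a heap-allocated map, matching the
    // *ResolveResults field type used in bundler.zig.
    var cache = try PathCache.init(allocator);
    defer cache.deinit(allocator);

    const key = "/src/index.js";

    // Hash the key once, then reuse the hash for the locked write.
    const hash = PathCache.getHash(key);
    try cache.putWithHash(key, hash, 1);

    std.debug.assert(cache.get(key).? == 1);
}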