diff options
author | 2023-02-07 08:24:34 +0200 | |
---|---|---|
committer | 2023-02-06 22:24:34 -0800 | |
commit | 5a19f8434bc1f1e1eb3cb7c79f15638b8cbcd64f (patch) | |
tree | dcbe6b2bd433caf524b6e9c444c531cb0652ac4c /src | |
parent | d5f3bb1c7402ed04fd60272c9c40d13a4fd22bca (diff) | |
download | bun-5a19f8434bc1f1e1eb3cb7c79f15638b8cbcd64f.tar.gz bun-5a19f8434bc1f1e1eb3cb7c79f15638b8cbcd64f.tar.zst bun-5a19f8434bc1f1e1eb3cb7c79f15638b8cbcd64f.zip |
[WIP] fix corner cases with aliased dependencies (#2000)
* fix corner cases with aliased dependencies
* convert legacy `bun.lockb` in memory
Diffstat (limited to 'src')
-rw-r--r-- | src/cli/package_manager_command.zig | 37 | ||||
-rw-r--r-- | src/install/extract_tarball.zig | 6 | ||||
-rw-r--r-- | src/install/install.zig | 194 | ||||
-rw-r--r-- | src/install/lockfile.zig | 417 | ||||
-rw-r--r-- | src/install/resolution.zig | 5 | ||||
-rw-r--r-- | src/resolver/resolver.zig | 3 |
6 files changed, 293 insertions, 369 deletions
diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index d81dcbab0..d783b86c9 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -6,6 +6,7 @@ const strings = @import("bun").strings; const Lockfile = @import("../install/lockfile.zig"); const NodeModulesFolder = Lockfile.Tree.NodeModulesFolder; const PackageID = @import("../install/install.zig").PackageID; +const DependencyID = @import("../install/install.zig").DependencyID; const PackageInstaller = @import("../install/install.zig").PackageInstaller; const Global = @import("bun").Global; const Output = @import("bun").Output; @@ -166,16 +167,7 @@ pub const PackageManagerCommand = struct { Output.flush(); Output.disableBuffering(); const lockfile = load_lockfile.ok; - - const parts = lockfile.packages.slice(); - const names = parts.items(.name); - - var iterator = Lockfile.Tree.Iterator.init( - lockfile.buffers.trees.items, - lockfile.buffers.hoisted_packages.items, - names, - lockfile.buffers.string_bytes.items, - ); + var iterator = Lockfile.Tree.Iterator.init(lockfile); var directories = std.ArrayList(NodeModulesFolder).init(ctx.allocator); defer directories.deinit(); @@ -183,13 +175,12 @@ pub const PackageManagerCommand = struct { const path = try ctx.allocator.alloc(u8, node_modules.relative_path.len); std.mem.copy(u8, path, node_modules.relative_path); - const packages = try ctx.allocator.alloc(PackageID, node_modules.packages.len); - std.mem.copy(PackageID, packages, node_modules.packages); + const dependencies = try ctx.allocator.alloc(DependencyID, node_modules.dependencies.len); + std.mem.copy(PackageID, dependencies, node_modules.dependencies); - const folder: NodeModulesFolder = .{ + const folder = NodeModulesFolder{ .relative_path = @ptrCast(stringZ, path), - .in = node_modules.in, - .packages = packages, + .dependencies = dependencies, }; directories.append(folder) catch unreachable; } @@ -198,7 +189,7 @@ pub const 
PackageManagerCommand = struct { // TODO: find max depth beforehand var more_packages = [_]bool{false} ** 16; - if (first_directory.packages.len > 1) more_packages[0] = true; + if (first_directory.dependencies.len > 1) more_packages[0] = true; const recurse = strings.leftHasAnyInRight(args, &.{ "-A", "-a", "--all" }); if (recurse) { @@ -214,6 +205,7 @@ pub const PackageManagerCommand = struct { Output.println("{s} node_modules ({d})", .{ path, package_ids.len }); Output.enableBuffering(); + const names = lockfile.packages.items(.name); const string_bytes = lockfile.buffers.string_bytes.items; for (package_ids) |package_id, i| { @@ -232,7 +224,7 @@ pub const PackageManagerCommand = struct { Output.prettyln( \\bun pm - package manager related commands - \\ + \\ \\ bun pm <b>bin<r> print the path to bin folder \\ bun pm <b>-g bin<r> print the <b>global<r> path to bin folder \\ bun pm <b>ls<r> list the dependency tree according to the current lockfile @@ -266,9 +258,7 @@ fn printNodeModulesFolderStructure( ) void { const allocator = lockfile.allocator; var more_packages = more_packages_; - const parts = lockfile.packages.slice(); - const names = parts.items(.name); - const resolutions = parts.items(.resolution); + const resolutions = lockfile.packages.items(.resolution); const string_bytes = lockfile.buffers.string_bytes.items; { @@ -318,8 +308,8 @@ fn printNodeModulesFolderStructure( } } - for (directory.packages) |package_id, package_index| { - const package_name_ = names[package_id].slice(string_bytes); + for (directory.dependencies) |dependency_id, index| { + const package_name_ = lockfile.buffers.dependencies.items[dependency_id].name.slice(string_bytes); const package_name = allocator.alloc(u8, package_name_.len) catch unreachable; defer allocator.free(package_name); std.mem.copy(u8, package_name, package_name_); @@ -327,10 +317,11 @@ fn printNodeModulesFolderStructure( var possible_path = std.fmt.allocPrint(allocator, "{s}/{s}/node_modules", .{ 
directory.relative_path, package_name }) catch unreachable; defer allocator.free(possible_path); - if (package_index + 1 == directory.packages.len) { + if (index + 1 == directory.dependencies.len) { more_packages[depth] = false; } + const package_id = lockfile.buffers.resolutions.items[dependency_id]; var dir_index: usize = 0; var found_node_modules = false; while (dir_index < directories.items.len) : (dir_index += 1) { diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index 81fdf557d..7ba085297 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -6,9 +6,7 @@ const logger = bun.logger; const Output = bun.Output; const FileSystem = @import("../fs.zig").FileSystem; const Install = @import("./install.zig"); -const Features = Install.Features; -const Lockfile = Install.Lockfile; -const PackageID = Install.PackageID; +const DependencyID = Install.DependencyID; const PackageManager = Install.PackageManager; const Integrity = @import("./integrity.zig").Integrity; const Npm = @import("./npm.zig"); @@ -24,7 +22,7 @@ resolution: Resolution, registry: string, cache_dir: std.fs.Dir, temp_dir: std.fs.Dir, -package_id: PackageID, +dependency_id: DependencyID, skip_verify: bool = false, integrity: Integrity = Integrity{}, url: string = "", diff --git a/src/install/install.zig b/src/install/install.zig index 2c98666df..db12c2f3a 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -1521,12 +1521,12 @@ pub const PackageManager = struct { return bun.cast(*const fn (ctx: *anyopaque, pm: *PackageManager) void, t.handler); } - pub inline fn getonDependencyError(t: @This()) *const fn (ctx: *anyopaque, Dependency, PackageID, anyerror) void { + pub inline fn getonDependencyError(t: @This()) *const fn (ctx: *anyopaque, Dependency, DependencyID, anyerror) void { return bun.cast(*const fn (ctx: *anyopaque, Dependency, PackageID, anyerror) void, t.handler); } }; - pub fn failRootResolution(this: *PackageManager, 
dependency: *const Dependency, dependency_id: PackageID, err: anyerror) void { + pub fn failRootResolution(this: *PackageManager, dependency: *const Dependency, dependency_id: DependencyID, err: anyerror) void { if (this.dynamic_root_dependencies) |*dynamic| { dynamic.items[dependency_id].failed = err; if (this.onWake.context) |ctx| { @@ -1559,7 +1559,7 @@ pub const PackageManager = struct { } const DependencyToEnqueue = union(enum) { - pending: PackageID, + pending: DependencyID, resolution: struct { package_id: PackageID, resolution: Resolution }, not_found: void, failure: anyerror, @@ -1586,7 +1586,7 @@ pub const PackageManager = struct { }, }; } - return .{ .pending = @truncate(u32, i) }; + return .{ .pending = @truncate(DependencyID, i) }; } } } @@ -1600,18 +1600,14 @@ pub const PackageManager = struct { }; dependency.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder); - builder.allocate() catch |err| { - return .{ .failure = err }; - }; + builder.allocate() catch |err| return .{ .failure = err }; const cloned_dependency = dependency.cloneWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder) catch unreachable; builder.clamp(); - const index = @truncate(u32, root_deps.items.len); - root_deps.append( - .{ - .dependency = cloned_dependency, - }, - ) catch unreachable; + const index = @truncate(DependencyID, root_deps.items.len); + root_deps.append(.{ + .dependency = cloned_dependency, + }) catch unreachable; if (is_main) { this.enqueueDependencyWithMainAndSuccessFn( index, @@ -2179,7 +2175,7 @@ pub const PackageManager = struct { name_hash: PackageNameHash, name: String, version: Dependency.Version, - dependency_id: PackageID, + dependency_id: DependencyID, behavior: Behavior, manifest: *const Npm.PackageManifest, find_result: Npm.PackageManifest.FindResult, @@ -2246,6 +2242,7 @@ pub const PackageManager = struct { package.resolution.value.npm.version, ), manifest.str(&find_result.package.tarball_url), + dependency_id, 
package, ) orelse unreachable, }, @@ -2253,7 +2250,13 @@ pub const PackageManager = struct { }; } - pub fn generateNetworkTaskForTarball(this: *PackageManager, task_id: u64, url: string, package: Lockfile.Package) !?*NetworkTask { + pub fn generateNetworkTaskForTarball( + this: *PackageManager, + task_id: u64, + url: string, + dependency_id: DependencyID, + package: Lockfile.Package, + ) !?*NetworkTask { const dedupe_entry = try this.network_dedupe_map.getOrPut(this.allocator, task_id); if (dedupe_entry.found_existing) { return null; @@ -2272,7 +2275,7 @@ pub const PackageManager = struct { try network_task.forTarball( this.allocator, - ExtractTarball{ + .{ .package_manager = &PackageManager.instance, // https://github.com/ziglang/zig/issues/14005 .name = if (package.name.len() >= strings.StringOrTinyString.Max) strings.StringOrTinyString.init( @@ -2288,7 +2291,7 @@ pub const PackageManager = struct { .cache_dir = this.getCacheDirectory().dir, .temp_dir = this.getTemporaryDirectory().dir, .registry = scope.url.href, - .package_id = package.meta.id, + .dependency_id = dependency_id, .integrity = package.meta.integrity, .url = url, }, @@ -2306,9 +2309,9 @@ pub const PackageManager = struct { this.network_task_fifo.writeItemAssumeCapacity(task); } - const SuccessFn = *const fn (*PackageManager, PackageID, PackageID) void; + const SuccessFn = *const fn (*PackageManager, DependencyID, PackageID) void; const FailFn = *const fn (*PackageManager, *const Dependency, PackageID, anyerror) void; - fn assignResolution(this: *PackageManager, dependency_id: PackageID, package_id: PackageID) void { + fn assignResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void { if (comptime Environment.allow_assert) { std.debug.assert(dependency_id < this.lockfile.buffers.resolutions.items.len); std.debug.assert(package_id < this.lockfile.packages.len); @@ -2317,7 +2320,7 @@ pub const PackageManager = struct { 
this.lockfile.buffers.resolutions.items[dependency_id] = package_id; } - fn assignRootResolution(this: *PackageManager, dependency_id: PackageID, package_id: PackageID) void { + fn assignRootResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void { if (comptime Environment.allow_assert) { std.debug.assert(package_id < this.lockfile.packages.len); } @@ -2342,7 +2345,7 @@ pub const PackageManager = struct { name: String, version: Dependency.Version, behavior: Behavior, - dependency_id: PackageID, + dependency_id: DependencyID, resolution: PackageID, comptime successFn: SuccessFn, ) !?ResolvedPackageResult { @@ -2539,7 +2542,7 @@ pub const PackageManager = struct { try tmpfile.promote(tmpname, std.fs.cwd().fd, "yarn.lock"); } - pub fn isRootDependency(this: *const PackageManager, id: PackageID) bool { + pub fn isRootDependency(this: *const PackageManager, id: DependencyID) bool { if (this.dynamic_root_dependencies != null) { return false; } @@ -2549,7 +2552,7 @@ pub const PackageManager = struct { fn enqueueDependencyWithMain( this: *PackageManager, - id: u32, + id: DependencyID, /// This must be a *const to prevent UB dependency: *const Dependency, resolution: PackageID, @@ -2569,7 +2572,7 @@ pub const PackageManager = struct { /// A: "We enqueue it!" 
pub fn enqueueDependencyWithMainAndSuccessFn( this: *PackageManager, - id: u32, + id: DependencyID, /// This must be a *const to prevent UB dependency: *const Dependency, resolution: PackageID, @@ -2577,7 +2580,6 @@ pub const PackageManager = struct { comptime successFn: SuccessFn, comptime failFn: ?FailFn, ) !void { - const alias = dependency.name; const name = dependency.realname(); const name_hash = switch (dependency.version.tag) { @@ -2679,12 +2681,6 @@ pub const PackageManager = struct { }; if (resolve_result) |result| { - const buf = this.lockfile.buffers.string_bytes.items; - - if (!alias.eql(name, buf, buf)) { - try this.lockfile.alias_map.put(this.allocator, result.package.meta.id, alias); - } - // First time? if (result.is_first_time) { if (PackageManager.verbose_install) { @@ -2694,7 +2690,7 @@ pub const PackageManager = struct { this.lockfile.str(&result.package.name), label, this.lockfile.str(&result.package.name), - result.package.resolution.fmt(buf), + result.package.resolution.fmt(this.lockfile.buffers.string_bytes.items), }); } // Resolve dependencies first @@ -2820,7 +2816,7 @@ pub const PackageManager = struct { const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency"; try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id)); - if (try this.generateNetworkTaskForTarball(task_id, url, package)) |network_task| { + if (try this.generateNetworkTaskForTarball(task_id, url, id, package)) |network_task| { this.enqueueNetworkTask(network_task); } }, @@ -2958,8 +2954,8 @@ pub const PackageManager = struct { // Step 1. 
Go through main dependencies { - var i: u32 = dependencies_list.off; - const end = dependencies_list.off + dependencies_list.len; + var i = dependencies_list.off; + const end = dependencies_list.off +| dependencies_list.len; // we have to be very careful with pointers here while (i < end) : (i += 1) { const dependency = lockfile.buffers.dependencies.items[i]; @@ -3057,18 +3053,14 @@ pub const PackageManager = struct { } const GitHubResolver = struct { - alias: string, - alias_ptr: *String, resolved: string, resolution: Resolution, pub fn count(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) void { - builder.count(this.alias); builder.count(this.resolved); } pub fn resolve(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) !Resolution { - this.alias_ptr.* = builder.append(String, this.alias); var resolution = this.resolution; resolution.value.github.resolved = builder.append(String, this.resolved); return resolution; @@ -3079,7 +3071,6 @@ pub const PackageManager = struct { fn processExtractedTarballPackage( manager: *PackageManager, package_id: *PackageID, - name: string, resolution: Resolution, data: ExtractData, comptime log_level: Options.LogLevel, @@ -3091,7 +3082,6 @@ pub const PackageManager = struct { data.json_buf[0..data.json_len], ); var package = Lockfile.Package{}; - var alias = String{}; Lockfile.Package.parse( manager.lockfile, @@ -3101,8 +3091,6 @@ pub const PackageManager = struct { package_json_source, GitHubResolver, GitHubResolver{ - .alias = name, - .alias_ptr = &alias, .resolved = data.resolved, .resolution = resolution, }, @@ -3111,7 +3099,7 @@ pub const PackageManager = struct { if (comptime log_level != .silent) { const string_buf = manager.lockfile.buffers.string_bytes.items; Output.prettyErrorln("<r><red>error:<r> expected package.json in <b>{any}<r> to be a JSON file: {s}\n", .{ - package.resolution.fmtURL(&manager.options, alias.slice(string_buf), string_buf), + 
package.resolution.fmtURL(&manager.options, string_buf), @errorName(err), }); } @@ -3120,9 +3108,6 @@ pub const PackageManager = struct { package = manager.lockfile.appendPackage(package) catch unreachable; package_id.* = package.meta.id; - if (!strings.eql(name, manager.lockfile.str(&package.name))) { - manager.lockfile.alias_map.put(manager.allocator, package.meta.id, alias) catch unreachable; - } if (package.dependencies.len > 0) { manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch unreachable; @@ -3372,18 +3357,19 @@ pub const PackageManager = struct { .extract => |extract| { const response = task.http.response orelse { const err = task.http.err orelse error.TarballFailedToDownload; + const package_id = manager.lockfile.buffers.resolutions.items[extract.dependency_id]; if (@TypeOf(callbacks.onPackageDownloadError) != void) { if (manager.dynamic_root_dependencies) |*root_deps| { for (root_deps.items) |*dep| { - if (dep.resolution_id == extract.package_id) { + if (dep.resolution_id == package_id) { dep.failed = err; } } } callbacks.onPackageDownloadError( extract_ctx, - extract.package_id, + package_id, extract.name.slice(), extract.resolution, err, @@ -3418,10 +3404,11 @@ pub const PackageManager = struct { 405...499 => error.TarballHTTP4xx, else => error.TarballHTTP5xx, }; + const package_id = manager.lockfile.buffers.resolutions.items[extract.dependency_id]; if (manager.dynamic_root_dependencies) |*root_deps| { for (root_deps.items) |*dep| { - if (dep.resolution_id == extract.package_id) { + if (dep.resolution_id == package_id) { dep.failed = err; } } @@ -3429,7 +3416,7 @@ pub const PackageManager = struct { callbacks.onPackageDownloadError( extract_ctx, - extract.package_id, + package_id, extract.name.slice(), extract.resolution, err, @@ -3545,13 +3532,19 @@ pub const PackageManager = struct { } }, .extract => { + const tarball = task.request.extract.tarball; + const dependency_id = tarball.dependency_id; + var package_id = 
manager.lockfile.buffers.resolutions.items[dependency_id]; + const alias = tarball.name.slice(); + const resolution = tarball.resolution; + if (task.status == .fail) { const err = task.err orelse error.TarballFailedToExtract; if (@TypeOf(callbacks.onPackageDownloadError) != void) { if (manager.dynamic_root_dependencies) |*root_deps| { var deps: []Dependency.Pair = root_deps.items; for (deps) |*dep| { - if (dep.resolution_id == task.request.extract.tarball.package_id) { + if (dep.resolution_id == package_id) { dep.failed = dep.failed orelse err; } } @@ -3559,26 +3552,22 @@ pub const PackageManager = struct { callbacks.onPackageDownloadError( extract_ctx, - task.request.extract.tarball.package_id, - task.request.extract.tarball.name.slice(), - task.request.extract.tarball.resolution, + package_id, + alias, + resolution, err, task.request.extract.network.url_buf, ); } else if (comptime log_level != .silent) { const fmt = "<r><red>error<r>: {s} extracting tarball for <b>{s}<r>"; - const error_name: string = @errorName(err); const args = .{ - error_name, - task.request.extract.tarball.name.slice(), + @errorName(err), + alias, }; if (comptime log_level.showProgress()) { Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress); } else { - Output.prettyErrorln( - fmt, - args, - ); + Output.prettyErrorln(fmt, args); Output.flush(); } } @@ -3587,11 +3576,8 @@ pub const PackageManager = struct { manager.extracted_count += 1; bun.Analytics.Features.extracted_packages = true; - var package_id = task.request.extract.tarball.package_id; - const alias = task.request.extract.tarball.name.slice(); - const resolution = task.request.extract.tarball.resolution; // GitHub (and eventually tarball URL) dependencies are not fully resolved until after the tarball is downloaded & extracted. 
- if (manager.processExtractedTarballPackage(&package_id, alias, resolution, task.data.extract, comptime log_level)) |name| brk: { + if (manager.processExtractedTarballPackage(&package_id, resolution, task.data.extract, comptime log_level)) |name| brk: { // In the middle of an install, you could end up needing to downlaod the github tarball for a dependency // We need to make sure we resolve the dependencies first before calling the onExtract callback // TODO: move this into a separate function @@ -3639,12 +3625,12 @@ pub const PackageManager = struct { manager.setPreinstallState(package_id, manager.lockfile, .done); if (comptime @TypeOf(callbacks.onExtract) != void) { - callbacks.onExtract(extract_ctx, package_id, task.data.extract, comptime log_level); + callbacks.onExtract(extract_ctx, dependency_id, task.data.extract, comptime log_level); } if (comptime log_level.showProgress()) { if (!has_updated_this_run) { - manager.setNodeName(manager.downloads_node.?, task.request.extract.tarball.name.slice(), ProgressStrings.extract_emoji, true); + manager.setNodeName(manager.downloads_node.?, alias, ProgressStrings.extract_emoji, true); has_updated_this_run = true; } } @@ -3681,10 +3667,10 @@ pub const PackageManager = struct { } pub const Options = struct { - log_level: LogLevel = LogLevel.default, + log_level: LogLevel = .default, global: bool = false, - global_bin_dir: std.fs.IterableDir = std.fs.IterableDir{ .dir = .{ .fd = std.math.maxInt(std.os.fd_t) } }, + global_bin_dir: std.fs.IterableDir = .{ .dir = .{ .fd = std.math.maxInt(std.os.fd_t) } }, explicit_global_directory: string = "", /// destination directory to link bins into // must be a variable due to global installs and bunx @@ -3695,18 +3681,18 @@ pub const PackageManager = struct { did_override_default_scope: bool = false, scope: Npm.Registry.Scope = undefined, - registries: Npm.Registry.Map = Npm.Registry.Map{}, + registries: Npm.Registry.Map = .{}, cache_directory: string = "", enable: Enable = .{}, do: 
Do = .{}, positionals: []const string = &[_]string{}, - update: Update = Update{}, + update: Update = .{}, dry_run: bool = false, - remote_package_features: Features = Features{ + remote_package_features: Features = .{ .optional_dependencies = true, .peer_dependencies = false, }, - local_package_features: Features = Features{ + local_package_features: Features = .{ .dev_dependencies = true, .peer_dependencies = false, }, @@ -5848,17 +5834,18 @@ pub const PackageManager = struct { platform_binlinks: std.ArrayListUnmanaged(DeferredBinLink) = std.ArrayListUnmanaged(DeferredBinLink){}, pub const DeferredBinLink = struct { - package_id: PackageID, + dependency_id: DependencyID, node_modules_folder: std.fs.IterableDir, }; /// Install versions of a package which are waiting on a network request pub fn installEnqueuedPackages( this: *PackageInstaller, - package_id: PackageID, + dependency_id: DependencyID, data: ExtractData, comptime log_level: Options.LogLevel, ) void { + const package_id = this.lockfile.buffers.resolutions.items[dependency_id]; const name = this.lockfile.str(&this.names[package_id]); const resolution = &this.resolutions[package_id]; const task_id = switch (resolution.tag) { @@ -5875,13 +5862,14 @@ pub const PackageManager = struct { for (callbacks.items) |cb| { const node_modules_folder = cb.node_modules_folder; this.node_modules_folder = .{ .dir = .{ .fd = @intCast(bun.FileDescriptor, node_modules_folder) } }; - this.installPackageWithNameAndResolution(package_id, log_level, name, resolution); + this.installPackageWithNameAndResolution(dependency_id, package_id, log_level, name, resolution); } } } fn installPackageWithNameAndResolution( this: *PackageInstaller, + dependency_id: PackageID, package_id: PackageID, comptime log_level: Options.LogLevel, name: string, @@ -5889,7 +5877,7 @@ pub const PackageManager = struct { ) void { const buf = this.lockfile.buffers.string_bytes.items; - const alias = if (this.lockfile.alias_map.get(package_id)) |str| 
str.slice(buf) else name; + const alias = this.lockfile.buffers.dependencies.items[dependency_id].name.slice(buf); const destination_dir_subpath: [:0]u8 = brk: { std.mem.copy(u8, &this.destination_dir_subpath_buf, alias); this.destination_dir_subpath_buf[alias.len] = 0; @@ -6033,7 +6021,7 @@ pub const PackageManager = struct { run_bin_link: { if (std.mem.indexOfScalar(PackageNameHash, this.options.native_bin_link_allowlist, String.Builder.stringHash(name)) != null) { this.platform_binlinks.append(this.lockfile.allocator, .{ - .package_id = package_id, + .dependency_id = dependency_id, .node_modules_folder = this.node_modules_folder, }) catch unreachable; break :run_bin_link; @@ -6083,6 +6071,7 @@ pub const PackageManager = struct { switch (resolution.tag) { .github => { this.manager.enqueueTarballForDownload( + dependency_id, package_id, &resolution.value.github, .{ @@ -6094,6 +6083,7 @@ pub const PackageManager = struct { if (comptime Environment.allow_assert) std.debug.assert(!resolution.value.npm.url.isEmpty()); this.manager.enqueuePackageForDownload( name, + dependency_id, package_id, resolution.value.npm.version, resolution.value.npm.url.slice(buf), @@ -6131,11 +6121,10 @@ pub const PackageManager = struct { pub fn installPackage( this: *PackageInstaller, - package_id: PackageID, + dependency_id: DependencyID, comptime log_level: Options.LogLevel, ) void { - // const package_id = ctx.package_id; - // const tree = ctx.trees[ctx.tree_id]; + const package_id = this.lockfile.buffers.resolutions.items[dependency_id]; const meta = &this.metas[package_id]; if (meta.isDisabled()) { @@ -6148,13 +6137,14 @@ pub const PackageManager = struct { const name = this.lockfile.str(&this.names[package_id]); const resolution = &this.resolutions[package_id]; - this.installPackageWithNameAndResolution(package_id, log_level, name, resolution); + this.installPackageWithNameAndResolution(dependency_id, package_id, log_level, name, resolution); } }; pub fn enqueuePackageForDownload( 
this: *PackageManager, name: []const u8, + dependency_id: PackageID, package_id: PackageID, version: Semver.Version, url: []const u8, @@ -6172,7 +6162,12 @@ pub const PackageManager = struct { ) catch unreachable; if (!task_queue.found_existing) { - if (this.generateNetworkTaskForTarball(task_id, url, this.lockfile.packages.get(package_id)) catch unreachable) |task| { + if (this.generateNetworkTaskForTarball( + task_id, + url, + dependency_id, + this.lockfile.packages.get(package_id), + ) catch unreachable) |task| { task.schedule(&this.network_tarball_batch); if (this.network_tarball_batch.len > 0) { _ = this.scheduleNetworkTasks(); @@ -6183,6 +6178,7 @@ pub const PackageManager = struct { pub fn enqueueTarballForDownload( this: *PackageManager, + dependency_id: PackageID, package_id: PackageID, repository: *const Repository, task_context: TaskCallbackContext, @@ -6200,7 +6196,12 @@ pub const PackageManager = struct { ) catch unreachable; if (!task_queue.found_existing) { - if (this.generateNetworkTaskForTarball(task_id, url, this.lockfile.packages.get(package_id)) catch unreachable) |task| { + if (this.generateNetworkTaskForTarball( + task_id, + url, + dependency_id, + this.lockfile.packages.get(package_id), + ) catch unreachable) |task| { task.schedule(&this.network_tarball_batch); if (this.network_tarball_batch.len > 0) { _ = this.scheduleNetworkTasks(); @@ -6278,12 +6279,7 @@ pub const PackageManager = struct { const resolution_lists: []const Lockfile.PackageIDSlice = parts.items(.resolutions); var resolutions = parts.items(.resolution); - var iterator = Lockfile.Tree.Iterator.init( - lockfile.buffers.trees.items, - lockfile.buffers.hoisted_packages.items, - names, - lockfile.buffers.string_bytes.items, - ); + var iterator = Lockfile.Tree.Iterator.init(lockfile); var installer = PackageInstaller{ .manager = this, @@ -6302,7 +6298,7 @@ pub const PackageManager = struct { .summary = &summary, .global_bin_dir = this.options.global_bin_dir, .force_install = 
force_install, - .install_count = lockfile.buffers.hoisted_packages.items.len, + .install_count = lockfile.buffers.hoisted_dependencies.items.len, .successfully_installed = try Bitset.initEmpty( this.allocator, lockfile.packages.len, @@ -6320,7 +6316,7 @@ pub const PackageManager = struct { installer.node_modules_folder = folder; - var remaining = node_modules.packages; + var remaining = node_modules.dependencies; // cache line is 64 bytes on ARM64 and x64 // PackageIDs are 4 bytes @@ -6352,8 +6348,8 @@ pub const PackageManager = struct { } } - for (remaining) |package_id| { - installer.installPackage(@truncate(PackageID, package_id), log_level); + for (remaining) |dependency_id| { + installer.installPackage(dependency_id, log_level); } try this.runTasks( @@ -6388,7 +6384,8 @@ pub const PackageManager = struct { summary.successfully_installed = installer.successfully_installed; outer: for (installer.platform_binlinks.items) |deferred| { - const package_id = deferred.package_id; + const dependency_id = deferred.dependency_id; + const package_id = resolutions_buffer[dependency_id]; const folder = deferred.node_modules_folder; const package_dependencies: []const Dependency = dependency_lists[package_id].get(dependencies); @@ -6406,10 +6403,7 @@ pub const PackageManager = struct { // Don't attempt to link incompatible binaries if (meta.isDisabled()) continue; - const name: string = brk: { - const alias = this.lockfile.alias_map.get(package_id) orelse installer.names[resolved_id]; - break :brk lockfile.str(&alias); - }; + const name = lockfile.str(&dependencies[dependency_id].name); if (!installer.has_created_bin) { if (!this.options.global) { diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 084ad7616..f2ab32467 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -61,12 +61,13 @@ const Repository = @import("./repository.zig").Repository; const StructBuilder = @import("../builder.zig"); const Bin = @import("./bin.zig").Bin; const 
Dependency = @import("./dependency.zig"); -const Behavior = @import("./dependency.zig").Behavior; +const Behavior = Dependency.Behavior; const FolderResolution = @import("./resolvers/folder_resolver.zig").FolderResolution; const PackageManager = @import("./install.zig").PackageManager; const ExternalSlice = @import("./install.zig").ExternalSlice; const ExternalSliceAligned = @import("./install.zig").ExternalSliceAligned; const PackageID = @import("./install.zig").PackageID; +const DependencyID = @import("./install.zig").DependencyID; const Features = @import("./install.zig").Features; const PackageInstall = @import("./install.zig").PackageInstall; const PackageNameHash = @import("./install.zig").PackageNameHash; @@ -84,7 +85,6 @@ const zero_hash = std.mem.zeroes(MetaHash); const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; -const AliasMap = std.ArrayHashMapUnmanaged(PackageID, String, ArrayIdentityContext, false); const NameHashMap = std.ArrayHashMapUnmanaged(u32, String, ArrayIdentityContext, false); // Serialized data @@ -105,7 +105,6 @@ allocator: std.mem.Allocator, scratch: Scratch = Scratch{}, scripts: Scripts = .{}, -alias_map: AliasMap = .{}, workspace_paths: NameHashMap = .{}, const Stream = std.io.FixedBufferStream([]u8); @@ -205,7 +204,6 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: std.mem.Allocator, l this.format = FormatVersion.current; this.scripts = .{}; - this.alias_map = .{}; this.workspace_paths = .{}; Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| { @@ -222,11 +220,11 @@ pub const InstallResult = struct { pub const Tree = struct { id: Id = invalid_id, - package_id: PackageID = invalid_package_id, + dependency_id: DependencyID = invalid_package_id, parent: Id = invalid_id, - packages: Lockfile.PackageIDSlice = Lockfile.PackageIDSlice{}, + dependencies: Lockfile.DependencyIDSlice = .{}, - pub const external_size = @sizeOf(Id) + @sizeOf(PackageID) + @sizeOf(Id) + 
@sizeOf(Lockfile.PackageIDSlice); + pub const external_size = @sizeOf(Id) + @sizeOf(PackageID) + @sizeOf(Id) + @sizeOf(Lockfile.DependencyIDSlice); pub const External = [external_size]u8; pub const Slice = ExternalSlice(Tree); pub const List = std.ArrayListUnmanaged(Tree); @@ -235,26 +233,27 @@ pub const Tree = struct { pub fn toExternal(this: Tree) External { var out = External{}; out[0..4].* = @bitCast(Id, this.id); - out[4..8].* = @bitCast(Id, this.package_id); + out[4..8].* = @bitCast(Id, this.dependency_id); out[8..12].* = @bitCast(Id, this.parent); - out[12..16].* = @bitCast(u32, this.packages.off); - out[16..20].* = @bitCast(u32, this.packages.len); + out[12..16].* = @bitCast(u32, this.dependencies.off); + out[16..20].* = @bitCast(u32, this.dependencies.len); if (out.len != 20) @compileError("Tree.External is not 20 bytes"); return out; } pub fn toTree(out: External) Tree { - return Tree{ + return .{ .id = @bitCast(Id, out[0..4].*), - .package_id = @bitCast(Id, out[4..8].*), + .dependency_id = @bitCast(Id, out[4..8].*), .parent = @bitCast(Id, out[8..12].*), - .packages = .{ + .dependencies = .{ .off = @bitCast(u32, out[12..16].*), .len = @bitCast(u32, out[16..20].*), }, }; } + pub const root_dep_id: DependencyID = invalid_package_id - 1; const invalid_id: Id = std.math.maxInt(Id); const dependency_loop = invalid_id - 1; const hoisted = invalid_id - 2; @@ -264,14 +263,14 @@ pub const Tree = struct { pub const NodeModulesFolder = struct { relative_path: stringZ, - in: PackageID, - packages: []const PackageID, + dependencies: []const DependencyID, }; pub const Iterator = struct { trees: []const Tree, - package_ids: []const PackageID, - names: []const String, + dependency_ids: []const DependencyID, + dependencies: []const Dependency, + resolutions: []const PackageID, tree_id: Id = 0, path_buf: [bun.MAX_PATH_BYTES]u8 = undefined, path_buf_len: usize = 0, @@ -281,28 +280,20 @@ pub const Tree = struct { // max number of node_modules folders depth_stack: 
[(bun.MAX_PATH_BYTES / "node_modules".len) + 1]Id = undefined, - pub fn init( - trees: []const Tree, - package_ids: []const PackageID, - names: []const String, - string_buf: string, - ) Iterator { - return Tree.Iterator{ - .trees = trees, - .package_ids = package_ids, - .names = names, - .tree_id = 0, - .path_buf = undefined, - .path_buf_len = 0, - .last_parent = invalid_id, - .string_buf = string_buf, + pub fn init(lockfile: *const Lockfile) Iterator { + return .{ + .trees = lockfile.buffers.trees.items, + .dependency_ids = lockfile.buffers.hoisted_dependencies.items, + .dependencies = lockfile.buffers.dependencies.items, + .resolutions = lockfile.buffers.resolutions.items, + .string_buf = lockfile.buffers.string_bytes.items, }; } pub fn nextNodeModulesFolder(this: *Iterator) ?NodeModulesFolder { if (this.tree_id >= this.trees.len) return null; - while (this.trees[this.tree_id].packages.len == 0) { + while (this.trees[this.tree_id].dependencies.len == 0) { this.tree_id += 1; if (this.tree_id >= this.trees.len) return null; } @@ -332,8 +323,7 @@ pub const Tree = struct { path_written += 1; const tree_id = this.depth_stack[depth_buf_len]; - - const name = this.names[this.trees[tree_id].package_id].slice(string_buf); + const name = this.dependencies[this.trees[tree_id].dependency_id].name.slice(string_buf); std.mem.copy(u8, this.path_buf[path_written..], name); path_written += name.len; @@ -347,10 +337,9 @@ pub const Tree = struct { this.tree_id += 1; var relative_path: [:0]u8 = this.path_buf[0..this.path_buf_len :0]; - return NodeModulesFolder{ + return .{ .relative_path = relative_path, - .in = tree.package_id, - .packages = tree.packages.get(this.package_ids), + .dependencies = tree.dependencies.get(this.dependency_ids), }; } }; @@ -358,111 +347,117 @@ pub const Tree = struct { const Builder = struct { allocator: std.mem.Allocator, name_hashes: []const PackageNameHash, - list: ArrayList = ArrayList{}, + list: ArrayList = .{}, resolutions: []const PackageID, 
dependencies: []const Dependency, - resolution_lists: []const Lockfile.PackageIDSlice, + resolution_lists: []const Lockfile.DependencyIDSlice, queue: Lockfile.TreeFiller, pub const Entry = struct { tree: Tree, - packages: Lockfile.PackageIDList, + dependencies: Lockfile.DependencyIDList, }; pub const ArrayList = std.MultiArrayList(Entry); /// Flatten the multi-dimensional ArrayList of package IDs into a single easily serializable array - pub fn clean(this: *Builder) ![]PackageID { + pub fn clean(this: *Builder) !DependencyIDList { var end = @truncate(Id, this.list.len); var i: Id = 0; - var total_packages_count: u32 = 0; + var total: u32 = 0; var trees = this.list.items(.tree); - var packages = this.list.items(.packages); + var dependencies = this.list.items(.dependencies); // var real_end: Id = 0; // TODO: can we cull empty trees here? while (i < end) : (i += 1) { - total_packages_count += trees[i].packages.len; - // if (!(prev == total_packages_count and trees[i].package_id >= max_package_id)) { - // trees[real_end] = trees[i]; - // packages[real_end] = packages[i]; - // real_end += 1; - // } - } - // this.list.len = real_end; - // trees = trees[0..real_end]; - // packages = packages[0..real_end]; - - var package_ids = try z_allocator.alloc(PackageID, total_packages_count); + total += trees[i].dependencies.len; + } + + var dependency_ids = try DependencyIDList.initCapacity(z_allocator, total); var next = PackageIDSlice{}; - for (trees) |tree, id| { - if (tree.packages.len > 0) { - var child = packages[id]; + for (trees) |*tree, id| { + if (tree.dependencies.len > 0) { + var child = dependencies[id]; const len = @truncate(PackageID, child.items.len); next.off += next.len; next.len = len; - trees[id].packages = next; - std.mem.copy(PackageID, package_ids[next.off..][0..next.len], child.items); + tree.dependencies = next; + dependency_ids.appendSliceAssumeCapacity(child.items); child.deinit(this.allocator); } } this.queue.deinit(); - return package_ids; + return 
dependency_ids; } }; pub fn processSubtree( - this: *Tree, - package_id: PackageID, + this: *const Tree, + dependency_id: DependencyID, builder: *Builder, ) SubtreeError!void { try builder.list.append(builder.allocator, .{ - .tree = Tree{ + .tree = .{ .parent = this.id, .id = @truncate(Id, builder.list.len), - .package_id = package_id, + .dependency_id = dependency_id, }, - .packages = .{}, + .dependencies = .{}, }); const list_slice = builder.list.slice(); const trees = list_slice.items(.tree); - const package_lists = list_slice.items(.packages); + const dependency_lists = list_slice.items(.dependencies); const next: *Tree = &trees[builder.list.len - 1]; + const package_id = switch (dependency_id) { + root_dep_id => 0, + else => |id| builder.resolutions[id], + }; const resolution_list = builder.resolution_lists[package_id]; - const resolutions: []const PackageID = resolution_list.get(builder.resolutions); - if (resolutions.len == 0) return; + if (resolution_list.len == 0) return; const name_hashes: []const PackageNameHash = builder.name_hashes; const max_package_id = @truncate(PackageID, name_hashes.len); - const dependencies: []const Dependency = builder.dependencies[resolution_list.off..][0..resolution_list.len]; + var dep_id = resolution_list.off; + const end = dep_id + resolution_list.len; - for (resolutions) |pid, j| { + while (dep_id < end) : (dep_id += 1) { + const pid = builder.resolutions[dep_id]; if (pid >= max_package_id) continue; - - const dependency = dependencies[j]; + const dependency = builder.dependencies[dep_id]; // Do not download/install "peerDependencies" if (dependency.behavior.isPeer()) continue; // Do not hoist aliased packages - const destination = if (dependency.name_hash != name_hashes[pid]) brk: { - package_lists[next.id].append(builder.allocator, pid) catch unreachable; - next.packages.len += 1; + const destination = if (dependency.name_hash == name_hashes[pid]) next.addDependency( + true, + pid, + dep_id, + &dependency, + 
dependency_lists, + trees, + builder, + ) else brk: { + dependency_lists[next.id].append(builder.allocator, dep_id) catch unreachable; + next.dependencies.len += 1; break :brk next.id; - } else next.addDependency(true, pid, name_hashes, package_lists, trees, builder.allocator); - + }; switch (destination) { Tree.dependency_loop => return error.DependencyLoop, Tree.hoisted => continue, else => if (builder.resolution_lists[pid].len > 0) { - try builder.queue.writeItem([2]PackageID{ pid, destination }); + try builder.queue.writeItem(.{ + .tree_id = destination, + .dependency_id = dep_id, + }); }, } } @@ -472,21 +467,20 @@ pub const Tree = struct { // - de-duplicate (skip) the package // - move the package to the top directory // - leave the package at the same (relative) directory - pub fn addDependency( + fn addDependency( this: *Tree, comptime as_defined: bool, package_id: PackageID, - name_hashes: []const PackageNameHash, - lists: []Lockfile.PackageIDList, + dependency_id: DependencyID, + dependency: *const Dependency, + dependency_lists: []Lockfile.DependencyIDList, trees: []Tree, - allocator: std.mem.Allocator, + builder: *Builder, ) Id { - const name_hash = name_hashes[package_id]; - const this_packages = this.packages.get(lists[this.id].items); - - for (this_packages) |pid| { - if (name_hashes[pid] != name_hash) continue; - if (pid != package_id) return dependency_loop; + const this_dependencies = this.dependencies.get(dependency_lists[this.id].items); + for (this_dependencies) |dep_id| { + if (builder.dependencies[dep_id].name_hash != dependency.name_hash) continue; + if (builder.resolutions[dep_id] != package_id) return dependency_loop; return hoisted; } @@ -494,29 +488,35 @@ pub const Tree = struct { const id = trees[this.parent].addDependency( false, package_id, - name_hashes, - lists, + dependency_id, + dependency, + dependency_lists, trees, - allocator, + builder, ); if (!as_defined or id != dependency_loop) return id; } - lists[this.id].append(allocator, 
package_id) catch unreachable; - this.packages.len += 1; + dependency_lists[this.id].append(builder.allocator, dependency_id) catch unreachable; + this.dependencies.len += 1; return this.id; } }; -/// This conditonally clones the lockfile with root packages marked as non-resolved that do not satisfy `Features`. The package may still end up installed even if it was e.g. in "devDependencies" and its a production install. In that case, it would be installed because another dependency or transient dependency needed it +/// This conditonally clones the lockfile with root packages marked as non-resolved +/// that do not satisfy `Features`. The package may still end up installed even +/// if it was e.g. in "devDependencies" and its a production install. In that case, +/// it would be installed because another dependency or transient dependency needed it. /// -/// Warning: This potentially modifies the existing lockfile in-place. That is safe to do because at this stage, the lockfile has already been saved to disk. Our in-memory representation is all that's left. +/// Warning: This potentially modifies the existing lockfile in-place. That is +/// safe to do because at this stage, the lockfile has already been saved to disk. +/// Our in-memory representation is all that's left. 
pub fn maybeCloneFilteringRootPackages( old: *Lockfile, features: Features, ) !*Lockfile { - const old_root_dependenices_list: DependencySlice = old.packages.items(.dependencies)[0]; - var old_root_resolutions: PackageIDSlice = old.packages.items(.resolutions)[0]; + const old_root_dependenices_list = old.packages.items(.dependencies)[0]; + var old_root_resolutions = old.packages.items(.resolutions)[0]; const root_dependencies = old_root_dependenices_list.get(old.buffers.dependencies.items); var resolutions = old_root_resolutions.mut(old.buffers.resolutions.items); var any_changes = false; @@ -741,7 +741,11 @@ pub fn fmtMetaHash(this: *const Lockfile) MetaHashFormatter { }; } -pub const TreeFiller = std.fifo.LinearFifo([2]PackageID, .Dynamic); +pub const FillItem = struct { + tree_id: Tree.Id, + dependency_id: DependencyID, +}; +pub const TreeFiller = std.fifo.LinearFifo(FillItem, .Dynamic); const Cloner = struct { clone_queue: PendingResolutions, @@ -782,104 +786,29 @@ const Cloner = struct { } fn hoist(this: *Cloner) anyerror!void { - const max = @truncate(PackageID, this.lockfile.packages.len); - if (max == 0) return; - var allocator = this.lockfile.allocator; - - var tree_list = Tree.Builder.ArrayList{}; + if (this.lockfile.packages.len == 0) return; + var allocator = this.lockfile.allocator; var slice = this.lockfile.packages.slice(); - const unique_packages = this.lockfile.unique_packages; - - var resolutions_lists: []const PackageIDSlice = slice.items(.resolutions); - const name_hashes: []const PackageNameHash = slice.items(.name_hash); - const resolutions_buffer: []const PackageID = this.lockfile.buffers.resolutions.items; - // populate the root of the tree with: - // - packages where only one version exists in the tree and they have no dependencies - // - dependencies from package.json - // Dependencies from package.json must always be put into the tree - - var root_packages_count: u32 = resolutions_lists[0].len; - for (resolutions_lists[1..]) |list, 
package_id| { - if (list.len > 0 or !unique_packages.isSet(package_id + 1)) continue; - root_packages_count += 1; - } - - var root_package_list = try PackageIDList.initCapacity(allocator, root_packages_count); - const root_resolutions: []const PackageID = resolutions_lists[0].get(resolutions_buffer); - - try tree_list.ensureTotalCapacity(allocator, root_packages_count); - tree_list.len = root_packages_count; - - for (resolutions_lists[1..]) |list, package_id_| { - const package_id = @intCast(PackageID, package_id_ + 1); - if (list.len > 0 or - !unique_packages.isSet(package_id) or - std.mem.indexOfScalar(PackageID, root_package_list.items, package_id) != null) - continue; - root_package_list.appendAssumeCapacity(package_id); - } - - var tree_filler_queue: TreeFiller = TreeFiller.init(allocator); - try tree_filler_queue.ensureUnusedCapacity(root_resolutions.len); - - var possible_duplicates_len = root_package_list.items.len; - for (root_resolutions) |package_id| { - if (package_id >= max) continue; - if (std.mem.indexOfScalar(PackageID, root_package_list.items[0..possible_duplicates_len], package_id) != null) continue; - - root_package_list.appendAssumeCapacity(package_id); - } - { - var sliced = tree_list.slice(); - var trees = sliced.items(.tree); - var packages = sliced.items(.packages); - trees[0] = .{ - .parent = Tree.invalid_id, - .id = 0, - .packages = .{ - .len = @truncate(PackageID, root_package_list.items.len), - }, - }; - packages[0] = root_package_list; - - std.mem.set(PackageIDList, packages[1..], PackageIDList{}); - std.mem.set(Tree, trees[1..], Tree{}); - } - var builder = Tree.Builder{ - .name_hashes = name_hashes, - .list = tree_list, - .queue = tree_filler_queue, - .resolution_lists = resolutions_lists, - .resolutions = resolutions_buffer, + .name_hashes = slice.items(.name_hash), + .queue = TreeFiller.init(allocator), + .resolution_lists = slice.items(.resolutions), + .resolutions = this.lockfile.buffers.resolutions.items, .allocator = allocator, 
.dependencies = this.lockfile.buffers.dependencies.items, }; - var builder_ = &builder; - - for (root_resolutions) |package_id| { - if (package_id >= max) continue; - - try builder.list.items(.tree)[0].processSubtree( - package_id, - builder_, - ); - } + try (Tree{}).processSubtree(Tree.root_dep_id, &builder); // This goes breadth-first - while (builder.queue.readItem()) |pids| { - try builder.list.items(.tree)[pids[1]].processSubtree(pids[0], builder_); + while (builder.queue.readItem()) |item| { + try builder.list.items(.tree)[item.tree_id].processSubtree(item.dependency_id, &builder); } - var tree_packages = try builder.clean(); - this.lockfile.buffers.hoisted_packages = Lockfile.PackageIDList{ - .items = tree_packages, - .capacity = tree_packages.len, - }; + this.lockfile.buffers.hoisted_dependencies = try builder.clean(); { const final = builder.list.items(.tree); - this.lockfile.buffers.trees = Tree.List{ + this.lockfile.buffers.trees = .{ .items = final, .capacity = final.len, }; @@ -1033,38 +962,38 @@ pub const Printer = struct { ); var slice = this.lockfile.packages.slice(); - const names: []const String = slice.items(.name); - const names_hashes: []const PackageNameHash = slice.items(.name_hash); const bins: []const Bin = slice.items(.bin); const resolved: []const Resolution = slice.items(.resolution); - if (names.len == 0) return; + if (resolved.len == 0) return; const resolutions_list = slice.items(.resolutions); - const resolutions_buffer = this.lockfile.buffers.resolutions.items; + const resolutions_buffer: []const PackageID = this.lockfile.buffers.resolutions.items; + const dependencies_buffer: []const Dependency = this.lockfile.buffers.dependencies.items; const string_buf = this.lockfile.buffers.string_bytes.items; - var id_map = try default_allocator.alloc(PackageID, this.updates.len); - std.mem.set(PackageID, id_map, std.math.maxInt(PackageID)); + var id_map = try default_allocator.alloc(DependencyID, this.updates.len); + std.mem.set(DependencyID, 
id_map, invalid_package_id); defer if (id_map.len > 0) default_allocator.free(id_map); visited.set(0); - const end = @truncate(PackageID, names.len); + const end = @truncate(PackageID, resolved.len); if (this.successfully_installed) |installed| { - outer: for (resolutions_list[0].get(resolutions_buffer)) |package_id| { - if (package_id > end) continue; + var dep_id = resolutions_list[0].off; + const dep_end = dep_id + resolutions_list[0].len; + outer: while (dep_id < dep_end) : (dep_id += 1) { + const package_id = resolutions_buffer[dep_id]; + if (package_id >= end) continue; const is_new = installed.isSet(package_id); - const package_name = brk: { - const alias = this.lockfile.alias_map.get(package_id) orelse names[package_id]; - break :brk alias.slice(string_buf); - }; + const dependency = dependencies_buffer[dep_id]; + const package_name = dependency.name.slice(string_buf); if (this.updates.len > 0) { - const name_hash = names_hashes[package_id]; + const name_hash = dependency.name_hash; for (this.updates) |update, update_id| { if (update.failed) return; if (update.name.len == package_name.len and name_hash == update.name_hash) { - if (id_map[update_id] == std.math.maxInt(PackageID)) { - id_map[update_id] = @truncate(PackageID, package_id); + if (id_map[update_id] == invalid_package_id) { + id_map[update_id] = @truncate(DependencyID, dep_id); } continue :outer; @@ -1091,17 +1020,20 @@ pub const Printer = struct { ); } } else { - outer: for (names) |name, package_id| { - const package_name = name.slice(string_buf); + outer: for (dependencies_buffer) |dependency, dep_id| { + const package_id = resolutions_buffer[dep_id]; + if (package_id >= end) continue; + + const package_name = dependency.name.slice(string_buf); if (this.updates.len > 0) { - const name_hash = names_hashes[package_id]; + const name_hash = dependency.name_hash; for (this.updates) |update, update_id| { if (update.failed) return; if (update.name.len == package_name.len and name_hash == 
update.name_hash) { - if (id_map[update_id] == std.math.maxInt(PackageID)) { - id_map[update_id] = @truncate(PackageID, package_id); + if (id_map[update_id] == invalid_package_id) { + id_map[update_id] = @truncate(DependencyID, dep_id); } continue :outer; @@ -1124,9 +1056,10 @@ pub const Printer = struct { } for (this.updates) |_, update_id| { - const package_id = id_map[update_id]; - if (package_id == std.math.maxInt(PackageID)) continue; - const name = this.lockfile.alias_map.get(package_id) orelse names[package_id]; + const dependency_id = id_map[update_id]; + if (dependency_id == invalid_package_id) continue; + const name = dependencies_buffer[dependency_id].name; + const package_id = resolutions_buffer[dependency_id]; const bin = bins[package_id]; const package_name = name.slice(string_buf); @@ -1329,7 +1262,7 @@ pub const Printer = struct { try writer.writeAll(" resolved "); - const url_formatter = resolution.fmtURL(&this.options, name, string_buf); + const url_formatter = resolution.fmtURL(&this.options, string_buf); // Resolved URL is always quoted try std.fmt.format(writer, "\"{any}\"\n", .{url_formatter}); @@ -1404,7 +1337,7 @@ pub fn verifyData(this: *Lockfile) !void { } pub fn verifyResolutions(this: *Lockfile, local_features: Features, remote_features: Features, comptime log_level: PackageManager.Options.LogLevel) void { - const resolutions_list: []const PackageIDSlice = this.packages.items(.resolutions); + const resolutions_list: []const DependencyIDSlice = this.packages.items(.resolutions); const dependency_lists: []const DependencySlice = this.packages.items(.dependencies); const dependencies_buffer = this.buffers.dependencies.items; const resolutions_buffer = this.buffers.resolutions.items; @@ -1523,7 +1456,6 @@ pub fn initEmpty(this: *Lockfile, allocator: std.mem.Allocator) !void { .allocator = allocator, .scratch = Scratch.init(allocator), .scripts = .{}, - .alias_map = .{}, .workspace_paths = .{}, }; } @@ -1794,32 +1726,35 @@ pub const 
FormatVersion = enum(u32) { pub const current = FormatVersion.v2; }; -pub const DependencySlice = ExternalSlice(Dependency); pub const PackageIDSlice = ExternalSlice(PackageID); +pub const DependencySlice = ExternalSlice(Dependency); +pub const DependencyIDSlice = ExternalSlice(DependencyID); pub const PackageIDList = std.ArrayListUnmanaged(PackageID); pub const DependencyList = std.ArrayListUnmanaged(Dependency); +pub const DependencyIDList = std.ArrayListUnmanaged(DependencyID); + pub const StringBuffer = std.ArrayListUnmanaged(u8); pub const ExternalStringBuffer = std.ArrayListUnmanaged(ExternalString); pub const Package = extern struct { - name: String = String{}, + name: String = .{}, name_hash: PackageNameHash = 0, /// How a package has been resolved /// When .tag is uninitialized, that means the package is not resolved yet. - resolution: Resolution = Resolution{}, + resolution: Resolution = .{}, /// dependencies & resolutions must be the same length /// resolutions[i] is the resolved package ID for dependencies[i] /// if resolutions[i] is an invalid package ID, then dependencies[i] is not resolved - dependencies: DependencySlice = DependencySlice{}, + dependencies: DependencySlice = .{}, /// The resolved package IDs for the dependencies - resolutions: PackageIDSlice = PackageIDSlice{}, + resolutions: DependencyIDSlice = .{}, - meta: Meta = Meta{}, - bin: Bin = Bin{}, + meta: Meta = .{}, + bin: Bin = .{}, pub fn verify(this: *const Package, externs: []const ExternalString) void { if (comptime !Environment.allow_assert) @@ -1880,11 +1815,6 @@ pub const Package = extern struct { this.resolution.count(old_string_buf, *Lockfile.StringBuilder, builder); this.meta.count(old_string_buf, *Lockfile.StringBuilder, builder); const new_extern_string_count = this.bin.count(old_string_buf, old_extern_string_buf, *Lockfile.StringBuilder, builder); - - if (old.alias_map.get(this.meta.id)) |*alias| { - builder.count(old.str(alias)); - } - const old_dependencies: []const 
Dependency = this.dependencies.get(old.buffers.dependencies.items); const old_resolutions: []const PackageID = this.resolutions.get(old.buffers.resolutions.items); @@ -1942,10 +1872,6 @@ pub const Package = extern struct { package_id_mapping[this.meta.id] = new_package.meta.id; - if (old.alias_map.get(this.meta.id)) |*alias| { - try new.alias_map.put(new.allocator, new_package.meta.id, builder.append(String, old.str(alias))); - } - for (old_dependencies) |dependency, i| { dependencies[i] = try dependency.clone( old_string_buf, @@ -3153,13 +3079,12 @@ pub fn deinit(this: *Lockfile) void { this.unique_packages.deinit(this.allocator); this.string_pool.deinit(); this.scripts.deinit(this.allocator); - this.alias_map.deinit(this.allocator); this.workspace_paths.deinit(this.allocator); } const Buffers = struct { trees: Tree.List = .{}, - hoisted_packages: PackageIDList = .{}, + hoisted_dependencies: DependencyIDList = .{}, resolutions: PackageIDList = .{}, dependencies: DependencyList = .{}, extern_strings: ExternalStringBuffer = .{}, @@ -3338,7 +3263,18 @@ const Buffers = struct { } } - pub fn load(stream: *Stream, allocator: std.mem.Allocator, log: *logger.Log, alias_map: *AliasMap) !Buffers { + pub fn legacyPackageToDependencyID(this: Buffers, package_id: PackageID) !DependencyID { + switch (package_id) { + 0 => return Tree.root_dep_id, + invalid_package_id => return invalid_package_id, + else => for (this.resolutions.items) |pkg_id, dep_id| { + if (pkg_id == package_id) return @truncate(DependencyID, dep_id); + }, + } + return error.@"Lockfile is missing resolution data"; + } + + pub fn load(stream: *Stream, allocator: std.mem.Allocator, log: *logger.Log) !Buffers { var this = Buffers{}; var external_dependency_list_: std.ArrayListUnmanaged(Dependency.External) = std.ArrayListUnmanaged(Dependency.External){}; @@ -3391,12 +3327,17 @@ const Buffers = struct { this.dependencies.expandToCapacity(); this.dependencies.items.len = external_dependency_list.len; for 
(external_dependency_list) |external_dep, i| { - const dep = Dependency.toDependency(external_dep, extern_context); - if (dep.isAliased(string_buf)) { - try alias_map.put(allocator, this.resolutions.items[i], dep.name); - } + this.dependencies.items[i] = Dependency.toDependency(external_dep, extern_context); + } - this.dependencies.items[i] = dep; + // Legacy tree structure stores package IDs instead of dependency IDs + if (this.trees.items[0].dependency_id != Tree.root_dep_id) { + for (this.trees.items) |*tree| { + tree.dependency_id = try this.legacyPackageToDependencyID(tree.dependency_id); + } + for (this.hoisted_dependencies.items) |package_id, i| { + this.hoisted_dependencies.items[i] = try this.legacyPackageToDependencyID(package_id); + } } return this; @@ -3465,7 +3406,7 @@ pub const Serializer = struct { total_buffer_size, allocator, ); - lockfile.buffers = try Lockfile.Buffers.load(stream, allocator, log, &lockfile.alias_map); + lockfile.buffers = try Lockfile.Buffers.load(stream, allocator, log); if ((try stream.reader().readIntLittle(u64)) != 0) { return error.@"Lockfile is malformed (expected 0 at the end)"; } diff --git a/src/install/resolution.zig b/src/install/resolution.zig index 265dc402d..6d219d59f 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -121,8 +121,8 @@ pub const Resolution = extern struct { return Formatter{ .resolution = this, .buf = buf }; } - pub fn fmtURL(this: *const Resolution, options: *const PackageManager.Options, name: string, buf: []const u8) URLFormatter { - return URLFormatter{ .resolution = this, .buf = buf, .package_name = name, .options = options }; + pub fn fmtURL(this: *const Resolution, options: *const PackageManager.Options, buf: []const u8) URLFormatter { + return URLFormatter{ .resolution = this, .buf = buf, .options = options }; } pub fn eql( @@ -193,7 +193,6 @@ pub const Resolution = extern struct { pub const URLFormatter = struct { resolution: *const Resolution, options: *const 
PackageManager.Options, - package_name: string, buf: []const u8, diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 21b4df14f..b2be261d5 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -330,7 +330,7 @@ pub const PendingResolution = struct { esm: ESModule.Package.External = .{}, dependency: Dependency.Version = .{}, resolution_id: Install.PackageID = Install.invalid_package_id, - root_dependency_id: Install.PackageID = Install.invalid_package_id, + root_dependency_id: Install.DependencyID = Install.invalid_package_id, import_record_id: u32 = std.math.maxInt(u32), string_buf: []u8 = "", tag: Tag, @@ -1687,6 +1687,7 @@ pub const Resolver = struct { if (st == .extract) manager.enqueuePackageForDownload( esm.name, + manager.lockfile.buffers.legacyPackageToDependencyID(resolved_package_id) catch unreachable, resolved_package_id, resolution.value.npm.version, manager.lockfile.str(&resolution.value.npm.url), |