author	2023-03-28 23:45:00 +0300
committer	2023-03-28 13:45:00 -0700
commit	0a914902269ebb1f5612385bd8b65aa1de4db71e (patch)
tree	caaa275d3bb9de05827cbf162dd71034659a7f82
parent	93585291f97df02b4a74c731e37971f78d7fbde0 (diff)
[install] support tarball URLs (#2497)
closes #2448
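For context, this change teaches `bun install` to resolve dependencies whose specifier is a tarball: a remote URL or a local `.tgz` path, optionally under an alias (the installed folder keeps the alias; the real package name is read from the tarball's own package.json after extraction). A minimal sketch of a package.json exercising these forms, modeled on the tests added at the bottom of this diff — the URLs and file paths here are placeholders, not fixtures from the change itself:

```ts
import { writeFile } from "fs/promises";

// Hypothetical specifiers: a remote tarball URL, a local .tgz path, and an
// aliased remote tarball (folder name "bar", real name taken from the tarball).
const packageJson = {
  name: "foo",
  version: "0.0.1",
  dependencies: {
    // remote tarball URL — fetched over the network, then extracted into the cache
    baz: "https://registry.example.com/baz-0.0.3.tgz",
    // local tarball path — read from disk, no network request (tests use an absolute path)
    qux: "/tmp/qux-1.0.0.tgz",
    // alias: installed as node_modules/bar, but package.json inside stays "baz"
    bar: "https://registry.example.com/baz-0.0.3.tgz",
  },
};

await writeFile("package.json", JSON.stringify(packageJson, null, 2));
// then run: bun install
```

Running `bun install` against such a manifest downloads (or reads) the tarball, extracts it into the package cache, and links it like any other dependency.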
-rw-r--r--  src/install/dependency.zig            31
-rw-r--r--  src/install/extract_tarball.zig        4
-rw-r--r--  src/install/install.zig              254
-rw-r--r--  test/cli/install/bun-install.test.ts 190
4 files changed, 450 insertions(+), 29 deletions(-)
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index d93629a82..8f02cc404 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -15,7 +15,7 @@ const string = @import("../string_types.zig").string;
 const strings = @import("../string_immutable.zig");
 
 const Dependency = @This();
 
-pub const URI = union(Tag) {
+const URI = union(Tag) {
     local: String,
     remote: String,
@@ -117,10 +117,11 @@ pub const Context = struct {
 /// Get the name of the package as it should appear in a remote registry.
 pub inline fn realname(this: *const Dependency) String {
     return switch (this.version.tag) {
-        .npm => this.version.value.npm.name,
         .dist_tag => this.version.value.dist_tag.name,
         .git => this.version.value.git.package_name,
         .github => this.version.value.github.package_name,
+        .npm => this.version.value.npm.name,
+        .tarball => this.version.value.tarball.package_name,
         else => this.name,
     };
 }
@@ -131,6 +132,7 @@ pub inline fn isAliased(this: *const Dependency, buf: []const u8) bool {
         .dist_tag => !this.version.value.dist_tag.name.eql(this.name, buf, buf),
         .git => !this.version.value.git.package_name.eql(this.name, buf, buf),
         .github => !this.version.value.github.package_name.eql(this.name, buf, buf),
+        .tarball => !this.version.value.tarball.package_name.eql(this.name, buf, buf),
         else => false,
     };
 }
@@ -560,7 +562,16 @@ pub const Version = struct {
         tag: String,
 
         fn eql(this: TagInfo, that: TagInfo, this_buf: []const u8, that_buf: []const u8) bool {
-            return this.name.eql(that.name, this_buf, that_buf) and this.tag.eql(that.tag);
+            return this.name.eql(that.name, this_buf, that_buf) and this.tag.eql(that.tag, this_buf, that_buf);
+        }
+    };
+
+    const TarballInfo = struct {
+        uri: URI,
+        package_name: String = .{},
+
+        fn eql(this: TarballInfo, that: TarballInfo, this_buf: []const u8, that_buf: []const u8) bool {
+            return this.uri.eql(that.uri, this_buf, that_buf);
         }
     };
@@ -569,7 +580,7 @@ pub const Version = struct {
         npm: NpmInfo,
         dist_tag: TagInfo,
-        tarball: URI,
+        tarball: TarballInfo,
         folder: String,
 
         /// Equivalent to npm link
@@ -807,13 +818,13 @@ pub fn parseWithTag(
                 return .{
                     .tag = .tarball,
                     .literal = sliced.value(),
-                    .value = .{ .tarball = .{ .remote = sliced.sub(dependency).value() } },
+                    .value = .{ .tarball = .{ .uri = .{ .remote = sliced.sub(dependency).value() } } },
                 };
             } else if (strings.hasPrefixComptime(dependency, "file://")) {
                 return .{
                     .tag = .tarball,
                     .literal = sliced.value(),
-                    .value = .{ .tarball = .{ .local = sliced.sub(dependency[7..]).value() } },
+                    .value = .{ .tarball = .{ .uri = .{ .local = sliced.sub(dependency[7..]).value() } } },
                 };
             } else if (strings.contains(dependency, "://")) {
                 if (log_) |log| log.addErrorFmt(null, logger.Loc.Empty, allocator, "invalid or unsupported dependency \"{s}\"", .{dependency}) catch unreachable;
@@ -821,13 +832,9 @@ pub fn parseWithTag(
             }
 
             return .{
-                .literal = sliced.value(),
-                .value = .{
-                    .tarball = .{
-                        .local = sliced.value(),
-                    },
-                },
                 .tag = .tarball,
+                .literal = sliced.value(),
+                .value = .{ .tarball = .{ .uri = .{ .local = sliced.value() } } },
             };
         },
         .folder => {
diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig
index bec5f5eff..b2664392e 100644
--- a/src/install/extract_tarball.zig
+++ b/src/install/extract_tarball.zig
@@ -19,7 +19,6 @@ const ExtractTarball = @This();
 
 name: strings.StringOrTinyString,
 resolution: Resolution,
-registry: string,
 cache_dir: std.fs.Dir,
 temp_dir: std.fs.Dir,
 dependency_id: DependencyID,
@@ -289,6 +288,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
     const folder_name = switch (this.resolution.tag) {
         .npm => this.package_manager.cachedNPMPackageFolderNamePrint(&folder_name_buf, name, this.resolution.value.npm.version),
         .github => PackageManager.cachedGitHubFolderNamePrint(&folder_name_buf, resolved),
+        .local_tarball, .remote_tarball => PackageManager.cachedTarballFolderNamePrint(&folder_name_buf, this.url),
         else => unreachable,
     };
     if (folder_name.len == 0 or (folder_name.len == 1 and folder_name[0] == '/')) @panic("Tried to delete root and stopped it");
@@ -362,7 +362,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
     var json_buf: []u8 = "";
     var json_len: usize = 0;
     switch (this.resolution.tag) {
-        .github => {
+        .github, .local_tarball, .remote_tarball => {
             const json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| {
                 this.package_manager.log.addErrorFmt(
                     null,
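Reading guide for the dependency.zig hunks above: a `.tarball` version now carries a `uri` (local or remote) plus a `package_name` that is only known after the tarball has been extracted and its package.json read. A rough TypeScript sketch of how `parseWithTag` classifies a tarball specifier — the branch that returns `.remote` for plain URLs is only partially visible in the hunk, so treat that part as an assumption:

```ts
type TarballURI =
  | { kind: "remote"; url: string } //  e.g. https://…/baz-0.0.3.tgz
  | { kind: "local"; path: string }; // e.g. /abs/path/baz-0.0.3.tgz

// Sketch only: mirrors the visible branches of parseWithTag for the .tarball tag.
function classifyTarball(dependency: string): TarballURI {
  if (dependency.startsWith("http://") || dependency.startsWith("https://")) {
    // assumed from the surrounding (elided) branch that returns `.remote`
    return { kind: "remote", url: dependency };
  }
  if (dependency.startsWith("file://")) {
    // file:// URLs are treated as local paths with the scheme stripped
    return { kind: "local", path: dependency.slice("file://".length) };
  }
  if (dependency.includes("://")) {
    throw new Error(`invalid or unsupported dependency "${dependency}"`);
  }
  // anything else that was tagged as a tarball is a plain local path
  return { kind: "local", path: dependency };
}
```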
diff --git a/src/install/install.zig b/src/install/install.zig
index 91674b95d..75a7e28aa 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -172,6 +172,7 @@ const NetworkTask = struct {
         extract: ExtractTarball,
         git_clone: void,
         git_checkout: void,
+        local_tarball: void,
     },
 
     pub fn notify(this: *NetworkTask, _: anytype) void {
@@ -677,14 +678,45 @@ const Task = struct {
                 this.status = Status.success;
                 manager.resolve_tasks.writeItem(this.*) catch unreachable;
             },
+            .local_tarball => {
+                const result = readAndExtract(
+                    this.package_manager.allocator,
+                    this.request.local_tarball.tarball,
+                ) catch |err| {
+                    if (comptime Environment.isDebug) {
+                        if (@errorReturnTrace()) |trace| {
+                            std.debug.dumpStackTrace(trace.*);
+                        }
+                    }
+
+                    this.err = err;
+                    this.status = Status.fail;
+                    this.data = .{ .extract = .{} };
+                    this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
+                    return;
+                };
+
+                this.data = .{ .extract = result };
+                this.status = Status.success;
+                this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
+            },
         }
     }
 
-    pub const Tag = enum(u2) {
+    fn readAndExtract(allocator: std.mem.Allocator, tarball: ExtractTarball) !ExtractData {
+        const file = try std.fs.cwd().openFile(tarball.url, .{ .mode = .read_only });
+        defer file.close();
+        const bytes = try file.readToEndAlloc(allocator, std.math.maxInt(usize));
+        defer allocator.free(bytes);
+        return tarball.run(bytes);
+    }
+
+    pub const Tag = enum(u3) {
         package_manifest = 0,
         extract = 1,
         git_clone = 2,
         git_checkout = 3,
+        local_tarball = 4,
     };
 
     pub const Status = enum {
@@ -723,6 +755,9 @@
             resolved: strings.StringOrTinyString,
             resolution: Resolution,
         },
+        local_tarball: struct {
+            tarball: ExtractTarball,
+        },
     };
 };
@@ -1795,6 +1830,8 @@
             .git => manager.cachedGitFolderNamePrintAuto(&this.resolution.value.git),
             .github => manager.cachedGitHubFolderNamePrintAuto(&this.resolution.value.github),
             .npm => manager.cachedNPMPackageFolderName(lockfile.str(&this.name), this.resolution.value.npm.version),
+            .local_tarball => manager.cachedTarballFolderName(this.resolution.value.local_tarball),
+            .remote_tarball => manager.cachedTarballFolderName(this.resolution.value.remote_tarball),
             else => "",
         };
@@ -2122,6 +2159,14 @@
         }) catch unreachable;
     }
 
+    pub fn cachedTarballFolderNamePrint(buf: []u8, url: string) stringZ {
+        return std.fmt.bufPrintZ(buf, "@T@{any}", .{bun.fmt.hexIntLower(String.Builder.stringHash(url))}) catch unreachable;
+    }
+
+    pub fn cachedTarballFolderName(this: *const PackageManager, url: String) stringZ {
+        return cachedTarballFolderNamePrint(&cached_package_folder_name_buf, this.lockfile.str(&url));
+    }
+
     pub fn isFolderInCache(this: *PackageManager, folder_path: stringZ) bool {
         // TODO: is this slow?
         var dir = this.getCacheDirectory().dir.openDirZ(folder_path, .{}, true) catch return false;
@@ -2390,7 +2435,6 @@
             .resolution = package.resolution,
             .cache_dir = this.getCacheDirectory().dir,
             .temp_dir = this.getTemporaryDirectory().dir,
-            .registry = scope.url.href,
             .dependency_id = dependency_id,
             .integrity = package.meta.integrity,
             .url = url,
@@ -2657,6 +2701,42 @@
         return &task.threadpool_task;
     }
 
+    fn enqueueLocalTarball(
+        this: *PackageManager,
+        task_id: u64,
+        dependency_id: DependencyID,
+        name: String,
+        path: string,
+        resolution: Resolution,
+    ) *ThreadPool.Task {
+        var task = this.allocator.create(Task) catch unreachable;
+        task.* = Task{
+            .package_manager = &PackageManager.instance, // https://github.com/ziglang/zig/issues/14005
+            .log = logger.Log.init(this.allocator),
+            .tag = Task.Tag.local_tarball,
+            .request = .{
+                .local_tarball = .{
+                    .tarball = .{
+                        .package_manager = &PackageManager.instance, // https://github.com/ziglang/zig/issues/14005
+                        .name = strings.StringOrTinyString.initAppendIfNeeded(
+                            this.lockfile.str(&name),
+                            *FileSystem.FilenameStore,
+                            &FileSystem.FilenameStore.instance,
+                        ) catch unreachable,
+                        .resolution = resolution,
+                        .cache_dir = this.getCacheDirectory().dir,
+                        .temp_dir = this.getTemporaryDirectory().dir,
+                        .dependency_id = dependency_id,
+                        .url = path,
+                    },
+                },
+            },
+            .id = task_id,
+            .data = undefined,
+        };
+        return &task.threadpool_task;
+    }
+
     pub fn writeYarnLock(this: *PackageManager) !void {
         var printer = Lockfile.Printer{
             .lockfile = this.lockfile,
@@ -2737,7 +2817,7 @@
         const name = dependency.realname();
 
         const name_hash = switch (dependency.version.tag) {
-            .dist_tag, .git, .github, .npm => String.Builder.stringHash(this.lockfile.str(&name)),
+            .dist_tag, .git, .github, .npm, .tarball => String.Builder.stringHash(this.lockfile.str(&name)),
             else => dependency.name_hash,
         };
         const version = dependency.version;
@@ -3020,12 +3100,6 @@
                     return;
                 }
 
-                const package = Lockfile.Package{
-                    .name = dependency.name,
-                    .name_hash = dependency.name_hash,
-                    .resolution = res,
-                };
-
                 const url = this.allocGitHubURL(dep) catch unreachable;
                 const task_id = Task.Id.forTarball(url);
                 var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable;
@@ -3037,7 +3111,11 @@
                 try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id));
 
                 if (dependency.behavior.isPeer()) return;
-                if (try this.generateNetworkTaskForTarball(task_id, url, id, package)) |network_task| {
+                if (try this.generateNetworkTaskForTarball(task_id, url, id, .{
+                    .name = dependency.name,
+                    .name_hash = dependency.name_hash,
+                    .resolution = res,
+                })) |network_task| {
                     this.enqueueNetworkTask(network_task);
                 }
             },
@@ -3110,6 +3188,66 @@
                 ) catch unreachable;
                 }
             },
+            .tarball => {
+                const res: Resolution = switch (dependency.version.value.tarball.uri) {
+                    .local => |path| .{
+                        .tag = .local_tarball,
+                        .value = .{
+                            .local_tarball = path,
+                        },
+                    },
+                    .remote => |url| .{
+                        .tag = .remote_tarball,
+                        .value = .{
+                            .remote_tarball = url,
+                        },
+                    },
+                };
+
+                // First: see if we already loaded the tarball package in-memory
+                if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| {
+                    successFn(this, id, pkg_id);
+                    return;
+                }
+
+                const url = switch (dependency.version.value.tarball.uri) {
+                    .local => |path| this.lockfile.str(&path),
+                    .remote => |url| this.lockfile.str(&url),
+                };
+                const task_id = Task.Id.forTarball(url);
+                var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable;
+                if (!entry.found_existing) {
+                    entry.value_ptr.* = TaskCallbackList{};
+                }
+
+                const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency";
+                try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id));
+
+                if (dependency.behavior.isPeer()) return;
+                switch (dependency.version.value.tarball.uri) {
+                    .local => {
+                        const network_entry = try this.network_dedupe_map.getOrPutContext(this.allocator, task_id, .{});
+                        if (network_entry.found_existing) return;
+
+                        this.task_batch.push(ThreadPool.Batch.from(this.enqueueLocalTarball(
+                            task_id,
+                            id,
+                            dependency.name,
+                            url,
+                            res,
+                        )));
+                    },
+                    .remote => {
+                        if (try this.generateNetworkTaskForTarball(task_id, url, id, .{
+                            .name = dependency.name,
+                            .name_hash = dependency.name_hash,
+                            .resolution = res,
+                        })) |network_task| {
+                            this.enqueueNetworkTask(network_task);
+                        }
+                    },
+                }
+            },
             else => {},
         }
     }
@@ -3282,6 +3420,29 @@
         }
     };
 
+    const TarballResolver = struct {
+        url: string,
+        resolution: Resolution,
+
+        pub fn count(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) void {
+            builder.count(this.url);
+        }
+
+        pub fn resolve(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) !Resolution {
+            var resolution = this.resolution;
+            switch (resolution.tag) {
+                .local_tarball => {
+                    resolution.value.local_tarball = builder.append(String, this.url);
+                },
+                .remote_tarball => {
+                    resolution.value.remote_tarball = builder.append(String, this.url);
+                },
+                else => unreachable,
+            }
+            return resolution;
+        }
+    };
+
     /// Returns true if we need to drain dependencies
     fn processExtractedTarballPackage(
         manager: *PackageManager,
@@ -3329,6 +3490,44 @@
             return package;
         },
+        .local_tarball, .remote_tarball => {
+            const package_json_source = logger.Source.initPathString(
+                data.json_path,
+                data.json_buf[0..data.json_len],
+            );
+            var package = Lockfile.Package{};
+
+            package.parse(
+                manager.lockfile,
+                manager.allocator,
+                manager.log,
+                package_json_source,
+                TarballResolver,
+                TarballResolver{
+                    .url = data.url,
+                    .resolution = resolution,
+                },
+                Features.npm,
+            ) catch |err| {
+                if (comptime log_level != .silent) {
+                    const string_buf = manager.lockfile.buffers.string_bytes.items;
+                    Output.prettyErrorln("<r><red>error:<r> expected package.json in <b>{any}<r> to be a JSON file: {s}\n", .{
+                        resolution.fmtURL(&manager.options, string_buf),
+                        @errorName(err),
+                    });
+                }
+                Global.crash();
+            };
+
+            package = manager.lockfile.appendPackage(package) catch unreachable;
+            package_id.* = package.meta.id;
+
+            if (package.dependencies.len > 0) {
+                manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch unreachable;
+            }
+
+            return package;
+        },
         else => {},
     }
@@ -3700,8 +3899,12 @@
                 }
             }
         },
-            .extract => {
-                const tarball = task.request.extract.tarball;
+            .extract, .local_tarball => {
+                const tarball = switch (task.tag) {
+                    .extract => task.request.extract.tarball,
+                    .local_tarball => task.request.local_tarball.tarball,
+                    else => unreachable,
+                };
                 const dependency_id = tarball.dependency_id;
                 var package_id = manager.lockfile.buffers.resolutions.items[dependency_id];
                 const alias = tarball.name.slice();
@@ -3717,7 +3920,11 @@
                         alias,
                         resolution,
                         err,
-                        task.request.extract.network.url_buf,
+                        switch (task.tag) {
+                            .extract => task.request.extract.network.url_buf,
+                            .local_tarball => task.request.local_tarball.tarball.url,
+                            else => unreachable,
+                        },
                     );
                 } else if (comptime log_level != .silent) {
                     const fmt = "<r><red>error<r>: {s} extracting tarball for <b>{s}<r>";
@@ -3737,7 +3944,7 @@
                 manager.extracted_count += 1;
                 bun.Analytics.Features.extracted_packages = true;
 
-                // GitHub (and eventually tarball URL) dependencies are not fully resolved until after the tarball is downloaded & extracted.
+                // GitHub and tarball URL dependencies are not fully resolved until after the tarball is downloaded & extracted.
                 if (manager.processExtractedTarballPackage(&package_id, resolution, task.data.extract, comptime log_level)) |pkg| brk: {
                     // In the middle of an install, you could end up needing to downlaod the github tarball for a dependency
                     // We need to make sure we resolve the dependencies first before calling the onExtract callback
@@ -3759,7 +3966,16 @@
                     for (dependency_list.items) |dep| {
                         switch (dep) {
                             .dependency, .root_dependency => |id| {
-                                manager.lockfile.buffers.dependencies.items[id].version.value.github.package_name = pkg.name;
+                                var version = &manager.lockfile.buffers.dependencies.items[id].version;
+                                switch (version.tag) {
+                                    .github => {
+                                        version.value.github.package_name = pkg.name;
+                                    },
+                                    .tarball => {
+                                        version.value.tarball.package_name = pkg.name;
+                                    },
+                                    else => unreachable,
+                                }
                                 try manager.processDependencyListItem(dep, &any_root);
                             },
                             else => {
@@ -6202,6 +6418,14 @@
             }
             installer.cache_dir = .{ .dir = std.fs.cwd() };
         },
+        .local_tarball => {
+            installer.cache_dir_subpath = this.manager.cachedTarballFolderName(resolution.value.local_tarball);
+            installer.cache_dir = this.manager.getCacheDirectory();
+        },
+        .remote_tarball => {
+            installer.cache_dir_subpath = this.manager.cachedTarballFolderName(resolution.value.remote_tarball);
+            installer.cache_dir = this.manager.getCacheDirectory();
+        },
        .workspace => {
            const folder = resolution.value.workspace.slice(buf);
            // Handle when a package depends on itself
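Two install.zig details worth calling out. First, local tarballs bypass the network entirely: `readAndExtract` reads the file on the thread pool and feeds the bytes into the same extraction path that network downloads use. Second, both local and remote tarballs are cached under a folder named `@T@` followed by the lowercase hex of a hash of the tarball URL or path (`cachedTarballFolderNamePrint` above). The sketch below only illustrates the shape of that name; the stand-in FNV-1a hash is not Bun's internal `String.Builder.stringHash`, so real cache folder names will have different digits:

```ts
// Stand-in 64-bit FNV-1a hash. Bun actually uses its internal
// String.Builder.stringHash, so this is illustrative only.
function fnv1a64(input: string): bigint {
  let hash = 0xcbf29ce484222325n;
  for (const byte of new TextEncoder().encode(input)) {
    hash ^= BigInt(byte);
    hash = (hash * 0x100000001b3n) & 0xffffffffffffffffn;
  }
  return hash;
}

// Mirrors the "@T@" + lowercase-hex format of cachedTarballFolderNamePrint.
function cachedTarballFolderName(url: string): string {
  return `@T@${fnv1a64(url).toString(16)}`;
}

console.log(cachedTarballFolderName("https://registry.example.com/baz-0.0.3.tgz"));
// The name is derived only from the URL/path, so the same tarball specifier
// always maps to the same cache folder across installs.
```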
diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts
index bdb5053c3..f9a0fe132 100644
--- a/test/cli/install/bun-install.test.ts
+++ b/test/cli/install/bun-install.test.ts
@@ -2800,3 +2800,193 @@ it("should prefer dependencies over peerDependencies of the same name", async ()
     },
   });
 });
+
+it("should handle tarball URL", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: {
+        baz: `${root_url}/baz-0.0.3.tgz`,
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    ` + baz@${root_url}/baz-0.0.3.tgz`,
+    "",
+    " 1 packages installed",
+  ]);
+  expect(await exited).toBe(0);
+  expect(urls.sort()).toEqual([`${root_url}/baz-0.0.3.tgz`]);
+  expect(requested).toBe(1);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "baz"]);
+  expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
+  expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "baz", "index.js"));
+  expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual(["index.js", "package.json"]);
+  expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
+    name: "baz",
+    version: "0.0.3",
+    bin: {
+      "baz-run": "index.js",
+    },
+  });
+  await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle tarball path", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: {
+        baz: join(import.meta.dir, "baz-0.0.3.tgz"),
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    ` + baz@${join(import.meta.dir, "baz-0.0.3.tgz")}`,
+    "",
+    " 1 packages installed",
+  ]);
+  expect(await exited).toBe(0);
+  expect(requested).toBe(0);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "baz"]);
+  expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
+  expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "baz", "index.js"));
+  expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual(["index.js", "package.json"]);
+  expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
+    name: "baz",
+    version: "0.0.3",
+    bin: {
+      "baz-run": "index.js",
+    },
+  });
+  await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle tarball URL with aliasing", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: {
+        bar: `${root_url}/baz-0.0.3.tgz`,
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    ` + bar@${root_url}/baz-0.0.3.tgz`,
+    "",
+    " 1 packages installed",
+  ]);
+  expect(await exited).toBe(0);
+  expect(urls.sort()).toEqual([`${root_url}/baz-0.0.3.tgz`]);
+  expect(requested).toBe(1);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar"]);
+  expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
+  expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "bar", "index.js"));
+  expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["index.js", "package.json"]);
+  expect(await file(join(package_dir, "node_modules", "bar", "package.json")).json()).toEqual({
+    name: "baz",
+    version: "0.0.3",
+    bin: {
+      "baz-run": "index.js",
+    },
+  });
+  await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle tarball path with aliasing", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: {
+        bar: join(import.meta.dir, "baz-0.0.3.tgz"),
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    ` + bar@${join(import.meta.dir, "baz-0.0.3.tgz")}`,
+    "",
+    " 1 packages installed",
+  ]);
+  expect(await exited).toBe(0);
+  expect(requested).toBe(0);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar"]);
+  expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-run"]);
+  expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "bar", "index.js"));
+  expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["index.js", "package.json"]);
+  expect(await file(join(package_dir, "node_modules", "bar", "package.json")).json()).toEqual({
+    name: "baz",
+    version: "0.0.3",
+    bin: {
+      "baz-run": "index.js",
+    },
+  });
+  await access(join(package_dir, "bun.lockb"));
+});