about summary refs log tree commit diff
diff options
context:
space:
mode:
authorGravatar Alex Lam S.L <alexlamsl@gmail.com> 2023-05-10 08:26:29 +0300
committerGravatar GitHub <noreply@github.com> 2023-05-10 08:26:29 +0300
commit4ccca130018300b23d0bedf252bd62c804241f10 (patch)
tree1a8ae55595b511da325ae981a687b4878642ce29
parent1e8b9258b2ac77f6db1b42ce82928fed86ffc77d (diff)
downloadbun-4ccca130018300b23d0bedf252bd62c804241f10.tar.gz
bun-4ccca130018300b23d0bedf252bd62c804241f10.tar.zst
bun-4ccca130018300b23d0bedf252bd62c804241f10.zip
[install] operate on root package of workspaces (#2834)
- parse `bunfig.toml` from same directory as `package.json`
- handle `--cwd` correctly

fixes #2592
-rw-r--r--src/bun.zig6
-rw-r--r--src/fs.zig2
-rw-r--r--src/install/install.zig133
-rw-r--r--src/install/lockfile.zig79
-rw-r--r--test/cli/install/bun-add.test.ts64
-rw-r--r--test/cli/install/bun-install.test.ts160
6 files changed, 352 insertions, 92 deletions
diff --git a/src/bun.zig b/src/bun.zig
index dce5d95ad..6b8dffbfa 100644
--- a/src/bun.zig
+++ b/src/bun.zig
@@ -1385,12 +1385,16 @@ pub const StringMap = struct {
entry.value_ptr.* = try self.map.allocator.dupe(u8, value);
}
-
pub const put = insert;
+
pub fn get(self: *const StringMap, key: []const u8) ?[]const u8 {
return self.map.get(key);
}
+ pub fn sort(self: *StringMap, sort_ctx: anytype) void {
+ self.map.sort(sort_ctx);
+ }
+
pub fn deinit(self: *StringMap) void {
for (self.map.values()) |value| {
self.map.allocator.free(value);
diff --git a/src/fs.zig b/src/fs.zig
index f85678136..1d31bc480 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -882,7 +882,7 @@ pub const FileSystem = struct {
generation: bun.Generation,
store_fd: bool,
) !*EntriesOption {
- return readDirectoryWithIterator(fs, _dir, _handle, generation, store_fd, void, {});
+ return fs.readDirectoryWithIterator(_dir, _handle, generation, store_fd, void, {});
}
// One of the learnings here
diff --git a/src/install/install.zig b/src/install/install.zig
index dad1c4f99..d56c4c3c5 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -4934,14 +4934,13 @@ pub const PackageManager = struct {
return initMaybeInstall(ctx, package_json_file_, params, false);
}
- pub fn initMaybeInstall(
+ fn initMaybeInstall(
ctx: Command.Context,
package_json_file_: ?std.fs.File,
comptime params: []const ParamType,
comptime is_install: bool,
) !*PackageManager {
- var _ctx = ctx;
- var cli = try CommandLineArguments.parse(ctx.allocator, params, &_ctx);
+ const cli = try CommandLineArguments.parse(ctx.allocator, params);
if (comptime is_install) {
if (cli.positionals.len > 1) {
@@ -4949,13 +4948,15 @@ pub const PackageManager = struct {
}
}
- return try initWithCLI(_ctx, package_json_file_, cli);
+ var _ctx = ctx;
+ return initWithCLI(&_ctx, package_json_file_, cli, is_install);
}
- pub fn initWithCLI(
- ctx: Command.Context,
+ fn initWithCLI(
+ ctx: *Command.Context,
package_json_file_: ?std.fs.File,
cli: CommandLineArguments,
+ comptime is_install: bool,
) !*PackageManager {
// assume that spawning a thread will take a lil so we do that asap
try HTTP.HTTPThread.init();
@@ -4970,7 +4971,7 @@ pub const PackageManager = struct {
}
var fs = try Fs.FileSystem.init(null);
- var original_cwd = std.mem.trimRight(u8, fs.top_level_dir, "/");
+ var original_cwd = strings.withoutTrailingSlash(fs.top_level_dir);
bun.copy(u8, &cwd_buf, original_cwd);
@@ -4978,41 +4979,93 @@ pub const PackageManager = struct {
//
// We will walk up from the cwd, calling chdir on each directory until we find a package.json
// If we fail to find one, we will report an error saying no packages to install
- var package_json_file: std.fs.File = undefined;
-
- if (package_json_file_) |file| {
- package_json_file = file;
- } else {
-
- // can't use orelse due to a stage1 bug
- package_json_file = std.fs.cwd().openFileZ("package.json", .{ .mode = .read_write }) catch brk: {
- var this_cwd = original_cwd;
- outer: while (std.fs.path.dirname(this_cwd)) |parent| {
- cwd_buf[parent.len] = 0;
- var chdir = cwd_buf[0..parent.len :0];
-
- std.os.chdirZ(chdir) catch |err| {
- Output.prettyErrorln("Error {s} while chdir - {s}", .{ @errorName(err), bun.span(chdir) });
+ const package_json_file = package_json_file_ orelse brk: {
+ var this_cwd = original_cwd;
+ const child_json = child: {
+ while (true) {
+ var dir = std.fs.openDirAbsolute(this_cwd, .{}) catch |err| {
+ Output.prettyErrorln("Error {s} accessing {s}", .{ @errorName(err), this_cwd });
Output.flush();
return err;
};
+ defer dir.close();
+ break :child dir.openFileZ("package.json", .{ .mode = .read_write }) catch {
+ if (std.fs.path.dirname(this_cwd)) |parent| {
+ this_cwd = parent;
+ continue;
+ } else {
+ break;
+ }
+ };
+ }
+ return error.MissingPackageJSON;
+ };
- break :brk std.fs.cwd().openFileZ("package.json", .{ .mode = .read_write }) catch {
+ const child_cwd = this_cwd;
+ // Check if this is a workspace; if so, use root package
+ if (comptime is_install) {
+ var found = false;
+ while (std.fs.path.dirname(this_cwd)) |parent| {
+ var dir = std.fs.openDirAbsolute(parent, .{}) catch break;
+ defer dir.close();
+ const json_file = dir.openFileZ("package.json", .{ .mode = .read_write }) catch {
this_cwd = parent;
- continue :outer;
+ continue;
};
+ defer if (!found) json_file.close();
+ const json_stat = try json_file.stat();
+ const json_buf = try ctx.allocator.alloc(u8, json_stat.size + 64);
+ defer ctx.allocator.free(json_buf);
+ const json_len = try json_file.preadAll(json_buf, 0);
+ var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ const json_path = try bun.getFdPath(json_file.handle, &path_buf);
+ const json_source = logger.Source.initPathString(
+ json_path,
+ json_buf[0..json_len],
+ );
+ initializeStore();
+ const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator);
+ if (json.asProperty("workspaces")) |prop| {
+ var workspace_names = bun.StringMap.init(ctx.allocator, true);
+ defer workspace_names.deinit();
+ const json_array = switch (prop.expr.data) {
+ .e_array => |arr| arr,
+ .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) {
+ .e_array => |arr| arr,
+ else => break,
+ } else break,
+ else => break,
+ };
+ _ = Package.processWorkspaceNamesArray(
+ &workspace_names,
+ ctx.allocator,
+ ctx.log,
+ json_array,
+ &json_source,
+ prop.loc,
+ null,
+ ) catch break;
+ for (workspace_names.keys()) |path| {
+ if (strings.eql(child_cwd, path)) {
+ found = true;
+ child_json.close();
+ fs.top_level_dir = parent;
+ break :brk json_file;
+ }
+ }
+ break;
+ }
+ this_cwd = parent;
}
+ }
- bun.copy(u8, &cwd_buf, original_cwd);
- cwd_buf[original_cwd.len] = 0;
- var real_cwd: [:0]u8 = cwd_buf[0..original_cwd.len :0];
- std.os.chdirZ(real_cwd) catch {};
-
- return error.MissingPackageJSON;
- };
- }
+ fs.top_level_dir = child_cwd;
+ break :brk child_json;
+ };
- fs.top_level_dir = try std.os.getcwd(&cwd_buf);
+ try std.os.chdir(fs.top_level_dir);
+ try BunArguments.loadConfig(ctx.allocator, cli.config, ctx, .InstallCommand);
+ bun.copy(u8, &cwd_buf, fs.top_level_dir);
cwd_buf[fs.top_level_dir.len] = '/';
cwd_buf[fs.top_level_dir.len + 1] = 0;
fs.top_level_dir = cwd_buf[0 .. fs.top_level_dir.len + 1];
@@ -5640,11 +5693,7 @@ pub const PackageManager = struct {
}
};
- pub fn parse(
- allocator: std.mem.Allocator,
- comptime params: []const ParamType,
- ctx: *Command.Context,
- ) !CommandLineArguments {
+ pub fn parse(allocator: std.mem.Allocator, comptime params: []const ParamType) !CommandLineArguments {
var diag = clap.Diagnostic{};
var args = clap.parse(clap.Help, params, .{
@@ -5693,8 +5742,6 @@ pub const PackageManager = struct {
cli.config = opt;
}
- try BunArguments.loadConfig(allocator, cli.config, ctx, .InstallCommand);
-
cli.link_native_bins = args.options("--link-native-bins");
if (comptime params.len == add_params.len) {
@@ -6296,10 +6343,8 @@ pub const PackageManager = struct {
var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
var package_json_cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
- pub inline fn install(
- ctx: Command.Context,
- ) !void {
- var manager = PackageManager.initMaybeInstall(ctx, null, &install_params, true) catch |err| {
+ pub inline fn install(ctx: Command.Context) !void {
+ var manager = initMaybeInstall(ctx, null, &install_params, true) catch |err| {
if (err == error.SwitchToBunAdd) {
return add(ctx);
}
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 61a71c970..8006d85bb 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -2456,17 +2456,16 @@ pub const Package = extern struct {
};
}
- fn processWorkspaceNamesArray(
- workspace_names: *bun.StringArrayHashMap(string),
+ pub fn processWorkspaceNamesArray(
+ workspace_names: *bun.StringMap,
allocator: Allocator,
log: *logger.Log,
arr: *JSAst.E.Array,
source: *const logger.Source,
loc: logger.Loc,
- string_builder: *StringBuilder,
+ string_builder: ?*StringBuilder,
) !u32 {
- workspace_names.* = bun.StringArrayHashMap(string).init(allocator);
- workspace_names.ensureTotalCapacity(arr.items.len) catch unreachable;
+ if (arr.items.len == 0) return 0;
var fallback = std.heap.stackFallback(1024, allocator);
var workspace_allocator = fallback.get();
@@ -2516,6 +2515,8 @@ pub const Package = extern struct {
\\TODO fancy glob patterns. For now, try something like "packages/*"
) catch {};
continue;
+ } else if (string_builder == null) {
+ input_path = Path.joinAbsStringBuf(source.path.name.dir, filepath_buf, &[_]string{input_path}, .auto);
}
const workspace_entry = processWorkspaceName(
@@ -2561,15 +2562,13 @@ pub const Package = extern struct {
if (workspace_entry.name.len == 0) continue;
- string_builder.count(workspace_entry.name);
- string_builder.count(input_path);
- string_builder.cap += bun.MAX_PATH_BYTES;
-
- var result = try workspace_names.getOrPut(input_path);
- if (!result.found_existing) {
- result.key_ptr.* = try allocator.dupe(u8, input_path);
- result.value_ptr.* = try allocator.dupe(u8, workspace_entry.name);
+ if (string_builder) |builder| {
+ builder.count(workspace_entry.name);
+ builder.count(input_path);
+ builder.cap += bun.MAX_PATH_BYTES;
}
+
+ try workspace_names.insert(input_path, workspace_entry.name);
}
if (asterisked_workspace_paths.items.len > 0) {
@@ -2579,9 +2578,12 @@ pub const Package = extern struct {
defer allocator.destroy(second_buf);
for (asterisked_workspace_paths.items) |user_path| {
- var dir_prefix = strings.withoutLeadingSlash(user_path);
- dir_prefix = user_path[0 .. strings.indexOfChar(dir_prefix, '*') orelse continue];
+ var dir_prefix = if (string_builder) |_|
+ strings.withoutLeadingSlash(user_path)
+ else
+ Path.joinAbsStringBuf(source.path.name.dir, filepath_buf, &[_]string{user_path}, .auto);
+ dir_prefix = dir_prefix[0 .. strings.indexOfChar(dir_prefix, '*') orelse continue];
if (dir_prefix.len == 0 or
strings.eqlComptime(dir_prefix, ".") or
strings.eqlComptime(dir_prefix, "./"))
@@ -2687,25 +2689,20 @@ pub const Package = extern struct {
if (workspace_entry.name.len == 0) continue;
- string_builder.count(workspace_entry.name);
- second_buf_fixed.reset();
- const relative = std.fs.path.relative(
- second_buf_fixed.allocator(),
- Fs.FileSystem.instance.top_level_dir,
- bun.span(entry_path),
- ) catch unreachable;
-
- string_builder.count(relative);
- string_builder.cap += bun.MAX_PATH_BYTES;
-
- var result = try workspace_names.getOrPut(relative);
- if (!result.found_existing) {
- result.value_ptr.* = try allocator.dupe(
- u8,
- workspace_entry.name,
- );
- result.key_ptr.* = try allocator.dupe(u8, relative);
- }
+ const workspace_path: string = if (string_builder) |builder| brk: {
+ second_buf_fixed.reset();
+ const relative = std.fs.path.relative(
+ second_buf_fixed.allocator(),
+ Fs.FileSystem.instance.top_level_dir,
+ bun.span(entry_path),
+ ) catch unreachable;
+ builder.count(workspace_entry.name);
+ builder.count(relative);
+ builder.cap += bun.MAX_PATH_BYTES;
+ break :brk relative;
+ } else bun.span(entry_path);
+
+ try workspace_names.insert(workspace_path, workspace_entry.name);
}
}
}
@@ -2869,14 +2866,8 @@ pub const Package = extern struct {
break :brk out_groups;
};
- var workspace_names = bun.StringArrayHashMap(string).init(allocator);
- defer {
- for (workspace_names.values(), workspace_names.keys()) |name, path| {
- allocator.free(name);
- allocator.free(path);
- }
- workspace_names.deinit();
- }
+ var workspace_names = bun.StringMap.init(allocator, true);
+ defer workspace_names.deinit();
inline for (dependency_groups) |group| {
if (json.asProperty(group.prop)) |dependencies_q| brk: {
@@ -2891,8 +2882,6 @@ pub const Package = extern struct {
, .{group.prop}) catch {};
return error.InvalidPackageJSON;
}
- if (arr.items.len == 0) break :brk;
-
total_dependencies_count += try processWorkspaceNamesArray(
&workspace_names,
allocator,
@@ -2916,8 +2905,6 @@ pub const Package = extern struct {
//
if (obj.get("packages")) |packages_query| {
if (packages_query.data == .e_array) {
- if (packages_query.data.e_array.items.len == 0) break :brk;
-
total_dependencies_count += try processWorkspaceNamesArray(
&workspace_names,
allocator,
diff --git a/test/cli/install/bun-add.test.ts b/test/cli/install/bun-add.test.ts
index 03911da9b..b14afa918 100644
--- a/test/cli/install/bun-add.test.ts
+++ b/test/cli/install/bun-add.test.ts
@@ -1351,3 +1351,67 @@ it("should add dependency without duplication (GitHub)", async () => {
);
await access(join(package_dir, "bun.lockb"));
});
+
+it("should add dependencies to workspaces directly", async () => {
+ const foo_package = JSON.stringify({
+ name: "foo",
+ version: "0.1.0",
+ workspaces: ["moo"],
+ });
+ await writeFile(join(add_dir, "package.json"), foo_package);
+ const bar_package = JSON.stringify({
+ name: "bar",
+ version: "0.2.0",
+ workspaces: ["moo"],
+ });
+ await writeFile(join(package_dir, "package.json"), bar_package);
+ await mkdir(join(package_dir, "moo"));
+ await writeFile(
+ join(package_dir, "moo", "package.json"),
+ JSON.stringify({
+ name: "moo",
+ version: "0.3.0",
+ }),
+ );
+ await writeFile(join(package_dir, "moo", "bunfig.toml"), await file(join(package_dir, "bunfig.toml")).text());
+ const add_path = relative(join(package_dir, "moo"), add_dir);
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "add", `file:${add_path}`],
+ cwd: join(package_dir, "moo"),
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ "",
+ ` installed foo@${add_path}`,
+ "",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(await readdirSorted(join(package_dir))).toEqual(["bunfig.toml", "moo", "package.json"]);
+ expect(await file(join(package_dir, "package.json")).text()).toEqual(bar_package);
+ expect(await readdirSorted(join(package_dir, "moo"))).toEqual([
+ "bun.lockb",
+ "bunfig.toml",
+ "node_modules",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "moo", "package.json")).json()).toEqual({
+ name: "moo",
+ version: "0.3.0",
+ dependencies: {
+ foo: `file:${add_path}`,
+ },
+ });
+ expect(await readdirSorted(join(package_dir, "moo", "node_modules"))).toEqual([".cache", "foo"]);
+ expect(await readdirSorted(join(package_dir, "moo", "node_modules", "foo"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "moo", "node_modules", "foo", "package.json")).text()).toEqual(foo_package);
+});
diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts
index 9833ab3f4..7d72ae318 100644
--- a/test/cli/install/bun-install.test.ts
+++ b/test/cli/install/bun-install.test.ts
@@ -3936,3 +3936,163 @@ it("should deduplicate devDependencies from folder", async () => {
expect(await file(join(package_dir, "node_modules", "moo", "package.json")).text()).toEqual(moo_package);
await access(join(package_dir, "bun.lockb"));
});
+
+it("should install dependencies in root package of workspace", async () => {
+ const urls: string[] = [];
+ setHandler(dummyRegistry(urls));
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "foo",
+ version: "0.1.0",
+ workspaces: ["moo"],
+ }),
+ );
+ await mkdir(join(package_dir, "moo"));
+ const moo_package = JSON.stringify({
+ name: "moo",
+ version: "0.2.0",
+ dependencies: {
+ bar: "^0.0.2",
+ },
+ });
+ await writeFile(join(package_dir, "moo", "package.json"), moo_package);
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install"],
+ cwd: join(package_dir, "moo"),
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + moo@workspace:moo",
+ "",
+ " 2 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls.sort()).toEqual([`${root_url}/bar`, `${root_url}/bar-0.0.2.tgz`]);
+ expect(requested).toBe(2);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "bar", "moo"]);
+ expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "node_modules", "bar", "package.json")).json()).toEqual({
+ name: "bar",
+ version: "0.0.2",
+ });
+ expect(await readdirSorted(join(package_dir, "node_modules", "moo"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "node_modules", "moo", "package.json")).text()).toEqual(moo_package);
+ await access(join(package_dir, "bun.lockb"));
+});
+
+it("should install dependencies in root package of workspace (*)", async () => {
+ const urls: string[] = [];
+ setHandler(dummyRegistry(urls));
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "foo",
+ version: "0.1.0",
+ workspaces: ["*"],
+ }),
+ );
+ await mkdir(join(package_dir, "moo"));
+ const moo_package = JSON.stringify({
+ name: "moo",
+ version: "0.2.0",
+ dependencies: {
+ bar: "^0.0.2",
+ },
+ });
+ await writeFile(join(package_dir, "moo", "package.json"), moo_package);
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install"],
+ cwd: join(package_dir, "moo"),
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + moo@workspace:moo",
+ "",
+ " 2 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls.sort()).toEqual([`${root_url}/bar`, `${root_url}/bar-0.0.2.tgz`]);
+ expect(requested).toBe(2);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "bar", "moo"]);
+ expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "node_modules", "bar", "package.json")).json()).toEqual({
+ name: "bar",
+ version: "0.0.2",
+ });
+ expect(await readdirSorted(join(package_dir, "node_modules", "moo"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "node_modules", "moo", "package.json")).text()).toEqual(moo_package);
+ await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle --cwd", async () => {
+ const urls: string[] = [];
+ setHandler(dummyRegistry(urls));
+ const foo_package = JSON.stringify({
+ name: "foo",
+ version: "0.1.0",
+ });
+ await writeFile(join(package_dir, "package.json"), foo_package);
+ await mkdir(join(package_dir, "moo"));
+ await writeFile(join(package_dir, "moo", "bunfig.toml"), await file(join(package_dir, "bunfig.toml")).text());
+ const moo_package = JSON.stringify({
+ name: "moo",
+ version: "0.2.0",
+ dependencies: {
+ bar: "^0.0.2",
+ },
+ });
+ await writeFile(join(package_dir, "moo", "package.json"), moo_package);
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install", "--cwd", "moo"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + bar@0.0.2",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls.sort()).toEqual([`${root_url}/bar`, `${root_url}/bar-0.0.2.tgz`]);
+ expect(requested).toBe(2);
+ expect(await readdirSorted(join(package_dir))).toEqual(["bunfig.toml", "moo", "package.json"]);
+ expect(await file(join(package_dir, "package.json")).text()).toEqual(foo_package);
+ expect(await readdirSorted(join(package_dir, "moo"))).toEqual([
+ "bun.lockb",
+ "bunfig.toml",
+ "node_modules",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "moo", "package.json")).text()).toEqual(moo_package);
+ expect(await readdirSorted(join(package_dir, "moo", "node_modules"))).toEqual([".cache", "bar"]);
+ expect(await readdirSorted(join(package_dir, "moo", "node_modules", "bar"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "moo", "node_modules", "bar", "package.json")).json()).toEqual({
+ name: "bar",
+ version: "0.0.2",
+ });
+});