Diffstat:
-rw-r--r--  Makefile                              |   4
-rw-r--r--  src/install/install.zig               | 180
-rw-r--r--  test/cli/install/bun-add.test.ts      |  15
-rw-r--r--  test/cli/install/bun-install.test.ts  |  57

4 files changed, 153 insertions(+), 103 deletions(-)
diff --git a/Makefile b/Makefile
index f080534f2..d2264ca7d 100644
--- a/Makefile
+++ b/Makefile
@@ -767,10 +767,10 @@ sign-macos-aarch64:
gon sign.macos-aarch64.json
cls:
- @echo "\n\n---\n\n"
+ @echo -e "\n\n---\n\n"
jsc-check:
- @ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
+ @ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@ls $(JSC_INCLUDE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit include directory at $(JSC_INCLUDE_DIR)." && exit 1)
@ls $(JSC_LIB) >/dev/null 2>&1 || (echo "Failed to access WebKit lib directory at $(JSC_LIB)." && exit 1)
diff --git a/src/install/install.zig b/src/install/install.zig
index d56c4c3c5..810bc4afb 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -5003,59 +5003,57 @@ pub const PackageManager = struct {
const child_cwd = this_cwd;
// Check if this is a workspace; if so, use root package
- if (comptime is_install) {
- var found = false;
- while (std.fs.path.dirname(this_cwd)) |parent| {
- var dir = std.fs.openDirAbsolute(parent, .{}) catch break;
- defer dir.close();
- const json_file = dir.openFileZ("package.json", .{ .mode = .read_write }) catch {
- this_cwd = parent;
- continue;
- };
- defer if (!found) json_file.close();
- const json_stat = try json_file.stat();
- const json_buf = try ctx.allocator.alloc(u8, json_stat.size + 64);
- defer ctx.allocator.free(json_buf);
- const json_len = try json_file.preadAll(json_buf, 0);
- var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
- const json_path = try bun.getFdPath(json_file.handle, &path_buf);
- const json_source = logger.Source.initPathString(
- json_path,
- json_buf[0..json_len],
- );
- initializeStore();
- const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator);
- if (json.asProperty("workspaces")) |prop| {
- var workspace_names = bun.StringMap.init(ctx.allocator, true);
- defer workspace_names.deinit();
- const json_array = switch (prop.expr.data) {
+ var found = false;
+ while (std.fs.path.dirname(this_cwd)) |parent| : (this_cwd = parent) {
+ var dir = std.fs.openDirAbsolute(parent, .{}) catch break;
+ defer dir.close();
+ const json_file = dir.openFileZ("package.json", .{ .mode = .read_write }) catch {
+ continue;
+ };
+ defer if (!found) json_file.close();
+ const json_stat = try json_file.stat();
+ const json_buf = try ctx.allocator.alloc(u8, json_stat.size + 64);
+ defer ctx.allocator.free(json_buf);
+ const json_len = try json_file.preadAll(json_buf, 0);
+ const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf);
+ const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]);
+ initializeStore();
+ const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator);
+ if (json.asProperty("workspaces")) |prop| {
+ var workspace_names = bun.StringMap.init(ctx.allocator, true);
+ defer workspace_names.deinit();
+ const json_array = switch (prop.expr.data) {
+ .e_array => |arr| arr,
+ .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) {
.e_array => |arr| arr,
- .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) {
- .e_array => |arr| arr,
- else => break,
- } else break,
else => break,
- };
- _ = Package.processWorkspaceNamesArray(
- &workspace_names,
- ctx.allocator,
- ctx.log,
- json_array,
- &json_source,
- prop.loc,
- null,
- ) catch break;
- for (workspace_names.keys()) |path| {
- if (strings.eql(child_cwd, path)) {
+ } else break,
+ else => break,
+ };
+ var log = logger.Log.init(ctx.allocator);
+ defer log.deinit();
+ _ = Package.processWorkspaceNamesArray(
+ &workspace_names,
+ ctx.allocator,
+ &log,
+ json_array,
+ &json_source,
+ prop.loc,
+ null,
+ ) catch break;
+ for (workspace_names.keys()) |path| {
+ if (strings.eql(child_cwd, path)) {
+ fs.top_level_dir = parent;
+ if (comptime is_install) {
found = true;
child_json.close();
- fs.top_level_dir = parent;
break :brk json_file;
+ } else {
+ break :brk child_json;
}
}
- break;
}
- this_cwd = parent;
+ break;
}
}
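
As a rough illustration of the walk this hunk restructures, here is a minimal TypeScript sketch (not Bun's code: processWorkspaceNamesArray's glob expansion is omitted, and raw patterns are compared instead of resolved paths). Starting from the current directory, climb toward the filesystem root; the nearest ancestor package.json ends the walk, and becomes the install root only if its "workspaces" list contains the directory we started in:

    import { readFileSync } from "node:fs";
    import { dirname, join, relative } from "node:path";

    function findWorkspaceRoot(startDir: string): string {
      let cwd = startDir;
      let parent = dirname(cwd);
      while (parent !== cwd) {
        let pkg: any;
        try {
          pkg = JSON.parse(readFileSync(join(parent, "package.json"), "utf8"));
        } catch {
          // No (readable) package.json here; keep climbing.
          cwd = parent;
          parent = dirname(cwd);
          continue;
        }
        // "workspaces" may be an array or an object with a "packages" array.
        const patterns = Array.isArray(pkg.workspaces)
          ? pkg.workspaces
          : pkg.workspaces?.packages;
        if (Array.isArray(patterns) && patterns.includes(relative(parent, startDir))) {
          return parent; // startDir is a member: install from the root
        }
        break; // nearest package.json does not claim us: stay standalone
      }
      return startDir;
    }
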
@@ -5069,8 +5067,7 @@ pub const PackageManager = struct {
cwd_buf[fs.top_level_dir.len] = '/';
cwd_buf[fs.top_level_dir.len + 1] = 0;
fs.top_level_dir = cwd_buf[0 .. fs.top_level_dir.len + 1];
- bun.copy(u8, &package_json_cwd_buf, fs.top_level_dir);
- bun.copy(u8, package_json_cwd_buf[fs.top_level_dir.len..], "package.json");
+ package_json_cwd = try bun.getFdPath(package_json_file.handle, &package_json_cwd_buf);
var entries_option = try fs.fs.readDirectory(fs.top_level_dir, null, 0, true);
var options = Options{
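
The replaced buffer arithmetic assumed package.json always sat directly under top_level_dir; after the workspace walk above that is no longer true, so the path is now recovered from the open file handle itself. A Linux-only TypeScript illustration of the same idea (an assumption for illustration; bun.getFdPath is the platform-independent Zig equivalent, and the /proc trick below only works on Linux):

    import { openSync, closeSync, readlinkSync } from "node:fs";

    const fd = openSync("package.json", "r");
    try {
      // An open descriptor's real path can be read back from /proc on Linux.
      const packageJsonCwd = readlinkSync(`/proc/self/fd/${fd}`);
      console.log(packageJsonCwd); // e.g. /home/me/repo/package.json
    } finally {
      closeSync(fd);
    }
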
@@ -5338,7 +5335,7 @@ pub const PackageManager = struct {
);
const package_json_source = logger.Source.initPathString(
- package_json_cwd_buf[0 .. FileSystem.instance.top_level_dir.len + "package.json".len],
+ package_json_cwd,
current_package_json_buf[0..current_package_json_contents_len],
);
try lockfile.initEmpty(ctx.allocator);
@@ -5502,7 +5499,7 @@ pub const PackageManager = struct {
);
const package_json_source = logger.Source.initPathString(
- package_json_cwd_buf[0 .. FileSystem.instance.top_level_dir.len + "package.json".len],
+ package_json_cwd,
current_package_json_buf[0..current_package_json_contents_len],
);
try lockfile.initEmpty(ctx.allocator);
@@ -6104,7 +6101,7 @@ pub const PackageManager = struct {
);
const package_json_source = logger.Source.initPathString(
- package_json_cwd_buf[0 .. FileSystem.instance.top_level_dir.len + "package.json".len],
+ package_json_cwd,
current_package_json_buf[0..current_package_json_contents_len],
);
@@ -6342,6 +6339,7 @@ pub const PackageManager = struct {
var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
var package_json_cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ var package_json_cwd: string = "";
pub inline fn install(ctx: Command.Context) !void {
var manager = initMaybeInstall(ctx, null, &install_params, true) catch |err| {
@@ -7210,10 +7208,7 @@ pub const PackageManager = struct {
// Step 2. Parse the package.json file
//
- var package_json_source = logger.Source.initPathString(
- package_json_cwd_buf[0 .. FileSystem.instance.top_level_dir.len + "package.json".len],
- package_json_contents,
- );
+ var package_json_source = logger.Source.initPathString(package_json_cwd, package_json_contents);
switch (load_lockfile_result) {
.err => |cause| {
@@ -7444,7 +7439,7 @@ pub const PackageManager = struct {
try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
}
- if (manager.log.errors > 0) Global.crash();
+ if (manager.log.hasErrors()) Global.crash();
const needs_clean_lockfile = had_any_diffs or needs_new_lockfile or manager.package_json_updates.len > 0;
var did_meta_hash_change = needs_clean_lockfile;
@@ -7479,51 +7474,48 @@ pub const PackageManager = struct {
// 2. There is a determinism issue in the file where alignment bytes might be garbage data
// This is a bug that needs to be fixed, however we can work around it for now
// by avoiding saving the lockfile
- if (manager.options.do.save_lockfile and (did_meta_hash_change or
- manager.lockfile.isEmpty() or
- manager.options.enable.force_save_lockfile))
- {
- save: {
- if (manager.lockfile.isEmpty()) {
- if (!manager.options.dry_run) {
- std.fs.cwd().deleteFileZ(manager.options.save_lockfile_path) catch |err| brk: {
- // we don't care
- if (err == error.FileNotFound) {
- if (had_any_diffs) break :save;
- break :brk;
- }
-
- if (log_level != .silent) Output.prettyErrorln("\n <red>error: {s} deleting empty lockfile", .{@errorName(err)});
- break :save;
- };
- }
- if (!manager.options.global) {
- if (log_level != .silent) Output.prettyErrorln("No packages! Deleted empty lockfile", .{});
- }
+ if (manager.options.do.save_lockfile and
+ (did_meta_hash_change or manager.lockfile.isEmpty() or manager.options.enable.force_save_lockfile))
+ save: {
+ if (manager.lockfile.isEmpty()) {
+ if (!manager.options.dry_run) {
+ std.fs.cwd().deleteFileZ(manager.options.save_lockfile_path) catch |err| brk: {
+ // we don't care
+ if (err == error.FileNotFound) {
+ if (had_any_diffs) break :save;
+ break :brk;
+ }
- break :save;
+ if (log_level != .silent) Output.prettyErrorln("\n <red>error: {s} deleting empty lockfile", .{@errorName(err)});
+ break :save;
+ };
+ }
+ if (!manager.options.global) {
+ if (log_level != .silent) Output.prettyErrorln("No packages! Deleted empty lockfile", .{});
}
- var node: *Progress.Node = undefined;
+ break :save;
+ }
- if (comptime log_level.showProgress()) {
- node = manager.progress.start(ProgressStrings.save(), 0);
- manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
- node.activate();
+ var node: *Progress.Node = undefined;
- manager.progress.refresh();
- }
+ if (comptime log_level.showProgress()) {
+ node = manager.progress.start(ProgressStrings.save(), 0);
+ manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
+ node.activate();
- manager.lockfile.saveToDisk(manager.options.save_lockfile_path);
- if (comptime log_level.showProgress()) {
- node.end();
- manager.progress.refresh();
- manager.progress.root.end();
- manager.progress = .{};
- } else if (comptime log_level != .silent) {
- Output.prettyErrorln(" Saved lockfile", .{});
- Output.flush();
- }
+ manager.progress.refresh();
+ }
+
+ manager.lockfile.saveToDisk(manager.options.save_lockfile_path);
+ if (comptime log_level.showProgress()) {
+ node.end();
+ manager.progress.refresh();
+ manager.progress.root.end();
+ manager.progress = .{};
+ } else if (comptime log_level != .silent) {
+ Output.prettyErrorln(" Saved lockfile", .{});
+ Output.flush();
}
}
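
Behaviorally this restructuring is a no-op; it removes one level of nesting by attaching the save: label directly to the if. A condensed TypeScript model of the decision being made (hypothetical names mapped to the Zig fields in comments; the real code interleaves progress reporting and error handling):

    type LockfileAction = "save" | "delete" | "skip";

    function lockfileAction(opts: {
      saveLockfile: boolean;    // manager.options.do.save_lockfile
      metaHashChanged: boolean; // did_meta_hash_change
      lockfileEmpty: boolean;   // manager.lockfile.isEmpty()
      forceSave: boolean;       // manager.options.enable.force_save_lockfile
    }): LockfileAction {
      if (!opts.saveLockfile) return "skip";
      if (!(opts.metaHashChanged || opts.lockfileEmpty || opts.forceSave)) return "skip";
      // An empty lockfile is deleted from disk rather than written out.
      return opts.lockfileEmpty ? "delete" : "save";
    }
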
diff --git a/test/cli/install/bun-add.test.ts b/test/cli/install/bun-add.test.ts
index b14afa918..95a201295 100644
--- a/test/cli/install/bun-add.test.ts
+++ b/test/cli/install/bun-add.test.ts
@@ -1390,20 +1390,21 @@ it("should add dependencies to workspaces directly", async () => {
const out = await new Response(stdout).text();
expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
"",
- ` installed foo@${add_path}`,
+ ` installed foo@${relative(package_dir, add_dir)}`,
"",
"",
" 1 packages installed",
]);
expect(await exited).toBe(0);
- expect(await readdirSorted(join(package_dir))).toEqual(["bunfig.toml", "moo", "package.json"]);
- expect(await file(join(package_dir, "package.json")).text()).toEqual(bar_package);
- expect(await readdirSorted(join(package_dir, "moo"))).toEqual([
+ expect(await readdirSorted(join(package_dir))).toEqual([
"bun.lockb",
"bunfig.toml",
+ "moo",
"node_modules",
"package.json",
]);
+ expect(await file(join(package_dir, "package.json")).text()).toEqual(bar_package);
+ expect(await readdirSorted(join(package_dir, "moo"))).toEqual(["bunfig.toml", "package.json"]);
expect(await file(join(package_dir, "moo", "package.json")).json()).toEqual({
name: "moo",
version: "0.3.0",
@@ -1411,7 +1412,7 @@ it("should add dependencies to workspaces directly", async () => {
foo: `file:${add_path}`,
},
});
- expect(await readdirSorted(join(package_dir, "moo", "node_modules"))).toEqual([".cache", "foo"]);
- expect(await readdirSorted(join(package_dir, "moo", "node_modules", "foo"))).toEqual(["package.json"]);
- expect(await file(join(package_dir, "moo", "node_modules", "foo", "package.json")).text()).toEqual(foo_package);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "foo"]);
+ expect(await readdirSorted(join(package_dir, "node_modules", "foo"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "node_modules", "foo", "package.json")).text()).toEqual(foo_package);
});
diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts
index 7d72ae318..f423c761a 100644
--- a/test/cli/install/bun-install.test.ts
+++ b/test/cli/install/bun-install.test.ts
@@ -4041,6 +4041,63 @@ it("should install dependencies in root package of workspace (*)", async () => {
await access(join(package_dir, "bun.lockb"));
});
+it("should ignore invalid workspaces from parent directory", async () => {
+ const urls: string[] = [];
+ setHandler(dummyRegistry(urls));
+ const foo_package = JSON.stringify({
+ name: "foo",
+ version: "0.1.0",
+ workspaces: ["moz"],
+ });
+ await writeFile(join(package_dir, "package.json"), foo_package);
+ await mkdir(join(package_dir, "moo"));
+ await writeFile(join(package_dir, "moo", "bunfig.toml"), await file(join(package_dir, "bunfig.toml")).text());
+ const moo_package = JSON.stringify({
+ name: "moo",
+ version: "0.2.0",
+ dependencies: {
+ bar: "^0.0.2",
+ },
+ });
+ await writeFile(join(package_dir, "moo", "package.json"), moo_package);
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install"],
+ cwd: join(package_dir, "moo"),
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + bar@0.0.2",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls.sort()).toEqual([`${root_url}/bar`, `${root_url}/bar-0.0.2.tgz`]);
+ expect(requested).toBe(2);
+ expect(await readdirSorted(join(package_dir))).toEqual(["bunfig.toml", "moo", "package.json"]);
+ expect(await file(join(package_dir, "package.json")).text()).toEqual(foo_package);
+ expect(await readdirSorted(join(package_dir, "moo"))).toEqual([
+ "bun.lockb",
+ "bunfig.toml",
+ "node_modules",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "moo", "package.json")).text()).toEqual(moo_package);
+ expect(await readdirSorted(join(package_dir, "moo", "node_modules"))).toEqual([".cache", "bar"]);
+ expect(await readdirSorted(join(package_dir, "moo", "node_modules", "bar"))).toEqual(["package.json"]);
+ expect(await file(join(package_dir, "moo", "node_modules", "bar", "package.json")).json()).toEqual({
+ name: "bar",
+ version: "0.0.2",
+ });
+});
+
it("should handle --cwd", async () => {
const urls: string[] = [];
setHandler(dummyRegistry(urls));
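
Outside the test harness, the scenario pinned down by the new "should ignore invalid workspaces from parent directory" test above can be reproduced with a small fixture (paths hypothetical): the parent's "workspaces" pattern ("moz") does not match the child directory ("moo"), so bun install run inside moo must treat moo as its own root instead of installing at the parent:

    import { mkdir, writeFile } from "node:fs/promises";
    import { join } from "node:path";

    const root = "/tmp/workspace-repro"; // hypothetical scratch directory
    await mkdir(join(root, "moo"), { recursive: true });
    await writeFile(
      join(root, "package.json"),
      JSON.stringify({ name: "foo", version: "0.1.0", workspaces: ["moz"] }),
    );
    await writeFile(
      join(root, "moo", "package.json"),
      JSON.stringify({ name: "moo", version: "0.2.0", dependencies: { bar: "^0.0.2" } }),
    );
    // Then: cd /tmp/workspace-repro/moo && bun install
    // Expected: moo/bun.lockb and moo/node_modules/bar, nothing at the parent.
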