author     2023-02-15 04:04:06 +0200
committer  2023-02-14 18:04:06 -0800
commit     7597e4ad2a813f53a8924d3820b339d487469bdd (patch)
tree       7fb15d075dc7eb99a559b8a389aba8c84233c9b6
parent     a80981c9662bc439871ca2197a908632c82491d9 (diff)
[install] improve `package.json` validation (#2074)
- report error and exit gracefully instead of crashing
-rw-r--r--  src/install/install.zig                   | 108
-rw-r--r--  src/install/lockfile.zig                  | 118
-rw-r--r--  src/install/resolvers/folder_resolver.zig |   3
-rw-r--r--  src/json_parser.zig                       |   9
-rw-r--r--  src/report.zig                            |  12
-rw-r--r--  test/bun.js/install/bun-install.test.ts   | 196
6 files changed, 342 insertions, 104 deletions
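To illustrate the behavior this change targets before reading the diff: with this patch, a malformed `package.json` (for example, a `dependencies` field that is an array rather than a map) makes `bun install` print a descriptive error and exit with code 1 instead of crashing. The snippet below is a standalone sketch, not part of the commit; the temporary-directory setup and a patched `bun` on `PATH` are assumptions, and only the error text mirrors the tests added in this change.

```ts
// Hedged repro sketch (not from the commit): assumes a `bun` built with this patch is on PATH.
import { spawnSync } from "bun";
import { mkdtempSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

// A throwaway project whose "dependencies" is an array instead of a map.
const dir = mkdtempSync(join(tmpdir(), "bun-invalid-pkg-"));
writeFileSync(
  join(dir, "package.json"),
  JSON.stringify({ name: "foo", version: "0.0.1", dependencies: [] }),
);

const { exitCode, stderr } = spawnSync({
  cmd: ["bun", "install"],
  cwd: dir,
  stdout: "pipe",
  stderr: "pipe",
});

// With this change the process fails gracefully, e.g.:
//   error: dependencies expects a map of specifiers, e.g.
//   "dependencies": {
//     "bun": "latest"
//   }
console.log("exit code:", exitCode); // expected: 1, no panic
console.error(stderr.toString());
```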
diff --git a/src/install/install.zig b/src/install/install.zig
index 3034b82f8..ee46ad079 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -767,11 +767,10 @@ const PackageInstall = struct {
         resolution: *const Resolution,
         buf: []const u8,
     ) bool {
-        if (resolution.tag == .github) {
-            return this.verifyGitHubResolution(resolution, buf);
-        }
-
-        return this.verifyPackageJSONNameAndVersion();
+        return switch (resolution.tag) {
+            .github => this.verifyGitHubResolution(resolution, buf),
+            else => this.verifyPackageJSONNameAndVersion(),
+        };
     }
 
     fn verifyPackageJSONNameAndVersion(this: *PackageInstall) bool {
@@ -1026,7 +1025,7 @@ const PackageInstall = struct {
                             progress_.refresh();
 
                             Output.prettyErrorln("<r><red>{s}<r>: copying file {s}", .{ @errorName(err), entry.path });
-                            Global.exit(1);
+                            Global.crash();
                         };
                     };
                     defer outfile.close();
@@ -1043,7 +1042,7 @@ const PackageInstall = struct {
                         progress_.refresh();
 
                         Output.prettyErrorln("<r><red>{s}<r>: copying file {s}", .{ @errorName(err), entry.path });
-                        Global.exit(1);
+                        Global.crash();
                     };
                 }
 
@@ -1222,8 +1221,8 @@ const PackageInstall = struct {
         };
     }
 
-    pub fn uninstall(this: *PackageInstall) !void {
-        try this.destination_dir.dir.deleteTree(bun.span(this.destination_dir_subpath));
+    pub fn uninstall(this: *PackageInstall) void {
+        this.destination_dir.dir.deleteTree(bun.span(this.destination_dir_subpath)) catch {};
     }
 
     fn isDanglingSymlink(path: [:0]const u8) bool {
@@ -1253,7 +1252,7 @@ const PackageInstall = struct {
        const dest_path = this.destination_dir_subpath;
        // If this fails, we don't care.
        // we'll catch it the next error
-        if (!skip_delete and !strings.eqlComptime(dest_path, ".")) this.uninstall() catch {};
+        if (!skip_delete and !strings.eqlComptime(dest_path, ".")) this.uninstall();
 
        const subdir = std.fs.path.dirname(dest_path);
        var dest_dir = if (subdir) |dir| brk: {
@@ -1309,7 +1308,7 @@ const PackageInstall = struct {
 
        // If this fails, we don't care.
        // we'll catch it the next error
-        if (!skip_delete and !strings.eqlComptime(this.destination_dir_subpath, ".")) this.uninstall() catch {};
+        if (!skip_delete and !strings.eqlComptime(this.destination_dir_subpath, ".")) this.uninstall();
 
        var supported_method_to_use = if (strings.eqlComptime(this.cache_dir_subpath, ".") or strings.hasPrefixComptime(this.cache_dir_subpath, ".."))
            Method.symlink
@@ -1883,7 +1882,7 @@ pub const PackageManager = struct {
        return cachedGitHubFolderNamePrint(&cached_package_folder_name_buf, this.lockfile.str(&repository.resolved));
    }
 
-    pub fn cachedGitHubFolderNamePrintGuess(buf: []u8, string_buf: []const u8, repository: *const Repository) stringZ {
+    fn cachedGitHubFolderNamePrintGuess(buf: []u8, string_buf: []const u8, repository: *const Repository) stringZ {
        return std.fmt.bufPrintZ(
            buf,
            "@GH@{any}-{any}-{any}",
@@ -3027,9 +3026,8 @@ pub const PackageManager = struct {
        );
 
        var package = Lockfile.Package{};
-        Lockfile.Package.parse(
+        package.parse(
            manager.lockfile,
-            &package,
            manager.allocator,
            manager.log,
            package_json_source,
@@ -3043,7 +3041,7 @@ pub const PackageManager = struct {
            if (comptime log_level != .silent) {
                const string_buf = manager.lockfile.buffers.string_bytes.items;
                Output.prettyErrorln("<r><red>error:<r> expected package.json in <b>{any}<r> to be a JSON file: {s}\n", .{
-                    package.resolution.fmtURL(&manager.options, string_buf),
+                    resolution.fmtURL(&manager.options, string_buf),
                    @errorName(err),
                });
            }
@@ -4668,7 +4666,7 @@ pub const PackageManager = struct {
 
        var lockfile: Lockfile = undefined;
        var name: string = "";
-        var package: Lockfile.Package = Lockfile.Package{};
+        var package = Lockfile.Package{};
 
        // Step 1. parse the nearest package.json file
        {
@@ -4685,18 +4683,20 @@ pub const PackageManager = struct {
            );
 
            try lockfile.initEmpty(ctx.allocator);
-            try Lockfile.Package.parseMain(&lockfile, &package, ctx.allocator, manager.log, package_json_source, Features.folder);
+            try package.parseMain(&lockfile, ctx.allocator, manager.log, package_json_source, Features.folder);
 
            name = lockfile.str(&package.name);
            if (name.len == 0) {
-                if (manager.options.log_level != .silent)
+                if (manager.options.log_level != .silent) {
                    Output.prettyErrorln("<r><red>error:<r> package.json missing \"name\" <d>in \"{s}\"<r>", .{package_json_source.path.text});
+                }
                Global.crash();
            } else if (!strings.isNPMPackageName(name)) {
-                if (manager.options.log_level != .silent)
-                    Output.prettyErrorln("<r><red>error:<r> invalid package.json name \"{s}\" <d>in \"{s}\"<r>", .{
+                if (manager.options.log_level != .silent) {
+                    Output.prettyErrorln("<r><red>error:<r> invalid package.json name \"{s}\" <d>in \"{any}\"<r>", .{
                        name,
                        package_json_source.path.text,
                    });
+                }
                Global.crash();
            }
        }
@@ -4830,7 +4830,7 @@ pub const PackageManager = struct {
 
        var lockfile: Lockfile = undefined;
        var name: string = "";
-        var package: Lockfile.Package = Lockfile.Package{};
+        var package = Lockfile.Package{};
 
        // Step 1. parse the nearest package.json file
        {
@@ -4847,18 +4847,20 @@ pub const PackageManager = struct {
            );
 
            try lockfile.initEmpty(ctx.allocator);
-            try Lockfile.Package.parseMain(&lockfile, &package, ctx.allocator, manager.log, package_json_source, Features.folder);
+            try package.parseMain(&lockfile, ctx.allocator, manager.log, package_json_source, Features.folder);
 
            name = lockfile.str(&package.name);
            if (name.len == 0) {
-                if (manager.options.log_level != .silent)
+                if (manager.options.log_level != .silent) {
                    Output.prettyErrorln("<r><red>error:<r> package.json missing \"name\" <d>in \"{s}\"<r>", .{package_json_source.path.text});
+                }
                Global.crash();
            } else if (!strings.isNPMPackageName(name)) {
-                if (manager.options.log_level != .silent)
+                if (manager.options.log_level != .silent) {
                    Output.prettyErrorln("<r><red>error:<r> invalid package.json name \"{s}\" <d>in \"{s}\"<r>", .{
                        name,
                        package_json_source.path.text,
                    });
+                }
                Global.crash();
            }
        }
@@ -4922,7 +4924,7 @@ pub const PackageManager = struct {
            Global.exit(0);
        } else {
            Output.prettyln("<r><red>error:<r> bun unlink {{packageName}} not implemented yet", .{});
-            Global.exit(1);
+            Global.crash();
        }
    }
 
@@ -5102,7 +5104,7 @@ pub const PackageManager = struct {
            //         cli.omit.peer = true;
            //     } else {
            //         Output.prettyErrorln("<b>error<r><d>:<r> Invalid argument <b>\"--omit\"<r> must be one of <cyan>\"dev\"<r>, <cyan>\"optional\"<r>, or <cyan>\"peer\"<r>. ", .{});
-            //         Global.exit(1);
+            //         Global.crash();
            //     }
            // }
 
@@ -5208,7 +5210,7 @@ pub const PackageManager = struct {
                Output.prettyErrorln("<r><red>error<r><d>:<r> unrecognised dependency format: {s}", .{
                    positional,
                });
-                Global.exit(1);
+                Global.crash();
            };
            if (switch (version.tag) {
                .dist_tag => version.value.dist_tag.name.eql(placeholder, input, input),
@@ -5218,7 +5220,7 @@ pub const PackageManager = struct {
                Output.prettyErrorln("<r><red>error<r><d>:<r> unrecognised dependency format: {s}", .{
                    positional,
                });
-                Global.exit(1);
+                Global.crash();
            }
 
            var request = UpdateRequest{
@@ -5461,12 +5463,10 @@ pub const PackageManager = struct {
        if (op == .remove) {
            if (current_package_json.data != .e_object) {
                Output.prettyErrorln("<red>error<r><d>:<r> package.json is not an Object {{}}, so there's nothing to remove!", .{});
-                Global.exit(1);
-                return;
+                Global.crash();
            } else if (current_package_json.data.e_object.properties.len == 0) {
                Output.prettyErrorln("<red>error<r><d>:<r> package.json is empty {{}}, so there's nothing to remove!", .{});
-                Global.exit(1);
-                return;
+                Global.crash();
            } else if (current_package_json.asProperty("devDependencies") == null and
                current_package_json.asProperty("dependencies") == null and
                current_package_json.asProperty("optionalDependencies") == null and
@@ -5474,7 +5474,6 @@ pub const PackageManager = struct {
            {
                Output.prettyErrorln("package.json doesn't have dependencies, there's nothing to remove!", .{});
                Global.exit(0);
-                return;
            }
        }
 
@@ -5959,8 +5958,8 @@ pub const PackageManager = struct {
                    }
 
                    if (this.manager.options.enable.fail_early) {
-                        installer.uninstall() catch {};
-                        Global.exit(1);
+                        installer.uninstall();
+                        Global.crash();
                    }
                }
            }
@@ -6337,9 +6336,7 @@ pub const PackageManager = struct {
                        }
                    }
 
-                    if (this.options.enable.fail_early) {
-                        Global.exit(1);
-                    }
+                    if (this.options.enable.fail_early) Global.crash();
                }
 
                continue :outer;
@@ -6460,7 +6457,7 @@ pub const PackageManager = struct {
                    Output.flush();
                }
 
-                if (manager.options.enable.fail_early) Global.exit(1);
+                if (manager.options.enable.fail_early) Global.crash();
            },
            .ok => {
                differ: {
@@ -6479,9 +6476,8 @@ pub const PackageManager = struct {
                    try lockfile.initEmpty(ctx.allocator);
 
                    var maybe_root = Lockfile.Package{};
-                    try Lockfile.Package.parseMain(
+                    try maybe_root.parseMain(
                        &lockfile,
-                        &maybe_root,
                        ctx.allocator,
                        ctx.log,
                        package_json_source,
@@ -6504,11 +6500,10 @@ pub const PackageManager = struct {
                    had_any_diffs = had_any_diffs or sum > 0;
 
                    if (manager.options.enable.frozen_lockfile and had_any_diffs) {
-                        if (log_level != .silent) {
+                        if (comptime log_level != .silent) {
                            Output.prettyErrorln("<r><red>error<r>: lockfile had changes, but lockfile is frozen", .{});
                        }
-
-                        Global.exit(1);
+                        Global.crash();
                    }
 
                    // If you changed packages, we will copy over the new package from the new lockfile
@@ -6591,16 +6586,14 @@ pub const PackageManager = struct {
                try manager.lockfile.initEmpty(ctx.allocator);
 
                if (manager.options.enable.frozen_lockfile) {
-                    if (log_level != .silent) {
+                    if (comptime log_level != .silent) {
                        Output.prettyErrorln("<r><red>error<r>: lockfile had changes, but lockfile is frozen", .{});
                    }
-
-                    Global.exit(1);
+                    Global.crash();
                }
 
-                try Lockfile.Package.parseMain(
+                try root.parseMain(
                    manager.lockfile,
-                    &root,
                    ctx.allocator,
                    ctx.log,
                    package_json_source,
@@ -6615,10 +6608,7 @@ pub const PackageManager = struct {
                    _ = manager.getCacheDirectory();
                    _ = manager.getTemporaryDirectory();
                }
-                manager.enqueueDependencyList(
-                    root.dependencies,
-                    true,
-                );
+                manager.enqueueDependencyList(root.dependencies, true);
            }
 
            manager.flushDependencyQueue();
@@ -6644,10 +6634,10 @@ pub const PackageManager = struct {
                *PackageManager,
                manager,
                .{
-                    .onExtract = void{},
-                    .onResolve = void{},
-                    .onPackageManifestError = void{},
-                    .onPackageDownloadError = void{},
+                    .onExtract = {},
+                    .onResolve = {},
+                    .onPackageManifestError = {},
+                    .onPackageDownloadError = {},
                    .progress_bar = true,
                },
                log_level,
@@ -6668,9 +6658,7 @@ pub const PackageManager = struct {
                try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
            }
 
-            if (manager.log.errors > 0) {
-                Global.exit(1);
-            }
+            if (manager.log.errors > 0) Global.crash();
 
            const needs_clean_lockfile = had_any_diffs or needs_new_lockfile or manager.package_json_updates.len > 0;
            var did_meta_hash_change = needs_clean_lockfile;
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index cb4326699..eb0ca6c3a 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -889,15 +889,13 @@ pub const Printer = struct {
                        try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
                    }
                }
-                Global.exit(1);
-                return;
+                Global.crash();
            },
 
            .not_found => {
                Output.prettyErrorln("<r><red>lockfile not found:<r> {s}", .{
                    std.mem.span(lockfile_path),
                });
-                Global.exit(1);
-                return;
+                Global.crash();
            },
            .ok => {},
@@ -1372,9 +1370,7 @@ pub fn verifyResolutions(this: *Lockfile, local_features: Features, remote_featu
        }
    }
 
-    if (any_failed) {
-        Global.exit(1);
-    }
+    if (any_failed) Global.crash();
 }
 
 pub fn saveToDisk(this: *Lockfile, filename: stringZ) void {
@@ -2281,19 +2277,19 @@ pub const Package = extern struct {
        }
 
        pub fn parseMain(
-            lockfile: *Lockfile,
            package: *Lockfile.Package,
+            lockfile: *Lockfile,
            allocator: std.mem.Allocator,
            log: *logger.Log,
            source: logger.Source,
            comptime features: Features,
        ) !void {
-            return parse(lockfile, package, allocator, log, source, void, void{}, features);
+            return package.parse(lockfile, allocator, log, source, void, void{}, features);
        }
 
        pub fn parse(
-            lockfile: *Lockfile,
            package: *Lockfile.Package,
+            lockfile: *Lockfile,
            allocator: std.mem.Allocator,
            log: *logger.Log,
            source: logger.Source,
@@ -2303,10 +2299,7 @@ pub const Package = extern struct {
        ) !void {
            initializeStore();
 
-            // A valid package.json always has "{}" characters
-            if (source.contents.len < 2) return error.InvalidPackageJSON;
-
-            var json = json_parser.ParseJSONUTF8(&source, log, allocator) catch |err| {
+            const json = json_parser.ParseJSONUTF8(&source, log, allocator) catch |err| {
                if (Output.enable_ansi_colors) {
                    log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
                } else {
@@ -2314,11 +2307,10 @@ pub const Package = extern struct {
                }
 
                Output.prettyErrorln("<r><red>{s}<r> parsing package.json in <b>\"{s}\"<r>", .{ @errorName(err), source.path.prettyDir() });
-                Global.exit(1);
+                Global.crash();
            };
 
-            try parseWithJSON(
-                package,
+            try package.parseWithJSON(
                lockfile,
                allocator,
                log,
@@ -2398,7 +2390,12 @@ pub const Package = extern struct {
                            );
                            dependency_version.value.workspace = path;
                            var workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, @truncate(u32, external_name.hash));
-                            if (workspace_entry.found_existing) return error.@"Workspace name already exists";
+                            if (workspace_entry.found_existing) {
+                                log.addErrorFmt(&source, key.loc, allocator, "Workspace name \"{s}\" already exists", .{
+                                    external_name.slice(buf),
+                                }) catch {};
+                                return error.InstallFailed;
+                            }
                            workspace_entry.value_ptr.* = path;
                        },
                        else => {},
@@ -2593,6 +2590,15 @@ pub const Package = extern struct {
            if (json.asProperty(group.prop)) |dependencies_q| brk: {
                switch (dependencies_q.expr.data) {
                    .e_array => |arr| {
+                        if (!group.behavior.isWorkspace()) {
+                            log.addErrorFmt(&source, dependencies_q.loc, allocator,
+                                \\{0s} expects a map of specifiers, e.g.
+                                \\"{0s}": {{
+                                \\  "bun": "latest"
+                                \\}}
+                            , .{group.prop}) catch {};
+                            return error.InvalidPackageJSON;
+                        }
                        if (arr.items.len == 0) break :brk;
 
                        workspace_names = try allocator.alloc(string, arr.items.len);
@@ -2604,7 +2610,15 @@ pub const Package = extern struct {
                        for (arr.slice()) |item, i| {
                            defer fallback.fixed_buffer_allocator.reset();
 
-                            const path = item.asString(allocator) orelse return error.InvalidPackageJSON;
+                            const path = item.asString(allocator) orelse {
+                                log.addErrorFmt(&source, item.loc, allocator,
+                                    \\Workspaces expects an array of strings, e.g.
+                                    \\"workspaces": [
+                                    \\  "path/to/package"
+                                    \\]
+                                , .{}) catch {};
+                                return error.InvalidPackageJSON;
+                            };
 
                            var workspace_dir = std.fs.cwd().openIterableDir(path, .{}) catch |err| {
                                if (err == error.FileNotFound) {
@@ -2661,7 +2675,17 @@ pub const Package = extern struct {
                            var workspace_json = try json_parser.PackageJSONVersionChecker.init(allocator, &workspace_source, log);
 
                            _ = try workspace_json.parseExpr();
-                            if (!workspace_json.has_found_name) return error.InvalidPackageJSON;
+                            if (!workspace_json.has_found_name) {
+                                log.addErrorFmt(
+                                    &source,
+                                    dependencies_q.loc,
+                                    allocator,
+                                    "Missing \"name\" from package.json in {s}",
+                                    .{workspace_source.path.text},
+                                ) catch {};
+                                // report errors for multiple workspaces
+                                continue;
+                            }
 
                            const workspace_name = workspace_json.found_name;
 
@@ -2671,15 +2695,38 @@ pub const Package = extern struct {
                            workspace_names[i] = try allocator.dupe(u8, workspace_name);
                        }
 
-                        if (orig_msgs_len != log.msgs.items.len) {
-                            return error.InstallFailed;
-                        }
+                        if (orig_msgs_len != log.msgs.items.len) return error.InstallFailed;
                        total_dependencies_count += @truncate(u32, arr.items.len);
                    },
                    .e_object => |obj| {
+                        if (group.behavior.isWorkspace()) {
+                            log.addErrorFmt(&source, dependencies_q.loc, allocator,
+                                \\Workspaces expects an array of strings, e.g.
+                                \\"workspaces": [
+                                \\  "path/to/package"
+                                \\]
+                            , .{}) catch {};
+                            return error.InvalidPackageJSON;
+                        }
                        for (obj.properties.slice()) |item| {
-                            const key = item.key.?.asString(allocator) orelse return error.InvalidPackageJSON;
-                            const value = item.value.?.asString(allocator) orelse return error.InvalidPackageJSON;
+                            const key = item.key.?.asString(allocator) orelse {
+                                log.addErrorFmt(&source, item.key.?.loc, allocator,
+                                    \\{0s} expects a map of specifiers, e.g.
+                                    \\"{0s}": {{
+                                    \\  "bun": "latest"
+                                    \\}}
+                                , .{group.prop}) catch {};
+                                return error.InvalidPackageJSON;
+                            };
+                            const value = item.value.?.asString(allocator) orelse {
+                                log.addErrorFmt(&source, item.value.?.loc, allocator,
+                                    \\{0s} expects a map of specifiers, e.g.
+                                    \\"{0s}": {{
+                                    \\  "bun": "latest"
+                                    \\}}
+                                , .{group.prop}) catch {};
+                                return error.InvalidPackageJSON;
+                            };
 
                            string_builder.count(key);
                            string_builder.count(value);
@@ -2691,7 +2738,24 @@ pub const Package = extern struct {
                        }
                        total_dependencies_count += @truncate(u32, obj.properties.len);
                    },
-                    else => {},
+                    else => {
+                        if (group.behavior.isWorkspace()) {
+                            log.addErrorFmt(&source, dependencies_q.loc, allocator,
+                                \\Workspaces expects an array of strings, e.g.
+                                \\"workspaces": [
+                                \\  "path/to/package"
+                                \\]
+                            , .{}) catch {};
+                        } else {
+                            log.addErrorFmt(&source, dependencies_q.loc, allocator,
+                                \\{0s} expects a map of specifiers, e.g.
+                                \\"{0s}": {{
+                                \\  "bun": "latest"
+                                \\}}
+                            , .{group.prop}) catch {};
+                        }
+                        return error.InvalidPackageJSON;
+                    },
                }
            }
        }
@@ -2899,7 +2963,7 @@ pub const Package = extern struct {
                            }
                        }
                    },
-                    else => {},
+                    else => unreachable,
                }
            }
        }
diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig
index f525ee613..968ee57ca 100644
--- a/src/install/resolvers/folder_resolver.zig
+++ b/src/install/resolvers/folder_resolver.zig
@@ -149,9 +149,8 @@ pub const FolderResolution = union(Tag) {
 
        const source = logger.Source.initPathString(abs, body.data.list.items[0..source_buf]);
 
-        try Lockfile.Package.parse(
+        try package.parse(
            manager.lockfile,
-            &package,
            manager.allocator,
            manager.log,
            source,
diff --git a/src/json_parser.zig b/src/json_parser.zig
index fb3b7dedf..b420cbd43 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -318,15 +318,6 @@ fn JSONLikeParser_(
                }
 
                try p.lexer.unexpected();
-                if (comptime Environment.isDebug) {
-                    std.io.getStdErr().writer().print("\nThis range: {d} - {d} \n{s}", .{
-                        p.lexer.range().loc.start,
-                        p.lexer.range().end().start,
-                        p.lexer.range().in(p.lexer.source.contents),
-                    }) catch {};
-
-                    @breakpoint();
-                }
                return error.ParserError;
            },
        }
diff --git a/src/report.zig b/src/report.zig
index c8c1304b6..62b807f0e 100644
--- a/src/report.zig
+++ b/src/report.zig
@@ -336,7 +336,7 @@ pub noinline fn globalError(err: anyerror) noreturn {
            );
            Global.exit(1);
        },
-        error.InvalidArgument, error.InstallFailed => {
+        error.InvalidArgument, error.InstallFailed, error.InvalidPackageJSON => {
            Global.exit(1);
        },
        error.SystemFdQuotaExceeded => {
@@ -349,7 +349,7 @@ pub noinline fn globalError(err: anyerror) noreturn {
                    \\<d>Current limit: {d}<r>
                    \\
                    \\To fix this, try running:
-                    \\ 
+                    \\
                    \\  <cyan>sudo launchctl limit maxfiles 2147483646<r>
                    \\  <cyan>ulimit -n 2147483646<r>
                    \\
@@ -368,7 +368,7 @@ pub noinline fn globalError(err: anyerror) noreturn {
                    \\<d>Current limit: {d}<r>
                    \\
                    \\To fix this, try running:
-                    \\ 
+                    \\
                    \\  <cyan>sudo echo -e "\nfs.file-max=2147483646\n" >> /etc/sysctl.conf<r>
                    \\  <cyan>sudo sysctl -p<r>
                    \\  <cyan>ulimit -n 2147483646<r>
@@ -410,7 +410,7 @@ pub noinline fn globalError(err: anyerror) noreturn {
                    \\<d>Current limit: {d}<r>
                    \\
                    \\To fix this, try running:
-                    \\ 
+                    \\
                    \\  <cyan>ulimit -n 2147483646<r>
                    \\
                    \\You may also need to run:
@@ -430,7 +430,7 @@ pub noinline fn globalError(err: anyerror) noreturn {
                    \\<d>Current limit: {d}<r>
                    \\
                    \\To fix this, try running:
-                    \\ 
+                    \\
                    \\  <cyan>ulimit -n 2147483646<r>
                    \\
                    \\That will only work for the current shell. To fix this for the entire system, run:
@@ -474,7 +474,7 @@ pub noinline fn globalError(err: anyerror) noreturn {
                    \\<d>Current limit: {d}<r>
                    \\
                    \\To fix this, try running:
-                    \\ 
+                    \\
                    \\  <cyan>ulimit -n 2147483646<r>
                    \\
                ,
diff --git a/test/bun.js/install/bun-install.test.ts b/test/bun.js/install/bun-install.test.ts
index 9d7bc81cf..a47bbd2ce 100644
--- a/test/bun.js/install/bun-install.test.ts
+++ b/test/bun.js/install/bun-install.test.ts
@@ -1835,3 +1835,199 @@ it("should not regard peerDependencies declarations as duplicates", async () =>
  });
  await access(join(package_dir, "bun.lockb"));
 });
+
+it("should report error on invalid format for package.json", async () => {
+  await writeFile(join(package_dir, "package.json"), "foo");
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err.replace(/^bun install v.+\n/, "bun install\n").split(/\r?\n/)).toEqual([
+    "bun install",
+    "",
+    "",
+    "error: Unexpected foo",
+    "foo",
+    "^",
+    `${package_dir}/package.json:1:1 0`,
+    `ParserError parsing package.json in "${package_dir}/"`,
+    "",
+  ]);
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out).toEqual("");
+  expect(await exited).toBe(1);
+});
+
+it("should report error on invalid format for dependencies", async () => {
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: [],
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err.replace(/^bun install v.+\n/, "bun install\n").split(/\r?\n/)).toEqual([
+    "bun install",
+    "",
+    "",
+    "error: dependencies expects a map of specifiers, e.g.",
+    '"dependencies": {',
+    '  "bun": "latest"',
+    "}",
+    '{"name":"foo","version":"0.0.1","dependencies":[]}',
+    "                                ^",
+    `${package_dir}/package.json:1:33 32`,
+    "",
+  ]);
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out).toEqual("");
+  expect(await exited).toBe(1);
+});
+
+it("should report error on invalid format for optionalDependencies", async () => {
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      optionalDependencies: "bar",
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err.replace(/^bun install v.+\n/, "bun install\n").split(/\r?\n/)).toEqual([
+    "bun install",
+    "",
+    "",
+    "error: optionalDependencies expects a map of specifiers, e.g.",
+    '"optionalDependencies": {',
+    '  "bun": "latest"',
+    "}",
+    '{"name":"foo","version":"0.0.1","optionalDependencies":"bar"}',
+    "                                ^",
+    `${package_dir}/package.json:1:33 32`,
+    "",
+  ]);
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out).toEqual("");
+  expect(await exited).toBe(1);
+});
+
+it("should report error on invalid format for workspaces", async () => {
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      workspaces: {
+        packages: ["bar"],
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err.replace(/^bun install v.+\n/, "bun install\n").split(/\r?\n/)).toEqual([
+    "bun install",
+    "",
+    "",
+    "error: Workspaces expects an array of strings, e.g.",
+    '"workspaces": [',
+    '  "path/to/package"',
+    "]",
+    '{"name":"foo","version":"0.0.1","workspaces":{"packages":["bar"]}}',
+    "                                ^",
+    `${package_dir}/package.json:1:33 32`,
+    "",
+  ]);
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out).toEqual("");
+  expect(await exited).toBe(1);
+});
+
+it("should report error on duplicated workspace packages", async () => {
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      workspaces: ["bar", "baz"],
+    }),
+  );
+  await mkdir(join(package_dir, "bar"));
+  await writeFile(
+    join(package_dir, "bar", "package.json"),
+    JSON.stringify({
+      name: "moo",
+      version: "0.0.2",
+    }),
+  );
+  await mkdir(join(package_dir, "baz"));
+  await writeFile(
+    join(package_dir, "baz", "package.json"),
+    JSON.stringify({
+      name: "moo",
+      version: "0.0.3",
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err.replace(/^bun install v.+\n/, "bun install\n").split(/\r?\n/)).toEqual([
+    "bun install",
+    "",
+    "",
+    'error: Workspace name "moo" already exists',
+    '{"name":"foo","version":"0.0.1","workspaces":["bar","baz"]}',
+    "                                                    ^",
+    `${package_dir}/package.json:1:53 52`,
+    "",
+  ]);
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out).toEqual("");
+  expect(await exited).toBe(1);
+});