-rw-r--r--  src/install/bin.zig                      2
-rw-r--r--  src/install/extract_tarball.zig        137
-rw-r--r--  src/install/install.zig                284
-rw-r--r--  src/install/integrity.zig                2
-rw-r--r--  src/install/lockfile.zig                77
-rw-r--r--  src/install/npm.zig                      8
-rw-r--r--  src/install/repository.zig               2
-rw-r--r--  src/logger.zig                          15
-rw-r--r--  test/cli/install/bun-install.test.ts    74
9 files changed, 342 insertions, 259 deletions
diff --git a/src/install/bin.zig b/src/install/bin.zig
index b0e988269..f8117c1e8 100644
--- a/src/install/bin.zig
+++ b/src/install/bin.zig
@@ -281,7 +281,7 @@ pub const Bin = extern struct {
if (name[0] != '@') return name;
var name_ = name;
name_ = name[1..];
- return name_[(std.mem.indexOfScalar(u8, name_, '/') orelse return name) + 1 ..];
+ return name_[(strings.indexOfChar(name_, '/') orelse return name) + 1 ..];
}
fn setPermissions(folder: std.os.fd_t, target: [:0]const u8) void {
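
For context, the helper this hunk touches strips an npm scope prefix from a binary's package name. A minimal standalone sketch of the same behavior, using std.mem.indexOfScalar where the commit substitutes bun's strings.indexOfChar (assumed here to be a drop-in equivalent):

    const std = @import("std");

    // "@scope/pkg" becomes "pkg"; unscoped names pass through unchanged.
    fn withoutScope(name: []const u8) []const u8 {
        if (name.len == 0 or name[0] != '@') return name;
        const rest = name[1..];
        const slash = std.mem.indexOfScalar(u8, rest, '/') orelse return name;
        return rest[slash + 1 ..];
    }

    test "withoutScope" {
        try std.testing.expectEqualStrings("pkg", withoutScope("@scope/pkg"));
        try std.testing.expectEqualStrings("lodash", withoutScope("lodash"));
    }
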
diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig
index 0e5f8e188..3be00853f 100644
--- a/src/install/extract_tarball.zig
+++ b/src/install/extract_tarball.zig
@@ -157,13 +157,14 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
var tmpdir = this.temp_dir;
var tmpname_buf: [256]u8 = undefined;
const name = this.name.slice();
-
- var basename = this.name.slice();
- if (basename[0] == '@') {
- if (std.mem.indexOfScalar(u8, basename, '/')) |i| {
- basename = basename[i + 1 ..];
+ const basename = brk: {
+ if (name[0] == '@') {
+ if (strings.indexOfChar(name, '/')) |i| {
+ break :brk name[i + 1 ..];
+ }
}
- }
+ break :brk name;
+ };
var resolved: string = "";
var tmpname = try FileSystem.instance.tmpname(basename[0..@min(basename.len, 32)], &tmpname_buf, tgz_bytes.len);
@@ -216,8 +217,8 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
};
var dirname_reader = DirnameReader{ .outdirname = &resolved };
- _ = if (PackageManager.verbose_install)
- try Archive.extractToDir(
+ switch (PackageManager.verbose_install) {
+ inline else => |log| _ = try Archive.extractToDir(
zlib_pool.data.list.items,
extract_destination,
null,
@@ -226,20 +227,9 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
// for GitHub tarballs, the root dir is always <user>-<repo>-<commit_id>
1,
true,
- true,
- )
- else
- try Archive.extractToDir(
- zlib_pool.data.list.items,
- extract_destination,
- null,
- *DirnameReader,
- &dirname_reader,
- // for GitHub tarballs, the root dir is always <user>-<repo>-<commit_id>
- 1,
- true,
- false,
- );
+ log,
+ ),
+ }
// This tag is used to know which version of the package was
// installed from GitHub. package.json version becomes sort of
@@ -252,31 +242,18 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
};
}
},
- else => {
- _ = if (PackageManager.verbose_install)
- try Archive.extractToDir(
- zlib_pool.data.list.items,
- extract_destination,
- null,
- void,
- {},
- // for npm packages, the root dir is always "package"
- 1,
- true,
- true,
- )
- else
- try Archive.extractToDir(
- zlib_pool.data.list.items,
- extract_destination,
- null,
- void,
- {},
- // for npm packages, the root dir is always "package"
- 1,
- true,
- false,
- );
+ else => switch (PackageManager.verbose_install) {
+ inline else => |log| _ = try Archive.extractToDir(
+ zlib_pool.data.list.items,
+ extract_destination,
+ null,
+ void,
+ {},
+ // for npm packages, the root dir is always "package"
+ 1,
+ true,
+ log,
+ ),
},
}
@@ -343,7 +320,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
};
// create an index storing each version of a package installed
- if (std.mem.indexOfScalar(u8, basename, '/') == null) create_index: {
+ if (strings.indexOfChar(basename, '/') == null) create_index: {
var index_dir = cache_dir.makeOpenPathIterable(name, .{}) catch break :create_index;
defer index_dir.close();
index_dir.dir.symLink(
@@ -361,39 +338,39 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
var json_path: []u8 = "";
var json_buf: []u8 = "";
var json_len: usize = 0;
- switch (this.resolution.tag) {
- .github, .local_tarball, .remote_tarball => {
- const json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| {
- this.package_manager.log.addErrorFmt(
- null,
- logger.Loc.Empty,
- this.package_manager.allocator,
- "\"package.json\" for \"{s}\" failed to open: {s}",
- .{ name, @errorName(err) },
- ) catch unreachable;
- return error.InstallFailed;
- };
- defer json_file.close();
- const json_stat = try json_file.stat();
- json_buf = try this.package_manager.allocator.alloc(u8, json_stat.size + 64);
- json_len = try json_file.preadAll(json_buf, 0);
+ if (switch (this.resolution.tag) {
+ // TODO remove extracted files not matching any globs under "files"
+ .github, .local_tarball, .remote_tarball => true,
+ else => this.package_manager.lockfile.trusted_dependencies.contains(@truncate(u32, Semver.String.Builder.stringHash(name))),
+ }) {
+ const json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| {
+ this.package_manager.log.addErrorFmt(
+ null,
+ logger.Loc.Empty,
+ this.package_manager.allocator,
+ "\"package.json\" for \"{s}\" failed to open: {s}",
+ .{ name, @errorName(err) },
+ ) catch unreachable;
+ return error.InstallFailed;
+ };
+ defer json_file.close();
+ const json_stat = try json_file.stat();
+ json_buf = try this.package_manager.allocator.alloc(u8, json_stat.size + 64);
+ json_len = try json_file.preadAll(json_buf, 0);
- json_path = bun.getFdPath(
- json_file.handle,
- &json_path_buf,
- ) catch |err| {
- this.package_manager.log.addErrorFmt(
- null,
- logger.Loc.Empty,
- this.package_manager.allocator,
- "\"package.json\" for \"{s}\" failed to resolve: {s}",
- .{ name, @errorName(err) },
- ) catch unreachable;
- return error.InstallFailed;
- };
- // TODO remove extracted files not matching any globs under "files"
- },
- else => {},
+ json_path = bun.getFdPath(
+ json_file.handle,
+ &json_path_buf,
+ ) catch |err| {
+ this.package_manager.log.addErrorFmt(
+ null,
+ logger.Loc.Empty,
+ this.package_manager.allocator,
+ "\"package.json\" for \"{s}\" failed to resolve: {s}",
+ .{ name, @errorName(err) },
+ ) catch unreachable;
+ return error.InstallFailed;
+ };
}
const ret_json_path = try FileSystem.instance.dirname_store.append(@TypeOf(json_path), json_path);
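
The extract_tarball.zig hunks above, and many install.zig hunks below, replace duplicated if/else branches with a switch over a runtime bool and an `inline else` prong. The point of the pattern is that each prong is instantiated separately, so the captured value is comptime-known and can feed a comptime parameter (the archive log flag here; enable_ansi_colors and log_level elsewhere in this commit). A self-contained sketch of the idiom, with a hypothetical render helper standing in for the real callees:

    const std = @import("std");

    // The comptime flag selects the formatting at compile time.
    fn render(comptime pretty: bool, writer: anytype, msg: []const u8) !void {
        if (pretty) try writer.writeAll("\x1b[1m");
        try writer.writeAll(msg);
        if (pretty) try writer.writeAll("\x1b[0m");
    }

    pub fn main() !void {
        var args = std.process.args();
        _ = args.next();
        // runtime-known flag: passing any argument enables styling (illustrative only)
        const enable_colors = args.next() != null;
        const stdout = std.io.getStdOut().writer();
        switch (enable_colors) {
            // one prong is generated for true and one for false, so |pretty|
            // is a comptime constant inside the branch body
            inline else => |pretty| try render(pretty, stdout, "hello\n"),
        }
    }
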
diff --git a/src/install/install.zig b/src/install/install.zig
index 81e2a7bb8..22068bbf3 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -166,8 +166,8 @@ pub const ExternalStringList = ExternalSlice(ExternalString);
pub const VersionSlice = ExternalSlice(Semver.Version);
pub const ExternalStringMap = extern struct {
- name: ExternalStringList = ExternalStringList{},
- value: ExternalStringList = ExternalStringList{},
+ name: ExternalStringList = .{},
+ value: ExternalStringList = .{},
};
pub const PackageNameHash = u64;
@@ -467,7 +467,7 @@ pub const Features = struct {
is_main: bool = false,
optional_dependencies: bool = false,
peer_dependencies: bool = true,
- scripts: bool = false,
+ trusted_dependencies: bool = false,
workspaces: bool = false,
check_for_duplicate_dependencies: bool = false,
@@ -487,7 +487,7 @@ pub const Features = struct {
.dev_dependencies = true,
.is_main = true,
.optional_dependencies = true,
- .scripts = true,
+ .trusted_dependencies = true,
.workspaces = true,
};
@@ -499,7 +499,7 @@ pub const Features = struct {
pub const workspace = Features{
.dev_dependencies = true,
.optional_dependencies = true,
- .scripts = true,
+ .trusted_dependencies = true,
};
pub const link = Features{
@@ -3546,7 +3546,34 @@ pub const PackageManager = struct {
return package;
},
- else => {},
+ else => if (data.json_len > 0) {
+ const package_json_source = logger.Source.initPathString(
+ data.json_path,
+ data.json_buf[0..data.json_len],
+ );
+ initializeStore();
+ const json = json_parser.ParseJSONUTF8(
+ &package_json_source,
+ manager.log,
+ manager.allocator,
+ ) catch |err| {
+ if (comptime log_level != .silent) {
+ const string_buf = manager.lockfile.buffers.string_bytes.items;
+ Output.prettyErrorln("<r><red>error:<r> expected package.json in <b>{any}<r> to be a JSON file: {s}\n", .{
+ resolution.fmtURL(&manager.options, string_buf),
+ @errorName(err),
+ });
+ }
+ Global.crash();
+ };
+ var builder = manager.lockfile.stringBuilder();
+ Lockfile.Package.Scripts.parseCount(manager.allocator, &builder, json);
+ builder.allocate() catch unreachable;
+ if (comptime Environment.allow_assert) std.debug.assert(package_id.* != invalid_package_id);
+ var scripts = manager.lockfile.packages.items(.scripts)[package_id.*];
+ scripts.parseAlloc(manager.allocator, &builder, json);
+ scripts.filled = true;
+ },
}
return null;
@@ -3910,10 +3937,10 @@ pub const PackageManager = struct {
var task: Task = task_;
if (task.log.msgs.items.len > 0) {
- if (Output.enable_ansi_colors) {
- try task.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
- } else {
- try task.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ try task.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors);
+ },
}
}
@@ -5308,36 +5335,31 @@ pub const PackageManager = struct {
// When using bun, we only do staleness checks once per day
) -| std.time.s_per_day;
- manager.lockfile = brk: {
+ if (root_dir.entries.hasComptimeQuery("bun.lockb")) {
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ var parts = [_]string{
+ "./bun.lockb",
+ };
+ var lockfile_path = Path.joinAbsStringBuf(
+ Fs.FileSystem.instance.top_level_dir,
+ &buf,
+ &parts,
+ .auto,
+ );
+ buf[lockfile_path.len] = 0;
+ var lockfile_path_z = buf[0..lockfile_path.len :0];
- if (root_dir.entries.hasComptimeQuery("bun.lockb")) {
- var parts = [_]string{
- "./bun.lockb",
- };
- var lockfile_path = Path.joinAbsStringBuf(
- Fs.FileSystem.instance.top_level_dir,
- &buf,
- &parts,
- .auto,
- );
- buf[lockfile_path.len] = 0;
- var lockfile_path_z = buf[0..lockfile_path.len :0];
-
- const result = manager.lockfile.loadFromDisk(
- allocator,
- log,
- lockfile_path_z,
- );
-
- if (result == .ok) {
- break :brk result.ok;
- }
+ switch (manager.lockfile.loadFromDisk(
+ allocator,
+ log,
+ lockfile_path_z,
+ )) {
+ .ok => |lockfile| manager.lockfile = lockfile,
+ else => try manager.lockfile.initEmpty(allocator),
}
-
+ } else {
try manager.lockfile.initEmpty(allocator);
- break :brk manager.lockfile;
- };
+ }
return manager;
}
@@ -5448,7 +5470,7 @@ pub const PackageManager = struct {
// create scope if specified
if (name[0] == '@') {
- if (std.mem.indexOfScalar(u8, name, '/')) |i| {
+ if (strings.indexOfChar(name, '/')) |i| {
node_modules.dir.makeDir(name[0..i]) catch |err| brk: {
if (err == error.PathAlreadyExists) break :brk;
if (manager.options.log_level != .silent)
@@ -5513,11 +5535,7 @@ pub const PackageManager = struct {
} else {
// bun link lodash
switch (manager.options.log_level) {
- .default => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .default),
- .verbose => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .verbose),
- .silent => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .silent),
- .default_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .default_no_progress),
- .verbose_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .verbose_no_progress),
+ inline else => |log_level| try updatePackageJSONAndInstallWithManager(ctx, manager, .link, log_level),
}
}
}
@@ -6019,11 +6037,7 @@ pub const PackageManager = struct {
}
switch (manager.options.log_level) {
- .default => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .default),
- .verbose => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .verbose),
- .silent => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .silent),
- .default_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .default_no_progress),
- .verbose_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .verbose_no_progress),
+ inline else => |log_level| try updatePackageJSONAndInstallWithManager(ctx, manager, op, log_level),
}
}
@@ -6153,13 +6167,12 @@ pub const PackageManager = struct {
) !void {
if (ctx.log.errors > 0) {
if (comptime log_level != .silent) {
- if (Output.enable_ansi_colors) {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
- } else {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
+ },
}
}
-
Global.crash();
}
@@ -6183,10 +6196,10 @@ pub const PackageManager = struct {
initializeStore();
var current_package_json = json_parser.ParseJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| {
- if (Output.enable_ansi_colors) {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
- } else {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
+ },
}
if (err == error.ParserError and ctx.log.errors > 0) {
@@ -6313,7 +6326,7 @@ pub const PackageManager = struct {
// haha unless
defer if (auto_free) bun.default_allocator.free(old_ast_nodes);
- try installWithManager(ctx, manager, new_package_json_source, log_level);
+ try manager.installWithManager(ctx, new_package_json_source, log_level);
if (op == .update or op == .add or op == .link) {
for (manager.package_json_updates) |update| {
@@ -6435,11 +6448,7 @@ pub const PackageManager = struct {
};
try switch (manager.options.log_level) {
- .default => installWithManager(ctx, manager, package_json_contents, .default),
- .verbose => installWithManager(ctx, manager, package_json_contents, .verbose),
- .silent => installWithManager(ctx, manager, package_json_contents, .silent),
- .default_no_progress => installWithManager(ctx, manager, package_json_contents, .default_no_progress),
- .verbose_no_progress => installWithManager(ctx, manager, package_json_contents, .verbose_no_progress),
+ inline else => |log_level| manager.installWithManager(ctx, package_json_contents, log_level),
};
}
@@ -6593,10 +6602,10 @@ pub const PackageManager = struct {
const args = .{ name, @errorName(err) };
if (comptime log_level.showProgress()) {
- if (Output.enable_ansi_colors) {
- this.progress.log(comptime Output.prettyFmt(fmt, true), args);
- } else {
- this.progress.log(comptime Output.prettyFmt(fmt, false), args);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
+ },
}
} else {
Output.prettyErrorln(fmt, args);
@@ -6699,10 +6708,10 @@ pub const PackageManager = struct {
const args = .{ alias, @errorName(err) };
if (comptime log_level.showProgress()) {
- if (Output.enable_ansi_colors) {
- this.progress.log(comptime Output.prettyFmt(fmt, true), args);
- } else {
- this.progress.log(comptime Output.prettyFmt(fmt, false), args);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
+ },
}
} else {
Output.prettyErrorln(fmt, args);
@@ -6718,61 +6727,56 @@ pub const PackageManager = struct {
}
}
- var scripts = this.lockfile.packages.items(.scripts)[package_id];
- if (scripts.hasAny()) {
- var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
- const path_str = Path.joinAbsString(
- bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable,
- &[_]string{destination_dir_subpath},
- .posix,
- );
+ if (resolution.tag == .workspace or this.lockfile.trusted_dependencies.contains(@truncate(u32, String.Builder.stringHash(name)))) {
+ var scripts = this.lockfile.packages.items(.scripts)[package_id];
+ if (scripts.hasAny()) {
+ var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ const path_str = Path.joinAbsString(
+ bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable,
+ &[_]string{destination_dir_subpath},
+ .posix,
+ );
- scripts.enqueue(this.lockfile, buf, path_str);
- } else if (!scripts.filled and switch (resolution.tag) {
- .folder => Features.folder.scripts,
- .npm => Features.npm.scripts,
- .git, .github, .gitlab, .local_tarball, .remote_tarball => Features.tarball.scripts,
- .symlink => Features.link.scripts,
- .workspace => Features.workspace.scripts,
- else => false,
- }) {
- var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
- const path_str = Path.joinAbsString(
- bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable,
- &[_]string{destination_dir_subpath},
- .posix,
- );
+ scripts.enqueue(this.lockfile, buf, path_str);
+ } else if (!scripts.filled) {
+ var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ const path_str = Path.joinAbsString(
+ bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable,
+ &[_]string{destination_dir_subpath},
+ .posix,
+ );
- scripts.enqueueFromPackageJSON(
- this.manager.log,
- this.lockfile,
- this.node_modules_folder.dir,
- destination_dir_subpath,
- path_str,
- ) catch |err| {
- if (comptime log_level != .silent) {
- const fmt = "\n<r><red>error:<r> failed to parse life-cycle scripts for <b>{s}<r>: {s}\n";
- const args = .{ name, @errorName(err) };
-
- if (comptime log_level.showProgress()) {
- if (Output.enable_ansi_colors) {
- this.progress.log(comptime Output.prettyFmt(fmt, true), args);
+ scripts.enqueueFromPackageJSON(
+ this.manager.log,
+ this.lockfile,
+ this.node_modules_folder.dir,
+ destination_dir_subpath,
+ path_str,
+ ) catch |err| {
+ if (comptime log_level != .silent) {
+ const fmt = "\n<r><red>error:<r> failed to parse life-cycle scripts for <b>{s}<r>: {s}\n";
+ const args = .{ name, @errorName(err) };
+
+ if (comptime log_level.showProgress()) {
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
+ },
+ }
} else {
- this.progress.log(comptime Output.prettyFmt(fmt, false), args);
+ Output.prettyErrorln(fmt, args);
}
- } else {
- Output.prettyErrorln(fmt, args);
}
- }
- if (this.manager.options.enable.fail_early) {
- Global.exit(1);
- }
+ if (this.manager.options.enable.fail_early) {
+ Global.exit(1);
+ }
- Output.flush();
- this.summary.fail += 1;
- return;
- };
+ Output.flush();
+ this.summary.fail += 1;
+ return;
+ };
+ }
}
},
.fail => |cause| {
@@ -7241,10 +7245,10 @@ pub const PackageManager = struct {
const args = .{ name, @errorName(err) };
if (comptime log_level.showProgress()) {
- if (Output.enable_ansi_colors) {
- this.progress.log(comptime Output.prettyFmt(fmt, true), args);
- } else {
- this.progress.log(comptime Output.prettyFmt(fmt, false), args);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
+ },
}
} else {
Output.prettyErrorln(fmt, args);
@@ -7262,10 +7266,10 @@ pub const PackageManager = struct {
const args = .{lockfile.str(&names[package_id])};
if (comptime log_level.showProgress()) {
- if (Output.enable_ansi_colors) {
- this.progress.log(comptime Output.prettyFmt(fmt, true), args);
- } else {
- this.progress.log(comptime Output.prettyFmt(fmt, false), args);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args);
+ },
}
} else {
Output.prettyErrorln(fmt, args);
@@ -7312,8 +7316,8 @@ pub const PackageManager = struct {
}
fn installWithManager(
- ctx: Command.Context,
manager: *PackageManager,
+ ctx: Command.Context,
package_json_contents: string,
comptime log_level: Options.LogLevel,
) !void {
@@ -7326,7 +7330,7 @@ pub const PackageManager = struct {
manager.options.lockfile_path,
)
else
- Lockfile.LoadFromDiskResult{ .not_found = {} };
+ .{ .not_found = {} };
var root = Lockfile.Package{};
var needs_new_lockfile = load_lockfile_result != .ok or (load_lockfile_result.ok.buffers.dependencies.items.len == 0 and manager.package_json_updates.len > 0);
// this defaults to false
@@ -7360,10 +7364,10 @@ pub const PackageManager = struct {
}
if (ctx.log.errors > 0) {
- if (Output.enable_ansi_colors) {
- try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
- } else {
- try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors);
+ },
}
}
Output.flush();
@@ -7567,10 +7571,10 @@ pub const PackageManager = struct {
}
}
- if (Output.enable_ansi_colors) {
- try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
- } else {
- try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors);
+ },
}
if (manager.log.hasErrors()) Global.crash();
@@ -7731,10 +7735,10 @@ pub const PackageManager = struct {
.successfully_installed = install_summary.successfully_installed,
};
- if (Output.enable_ansi_colors) {
- try Lockfile.Printer.Tree.print(&printer, Output.WriterType, Output.writer(), true);
- } else {
- try Lockfile.Printer.Tree.print(&printer, Output.WriterType, Output.writer(), false);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ try Lockfile.Printer.Tree.print(&printer, Output.WriterType, Output.writer(), enable_ansi_colors);
+ },
}
if (!did_meta_hash_change) {
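
The net effect of the install.zig changes above is that lifecycle scripts (preinstall, install, postinstall, preprepare, prepare, postprepare) are only enqueued for workspace packages or for packages whose name appears in the lockfile's new trusted_dependencies set. A rough sketch of that gate, using std.AutoArrayHashMapUnmanaged and a Wyhash-based hash as stand-ins for the lockfile's NameHashSet and String.Builder.stringHash:

    const std = @import("std");

    const NameHashSet = std.AutoArrayHashMapUnmanaged(u32, void);

    fn nameHash(name: []const u8) u32 {
        // the diff stores a u32 truncation of a 64-bit string hash;
        // Wyhash is only a stand-in for the real stringHash
        return @truncate(std.hash.Wyhash.hash(0, name));
    }

    fn shouldRunScripts(trusted: *const NameHashSet, is_workspace: bool, name: []const u8) bool {
        return is_workspace or trusted.contains(nameHash(name));
    }

    test "only trusted or workspace packages may run lifecycle scripts" {
        var trusted: NameHashSet = .{};
        defer trusted.deinit(std.testing.allocator);
        try trusted.put(std.testing.allocator, nameHash("moo"), {});

        try std.testing.expect(shouldRunScripts(&trusted, false, "moo"));
        try std.testing.expect(!shouldRunScripts(&trusted, false, "bar"));
        try std.testing.expect(shouldRunScripts(&trusted, true, "bar"));
    }

The set itself is populated from the "trustedDependencies" array in package.json, as the lockfile.zig hunks below show.
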
diff --git a/src/install/integrity.zig b/src/install/integrity.zig
index 19e55c223..4634c2dfd 100644
--- a/src/install/integrity.zig
+++ b/src/install/integrity.zig
@@ -117,7 +117,7 @@ pub const Integrity = extern struct {
pub fn parse(buf: []const u8) Tag {
const Matcher = strings.ExactSizeMatcher(8);
- const i = std.mem.indexOfScalar(u8, buf[0..@min(buf.len, 7)], '-') orelse return Tag.unknown;
+ const i = strings.indexOfChar(buf[0..@min(buf.len, 7)], '-') orelse return Tag.unknown;
return switch (Matcher.match(buf[0..i])) {
Matcher.case("sha1") => Tag.sha1,
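
For reference, the integrity tag parse touched here splits an SRI string such as "sha512-<base64>" at the first '-' within its first 7 bytes ("sha512-" being the longest expected prefix) and matches the algorithm name. A simplified standalone sketch; the enum members and plain std.mem.eql comparisons are illustrative, while the real code matches through strings.ExactSizeMatcher:

    const std = @import("std");

    const Tag = enum { unknown, sha1, sha256, sha384, sha512 };

    fn parseTag(buf: []const u8) Tag {
        const head = buf[0..@min(buf.len, 7)];
        const i = std.mem.indexOfScalar(u8, head, '-') orelse return .unknown;
        const algo = buf[0..i];
        if (std.mem.eql(u8, algo, "sha1")) return .sha1;
        if (std.mem.eql(u8, algo, "sha256")) return .sha256;
        if (std.mem.eql(u8, algo, "sha384")) return .sha384;
        if (std.mem.eql(u8, algo, "sha512")) return .sha512;
        return .unknown;
    }

    test "parseTag" {
        try std.testing.expectEqual(Tag.sha512, parseTag("sha512-AAAA"));
        try std.testing.expectEqual(Tag.unknown, parseTag("md5-AAAA"));
    }
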
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index b17691853..a51d2b2ee 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -85,6 +85,7 @@ const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8;
const zero_hash = std.mem.zeroes(MetaHash);
const NameHashMap = std.ArrayHashMapUnmanaged(u32, String, ArrayIdentityContext, false);
+const NameHashSet = std.ArrayHashMapUnmanaged(u32, void, ArrayIdentityContext, false);
// Serialized data
/// The version of the lockfile format, intended to prevent data corruption for format changes.
@@ -103,6 +104,7 @@ allocator: Allocator,
scratch: Scratch = .{},
scripts: Scripts = .{},
+trusted_dependencies: NameHashSet = .{},
workspace_paths: NameHashMap = .{},
const Stream = std.io.FixedBufferStream([]u8);
@@ -113,15 +115,15 @@ pub const Scripts = struct {
cwd: string,
script: string,
};
- const StringArrayList = std.ArrayListUnmanaged(Entry);
+ const Entries = std.ArrayListUnmanaged(Entry);
const RunCommand = @import("../cli/run_command.zig").RunCommand;
- preinstall: StringArrayList = .{},
- install: StringArrayList = .{},
- postinstall: StringArrayList = .{},
- preprepare: StringArrayList = .{},
- prepare: StringArrayList = .{},
- postprepare: StringArrayList = .{},
+ preinstall: Entries = .{},
+ install: Entries = .{},
+ postinstall: Entries = .{},
+ preprepare: Entries = .{},
+ prepare: Entries = .{},
+ postprepare: Entries = .{},
pub fn hasAny(this: *Scripts) bool {
inline for (Package.Scripts.Hooks) |hook| {
@@ -195,6 +197,7 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: Allocator, log: *log
this.format = FormatVersion.current;
this.scripts = .{};
+ this.trusted_dependencies = .{};
this.workspace_paths = .{};
Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| {
@@ -633,6 +636,7 @@ pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile
}
pub fn cleanWithLogger(old: *Lockfile, updates: []PackageManager.UpdateRequest, log: *logger.Log) !*Lockfile {
+ const old_trusted_dependencies = old.trusted_dependencies;
const old_scripts = old.scripts;
// We will only shrink the number of packages here.
// never grow
@@ -738,6 +742,7 @@ pub fn cleanWithLogger(old: *Lockfile, updates: []PackageManager.UpdateRequest,
}
}
}
+ new.trusted_dependencies = old_trusted_dependencies;
new.scripts = old_scripts;
return new;
}
@@ -909,10 +914,10 @@ pub const Printer = struct {
}),
}
if (log.errors > 0) {
- if (Output.enable_ansi_colors) {
- try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
- } else {
- try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors);
+ },
}
}
Global.crash();
@@ -1493,6 +1498,7 @@ pub fn initEmpty(this: *Lockfile, allocator: Allocator) !void {
.allocator = allocator,
.scratch = Scratch.init(allocator),
.scripts = .{},
+ .trusted_dependencies = .{},
.workspace_paths = .{},
};
}
@@ -2438,12 +2444,11 @@ pub const Package = extern struct {
initializeStore();
const json = json_parser.ParseJSONUTF8(&source, log, allocator) catch |err| {
- if (Output.enable_ansi_colors) {
- log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
- } else {
- log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
+ switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| {
+ log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
+ },
}
-
Output.prettyErrorln("<r><red>{s}<r> parsing package.json in <b>\"{s}\"<r>", .{ @errorName(err), source.path.prettyDir() });
Global.crash();
};
@@ -2956,9 +2961,7 @@ pub const Package = extern struct {
}
}
- if (comptime features.scripts) {
- Package.Scripts.parseCount(allocator, &string_builder, json);
- }
+ Package.Scripts.parseCount(allocator, &string_builder, json);
if (comptime ResolverContext != void) {
resolver.count(*Lockfile.StringBuilder, &string_builder, json);
@@ -3113,6 +3116,37 @@ pub const Package = extern struct {
}
}
+ if (comptime features.trusted_dependencies) {
+ if (json.asProperty("trustedDependencies")) |q| {
+ switch (q.expr.data) {
+ .e_array => |arr| {
+ try lockfile.trusted_dependencies.ensureUnusedCapacity(allocator, arr.items.len);
+ for (arr.slice()) |item| {
+ const name = item.asString(allocator) orelse {
+ log.addErrorFmt(&source, q.loc, allocator,
+ \\trustedDependencies expects an array of strings, e.g.
+ \\"trustedDependencies": [
+ \\ "package_name"
+ \\]
+ , .{}) catch {};
+ return error.InvalidPackageJSON;
+ };
+ lockfile.trusted_dependencies.putAssumeCapacity(@truncate(u32, String.Builder.stringHash(name)), {});
+ }
+ },
+ else => {
+ log.addErrorFmt(&source, q.loc, allocator,
+ \\trustedDependencies expects an array of strings, e.g.
+ \\"trustedDependencies": [
+ \\ "package_name"
+ \\]
+ , .{}) catch {};
+ return error.InvalidPackageJSON;
+ },
+ }
+ }
+ }
+
try string_builder.allocate();
try lockfile.buffers.dependencies.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count);
try lockfile.buffers.resolutions.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count);
@@ -3233,9 +3267,7 @@ pub const Package = extern struct {
}
}
- if (comptime features.scripts) {
- package.scripts.parseAlloc(allocator, &string_builder, json);
- }
+ package.scripts.parseAlloc(allocator, &string_builder, json);
package.scripts.filled = true;
// It is allowed for duplicate dependencies to exist in optionalDependencies and regular dependencies
@@ -3511,6 +3543,7 @@ pub fn deinit(this: *Lockfile) void {
this.packages.deinit(this.allocator);
this.string_pool.deinit();
this.scripts.deinit(this.allocator);
+ this.trusted_dependencies.deinit(this.allocator);
this.workspace_paths.deinit(this.allocator);
}
diff --git a/src/install/npm.zig b/src/install/npm.zig
index 074041056..6edb6dcb4 100644
--- a/src/install/npm.zig
+++ b/src/install/npm.zig
@@ -80,14 +80,14 @@ pub const Registry = struct {
url.path = pathname;
}
- while (std.mem.lastIndexOfScalar(u8, pathname, ':')) |colon| {
+ while (strings.lastIndexOfChar(pathname, ':')) |colon| {
var segment = pathname[colon + 1 ..];
pathname = pathname[0..colon];
if (pathname.len > 1 and pathname[pathname.len - 1] == '/') {
pathname = pathname[0 .. pathname.len - 1];
}
- const eql_i = std.mem.indexOfScalar(u8, segment, '=') orelse continue;
+ const eql_i = strings.indexOfChar(segment, '=') orelse continue;
var value = segment[eql_i + 1 ..];
segment = segment[0..eql_i];
@@ -847,11 +847,11 @@ pub const PackageManifest = struct {
for (versions) |prop| {
const version_name = prop.key.?.asString(allocator) orelse continue;
- if (std.mem.indexOfScalar(u8, version_name, '-') != null) {
+ if (strings.indexOfChar(version_name, '-') != null) {
pre_versions_len += 1;
extern_string_count += 1;
} else {
- extern_string_count += @as(usize, @intFromBool(std.mem.indexOfScalar(u8, version_name, '+') != null));
+ extern_string_count += @as(usize, @intFromBool(strings.indexOfChar(version_name, '+') != null));
release_versions_len += 1;
}
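
The npm.zig loop above peels options off the end of a registry URL path, where each option rides in a ":key=value" segment (for example "/:_authToken=abc123"). A pared-down sketch of a single iteration; the real loop keeps popping segments and also trims trailing slashes:

    const std = @import("std");

    const Option = struct { key: []const u8, value: []const u8 };

    // Extract the trailing ":key=value" segment, if any (single-step sketch).
    fn lastOption(pathname: []const u8) ?Option {
        const colon = std.mem.lastIndexOfScalar(u8, pathname, ':') orelse return null;
        const segment = pathname[colon + 1 ..];
        const eql = std.mem.indexOfScalar(u8, segment, '=') orelse return null;
        return .{ .key = segment[0..eql], .value = segment[eql + 1 ..] };
    }

    test "lastOption" {
        const opt = lastOption("/custom/path/:_authToken=abc123") orelse return error.TestUnexpectedResult;
        try std.testing.expectEqualStrings("_authToken", opt.key);
        try std.testing.expectEqualStrings("abc123", opt.value);
    }
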
diff --git a/src/install/repository.zig b/src/install/repository.zig
index c4b68d9be..6546481e9 100644
--- a/src/install/repository.zig
+++ b/src/install/repository.zig
@@ -94,7 +94,7 @@ pub const Repository = extern struct {
if (!formatter.repository.resolved.isEmpty()) {
try writer.writeAll("#");
var resolved = formatter.repository.resolved.slice(formatter.buf);
- if (std.mem.lastIndexOfScalar(u8, resolved, '-')) |i| {
+ if (strings.lastIndexOfChar(resolved, '-')) |i| {
resolved = resolved[i + 1 ..];
}
try writer.writeAll(resolved);
diff --git a/src/logger.zig b/src/logger.zig
index 621e643b5..3279e9fd5 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -860,10 +860,9 @@ pub const Log = struct {
}
inline fn allocPrint(allocator: std.mem.Allocator, comptime fmt: string, args: anytype) !string {
- return if (Output.enable_ansi_colors)
- try std.fmt.allocPrint(allocator, Output.prettyFmt(fmt, true), args)
- else
- try std.fmt.allocPrint(allocator, Output.prettyFmt(fmt, false), args);
+ return try switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| std.fmt.allocPrint(allocator, Output.prettyFmt(fmt, enable_ansi_colors), args),
+ };
}
inline fn _addResolveErrorWithLevel(
@@ -1174,11 +1173,9 @@ pub const Log = struct {
}
pub fn printForLogLevel(self: *Log, to: anytype) !void {
- if (Output.enable_ansi_colors) {
- return self.printForLogLevelWithEnableAnsiColors(to, true);
- } else {
- return self.printForLogLevelWithEnableAnsiColors(to, false);
- }
+ return switch (Output.enable_ansi_colors) {
+ inline else => |enable_ansi_colors| self.printForLogLevelWithEnableAnsiColors(to, enable_ansi_colors),
+ };
}
pub fn printForLogLevelWithEnableAnsiColors(self: *Log, to: anytype, comptime enable_ansi_colors: bool) !void {
diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts
index 6baee23a9..595e7bcdd 100644
--- a/test/cli/install/bun-install.test.ts
+++ b/test/cli/install/bun-install.test.ts
@@ -1,7 +1,7 @@
import { file, listen, Socket, spawn } from "bun";
import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test";
import { bunExe, bunEnv as env } from "harness";
-import { access, mkdir, readlink, rm, writeFile } from "fs/promises";
+import { access, mkdir, readlink, realpath, rm, writeFile } from "fs/promises";
import { join } from "path";
import {
dummyAfterAll,
@@ -4473,3 +4473,75 @@ cache = false
expect(await file(join(package_dir, "package.json")).text()).toEqual(foo_package);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([]);
}, 20000);
+
+it("should handle trustedDependencies", async () => {
+ const scripts = {
+ preinstall: `${bunExe()} echo.js preinstall`,
+ install: `${bunExe()} echo.js install`,
+ postinstall: `${bunExe()} echo.js postinstall`,
+ preprepare: `${bunExe()} echo.js preprepare`,
+ prepare: `${bunExe()} echo.js prepare`,
+ postprepare: `${bunExe()} echo.js postprepare`,
+ };
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "foo",
+ version: "0.1.0",
+ dependencies: {
+ bar: "file:./bar",
+ moo: "file:./moo",
+ },
+ trustedDependencies: ["moo"],
+ }),
+ );
+ await mkdir(join(package_dir, "bar"));
+ const bar_package = JSON.stringify({
+ name: "bar",
+ version: "0.2.0",
+ scripts,
+ });
+ await writeFile(join(package_dir, "bar", "package.json"), bar_package);
+ await writeFile(join(package_dir, "bar", "echo.js"), "console.log(`bar|${process.argv[2]}|${import.meta.dir}`);");
+ await mkdir(join(package_dir, "moo"));
+ const moo_package = JSON.stringify({
+ name: "moo",
+ version: "0.3.0",
+ scripts,
+ });
+ await writeFile(join(package_dir, "moo", "package.json"), moo_package);
+ await writeFile(join(package_dir, "moo", "echo.js"), "console.log(`moo|${process.argv[2]}|${import.meta.dir}`);");
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ const moo_dir = await realpath(join(package_dir, "node_modules", "moo"));
+ expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ `moo|preinstall|${moo_dir}`,
+ " + bar@bar",
+ " + moo@moo",
+ `moo|install|${moo_dir}`,
+ `moo|postinstall|${moo_dir}`,
+ `moo|preprepare|${moo_dir}`,
+ `moo|prepare|${moo_dir}`,
+ `moo|postprepare|${moo_dir}`,
+ "",
+ " 2 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "bar", "moo"]);
+ expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["echo.js", "package.json"]);
+ expect(await file(join(package_dir, "node_modules", "bar", "package.json")).text()).toEqual(bar_package);
+ expect(await readdirSorted(join(package_dir, "node_modules", "moo"))).toEqual(["echo.js", "package.json"]);
+ expect(await file(join(package_dir, "node_modules", "moo", "package.json")).text()).toEqual(moo_package);
+ await access(join(package_dir, "bun.lockb"));
+});