Diffstat (limited to 'src/install')
-rw-r--r-- | src/install/bin.zig                       |  32
-rw-r--r-- | src/install/dependency.zig                |  48
-rw-r--r-- | src/install/extract_tarball.zig           | 137
-rw-r--r-- | src/install/install.zig                   | 556
-rw-r--r-- | src/install/integrity.zig                 |   6
-rw-r--r-- | src/install/lockfile.zig                  | 244
-rw-r--r-- | src/install/npm.zig                       |  58
-rw-r--r-- | src/install/repository.zig                |   2
-rw-r--r-- | src/install/resolution.zig                |   2
-rw-r--r-- | src/install/resolvers/folder_resolver.zig |   6
-rw-r--r-- | src/install/semver.zig                    |  24
11 files changed, 610 insertions, 505 deletions
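Most of the churn below is the mechanical migration to the renamed Zig builtins (@ptrToInt → @intFromPtr, @enumToInt → @intFromEnum, @boolToInt → @intFromBool, @intToEnum → @enumFromInt, @intToPtr → @ptrFromInt), the slice-based @memcpy/@memset signatures, and the lower-cased std.fs entry kinds (.File → .file, .SymLink → .sym_link, .Directory → .directory); the remainder consolidates duplicated branches with `inline else` switches and adds trusted_dependencies, --exact, and --frozen-lockfile handling. The short sketch below is not taken from the diff — the buffer names and the lengthUpTo helper are invented for illustration — it only shows the new spellings these hunks adopt for the pointer-offset and copy patterns in bin.zig:

const std = @import("std");

// Hypothetical helper mirroring the buffer-offset computation in bin.zig:
// old: @ptrToInt(remain.ptr) - @ptrToInt(buf.ptr)
fn lengthUpTo(buf: []const u8, remain: []const u8) usize {
    return @intFromPtr(remain.ptr) - @intFromPtr(buf.ptr);
}

pub fn main() void {
    var dest_buf: [16]u8 = undefined;
    const name = "lodash";

    // old: @memcpy(&dest_buf, name.ptr, name.len) — new form takes a slice destination
    @memcpy(dest_buf[0..name.len], name);
    const remain = dest_buf[name.len..];

    // old: std.mem.set(u8, remain, 0)
    @memset(remain, 0);

    std.debug.print("copied {d} bytes\n", .{lengthUpTo(&dest_buf, remain)});
}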
diff --git a/src/install/bin.zig b/src/install/bin.zig index 371ce0dde..f8117c1e8 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -281,7 +281,7 @@ pub const Bin = extern struct { if (name[0] != '@') return name; var name_ = name; name_ = name[1..]; - return name_[(std.mem.indexOfScalar(u8, name_, '/') orelse return name) + 1 ..]; + return name_[(strings.indexOfChar(name_, '/') orelse return name) + 1 ..]; } fn setPermissions(folder: std.os.fd_t, target: [:0]const u8) void { @@ -382,7 +382,7 @@ pub const Bin = extern struct { bun.copy(u8, remain, target); remain = remain[target.len..]; remain[0] = 0; - const target_len = @ptrToInt(remain.ptr) - @ptrToInt(&dest_buf); + const target_len = @intFromPtr(remain.ptr) - @intFromPtr(&dest_buf); remain = remain[1..]; var target_path: [:0]u8 = dest_buf[0..target_len :0]; @@ -392,7 +392,7 @@ pub const Bin = extern struct { bun.copy(u8, from_remain, unscoped_name); from_remain = from_remain[unscoped_name.len..]; from_remain[0] = 0; - var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; + var dest_path: [:0]u8 = target_buf[0 .. @intFromPtr(from_remain.ptr) - @intFromPtr(&target_buf) :0]; this.setSimlinkAndPermissions(target_path, dest_path); }, @@ -404,7 +404,7 @@ pub const Bin = extern struct { bun.copy(u8, remain, target); remain = remain[target.len..]; remain[0] = 0; - const target_len = @ptrToInt(remain.ptr) - @ptrToInt(&dest_buf); + const target_len = @intFromPtr(remain.ptr) - @intFromPtr(&dest_buf); remain = remain[1..]; var target_path: [:0]u8 = dest_buf[0..target_len :0]; @@ -412,7 +412,7 @@ pub const Bin = extern struct { bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; - var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; + var dest_path: [:0]u8 = target_buf[0 .. @intFromPtr(from_remain.ptr) - @intFromPtr(&target_buf) :0]; this.setSimlinkAndPermissions(target_path, dest_path); }, @@ -434,7 +434,7 @@ pub const Bin = extern struct { bun.copy(u8, remain, target); remain = remain[target.len..]; remain[0] = 0; - const target_len = @ptrToInt(remain.ptr) - @ptrToInt(&dest_buf); + const target_len = @intFromPtr(remain.ptr) - @intFromPtr(&dest_buf); remain = remain[1..]; var target_path: [:0]u8 = dest_buf[0..target_len :0]; @@ -442,7 +442,7 @@ pub const Bin = extern struct { bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; - var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; + var dest_path: [:0]u8 = target_buf[0 .. 
@intFromPtr(from_remain.ptr) - @intFromPtr(&target_buf) :0]; this.setSimlinkAndPermissions(target_path, dest_path); } @@ -461,7 +461,7 @@ pub const Bin = extern struct { var dir = std.fs.Dir{ .fd = this.package_installed_node_modules }; var joined = Path.joinStringBuf(&target_buf, &parts, .auto); - @intToPtr([*]u8, @ptrToInt(joined.ptr))[joined.len] = 0; + @ptrFromInt([*]u8, @intFromPtr(joined.ptr))[joined.len] = 0; var joined_: [:0]const u8 = joined.ptr[0..joined.len :0]; var child_dir = bun.openDir(dir, joined_) catch |err| { this.err = err; @@ -482,12 +482,12 @@ pub const Bin = extern struct { while (iter.next() catch null) |entry_| { const entry: std.fs.IterableDir.Entry = entry_; switch (entry.kind) { - std.fs.IterableDir.Entry.Kind.SymLink, std.fs.IterableDir.Entry.Kind.File => { + std.fs.IterableDir.Entry.Kind.sym_link, std.fs.IterableDir.Entry.Kind.file => { target_buf_remain = prev_target_buf_remain; bun.copy(u8, target_buf_remain, entry.name); target_buf_remain = target_buf_remain[entry.name.len..]; target_buf_remain[0] = 0; - var from_path: [:0]u8 = target_buf[0 .. @ptrToInt(target_buf_remain.ptr) - @ptrToInt(&target_buf) :0]; + var from_path: [:0]u8 = target_buf[0 .. @intFromPtr(target_buf_remain.ptr) - @intFromPtr(&target_buf) :0]; var to_path = if (!link_global) std.fmt.bufPrintZ(&dest_buf, dot_bin ++ "{s}", .{entry.name}) catch continue else @@ -519,7 +519,7 @@ pub const Bin = extern struct { return; } - @memcpy(&target_buf, this.global_bin_path.ptr, this.global_bin_path.len); + @memcpy(target_buf[0..this.global_bin_path.len], this.global_bin_path); from_remain = target_buf[this.global_bin_path.len..]; from_remain[0] = std.fs.path.sep; from_remain = from_remain[1..]; @@ -557,7 +557,7 @@ pub const Bin = extern struct { bun.copy(u8, from_remain, unscoped_name); from_remain = from_remain[unscoped_name.len..]; from_remain[0] = 0; - var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; + var dest_path: [:0]u8 = target_buf[0 .. @intFromPtr(from_remain.ptr) - @intFromPtr(&target_buf) :0]; std.os.unlinkatZ(this.root_node_modules_folder, dest_path, 0) catch {}; }, @@ -566,7 +566,7 @@ pub const Bin = extern struct { bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; - var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; + var dest_path: [:0]u8 = target_buf[0 .. @intFromPtr(from_remain.ptr) - @intFromPtr(&target_buf) :0]; std.os.unlinkatZ(this.root_node_modules_folder, dest_path, 0) catch {}; }, @@ -594,7 +594,7 @@ pub const Bin = extern struct { bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; - var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; + var dest_path: [:0]u8 = target_buf[0 .. 
@intFromPtr(from_remain.ptr) - @intFromPtr(&target_buf) :0]; std.os.unlinkatZ(this.root_node_modules_folder, dest_path, 0) catch {}; } @@ -613,7 +613,7 @@ pub const Bin = extern struct { var dir = std.fs.Dir{ .fd = this.package_installed_node_modules }; var joined = Path.joinStringBuf(&target_buf, &parts, .auto); - @intToPtr([*]u8, @ptrToInt(joined.ptr))[joined.len] = 0; + @ptrFromInt([*]u8, @intFromPtr(joined.ptr))[joined.len] = 0; var joined_: [:0]const u8 = joined.ptr[0..joined.len :0]; var child_dir = bun.openDir(dir, joined_) catch |err| { this.err = err; @@ -634,7 +634,7 @@ pub const Bin = extern struct { while (iter.next() catch null) |entry_| { const entry: std.fs.IterableDir.Entry = entry_; switch (entry.kind) { - std.fs.IterableDir.Entry.Kind.SymLink, std.fs.IterableDir.Entry.Kind.File => { + std.fs.IterableDir.Entry.Kind.sym_link, std.fs.IterableDir.Entry.Kind.file => { target_buf_remain = prev_target_buf_remain; bun.copy(u8, target_buf_remain, entry.name); target_buf_remain = target_buf_remain[entry.name.len..]; diff --git a/src/install/dependency.zig b/src/install/dependency.zig index 0480dd9d8..6ef02bbfc 100644 --- a/src/install/dependency.zig +++ b/src/install/dependency.zig @@ -147,7 +147,7 @@ pub fn toDependency( return Dependency{ .name = name, .name_hash = @bitCast(u64, this[8..16].*), - .behavior = @intToEnum(Dependency.Behavior, this[16]), + .behavior = @enumFromInt(Dependency.Behavior, this[16]), .version = Dependency.Version.toVersion(name, this[17..this.len].*, ctx), }; } @@ -156,7 +156,7 @@ pub fn toExternal(this: Dependency) External { var bytes: External = undefined; bytes[0..this.name.bytes.len].* = this.name.bytes; bytes[8..16].* = @bitCast([8]u8, this.name_hash); - bytes[16] = @enumToInt(this.behavior); + bytes[16] = @intFromEnum(this.behavior); bytes[17..bytes.len].* = this.version.toExternal(); return bytes; } @@ -265,7 +265,7 @@ pub const Version = struct { ctx: Dependency.Context, ) Dependency.Version { const slice = String{ .bytes = bytes[1..9].* }; - const tag = @intToEnum(Dependency.Version.Tag, bytes[0]); + const tag = @enumFromInt(Dependency.Version.Tag, bytes[0]); const sliced = &slice.sliced(ctx.buffer); return Dependency.parseWithTag( ctx.allocator, @@ -279,7 +279,7 @@ pub const Version = struct { pub inline fn toExternal(this: Version) Version.External { var bytes: Version.External = undefined; - bytes[0] = @enumToInt(this.tag); + bytes[0] = @intFromEnum(this.tag); bytes[1..9].* = this.literal.bytes; return bytes; } @@ -339,7 +339,7 @@ pub const Version = struct { github = 8, pub inline fn isNPM(this: Tag) bool { - return @enumToInt(this) < 3; + return @intFromEnum(this) < 3; } pub fn infer(dependency: string) Tag { @@ -493,7 +493,7 @@ pub const Version = struct { // npm:package@1.2.3 'n' => { if (strings.hasPrefixComptime(dependency, "npm:") and dependency.len > "npm:".len) { - const remain = dependency["npm:".len + @boolToInt(dependency["npm:".len] == '@') ..]; + const remain = dependency["npm:".len + @intFromBool(dependency["npm:".len] == '@') ..]; for (remain, 0..) 
|c, i| { if (c == '@') { return infer(remain[i + 1 ..]); @@ -645,7 +645,7 @@ pub fn parseWithTag( var input = dependency; const name = if (strings.hasPrefixComptime(input, "npm:")) sliced.sub(brk: { var str = input["npm:".len..]; - var i: usize = @boolToInt(str.len > 0 and str[0] == '@'); + var i: usize = @intFromBool(str.len > 0 and str[0] == '@'); while (i < str.len) : (i += 1) { if (str[i] == '@') { @@ -694,7 +694,7 @@ pub fn parseWithTag( // npm:@foo/bar@latest // ^ - i += @boolToInt(dependency[i] == '@'); + i += @intFromBool(dependency[i] == '@'); while (i < dependency.len) : (i += 1) { // npm:@foo/bar@latest @@ -899,67 +899,67 @@ pub const Behavior = enum(u8) { pub const workspace: u8 = 1 << 5; pub inline fn isNormal(this: Behavior) bool { - return (@enumToInt(this) & Behavior.normal) != 0; + return (@intFromEnum(this) & Behavior.normal) != 0; } pub inline fn isOptional(this: Behavior) bool { - return (@enumToInt(this) & Behavior.optional) != 0 and !this.isPeer(); + return (@intFromEnum(this) & Behavior.optional) != 0 and !this.isPeer(); } pub inline fn isDev(this: Behavior) bool { - return (@enumToInt(this) & Behavior.dev) != 0; + return (@intFromEnum(this) & Behavior.dev) != 0; } pub inline fn isPeer(this: Behavior) bool { - return (@enumToInt(this) & Behavior.peer) != 0; + return (@intFromEnum(this) & Behavior.peer) != 0; } pub inline fn isWorkspace(this: Behavior) bool { - return (@enumToInt(this) & Behavior.workspace) != 0; + return (@intFromEnum(this) & Behavior.workspace) != 0; } pub inline fn setNormal(this: Behavior, value: bool) Behavior { if (value) { - return @intToEnum(Behavior, @enumToInt(this) | Behavior.normal); + return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.normal); } else { - return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.normal); + return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.normal); } } pub inline fn setOptional(this: Behavior, value: bool) Behavior { if (value) { - return @intToEnum(Behavior, @enumToInt(this) | Behavior.optional); + return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.optional); } else { - return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.optional); + return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.optional); } } pub inline fn setDev(this: Behavior, value: bool) Behavior { if (value) { - return @intToEnum(Behavior, @enumToInt(this) | Behavior.dev); + return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.dev); } else { - return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.dev); + return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.dev); } } pub inline fn setPeer(this: Behavior, value: bool) Behavior { if (value) { - return @intToEnum(Behavior, @enumToInt(this) | Behavior.peer); + return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.peer); } else { - return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.peer); + return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.peer); } } pub inline fn setWorkspace(this: Behavior, value: bool) Behavior { if (value) { - return @intToEnum(Behavior, @enumToInt(this) | Behavior.workspace); + return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.workspace); } else { - return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.workspace); + return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.workspace); } } pub inline fn cmp(lhs: Behavior, rhs: Behavior) std.math.Order { - if (@enumToInt(lhs) == @enumToInt(rhs)) { + if (@intFromEnum(lhs) == @intFromEnum(rhs)) { return .eq; } diff --git 
a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index 0e5f8e188..3be00853f 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -157,13 +157,14 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD var tmpdir = this.temp_dir; var tmpname_buf: [256]u8 = undefined; const name = this.name.slice(); - - var basename = this.name.slice(); - if (basename[0] == '@') { - if (std.mem.indexOfScalar(u8, basename, '/')) |i| { - basename = basename[i + 1 ..]; + const basename = brk: { + if (name[0] == '@') { + if (strings.indexOfChar(name, '/')) |i| { + break :brk name[i + 1 ..]; + } } - } + break :brk name; + }; var resolved: string = ""; var tmpname = try FileSystem.instance.tmpname(basename[0..@min(basename.len, 32)], &tmpname_buf, tgz_bytes.len); @@ -216,8 +217,8 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD }; var dirname_reader = DirnameReader{ .outdirname = &resolved }; - _ = if (PackageManager.verbose_install) - try Archive.extractToDir( + switch (PackageManager.verbose_install) { + inline else => |log| _ = try Archive.extractToDir( zlib_pool.data.list.items, extract_destination, null, @@ -226,20 +227,9 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD // for GitHub tarballs, the root dir is always <user>-<repo>-<commit_id> 1, true, - true, - ) - else - try Archive.extractToDir( - zlib_pool.data.list.items, - extract_destination, - null, - *DirnameReader, - &dirname_reader, - // for GitHub tarballs, the root dir is always <user>-<repo>-<commit_id> - 1, - true, - false, - ); + log, + ), + } // This tag is used to know which version of the package was // installed from GitHub. package.json version becomes sort of @@ -252,31 +242,18 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD }; } }, - else => { - _ = if (PackageManager.verbose_install) - try Archive.extractToDir( - zlib_pool.data.list.items, - extract_destination, - null, - void, - {}, - // for npm packages, the root dir is always "package" - 1, - true, - true, - ) - else - try Archive.extractToDir( - zlib_pool.data.list.items, - extract_destination, - null, - void, - {}, - // for npm packages, the root dir is always "package" - 1, - true, - false, - ); + else => switch (PackageManager.verbose_install) { + inline else => |log| _ = try Archive.extractToDir( + zlib_pool.data.list.items, + extract_destination, + null, + void, + {}, + // for npm packages, the root dir is always "package" + 1, + true, + log, + ), }, } @@ -343,7 +320,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD }; // create an index storing each version of a package installed - if (std.mem.indexOfScalar(u8, basename, '/') == null) create_index: { + if (strings.indexOfChar(basename, '/') == null) create_index: { var index_dir = cache_dir.makeOpenPathIterable(name, .{}) catch break :create_index; defer index_dir.close(); index_dir.dir.symLink( @@ -361,39 +338,39 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD var json_path: []u8 = ""; var json_buf: []u8 = ""; var json_len: usize = 0; - switch (this.resolution.tag) { - .github, .local_tarball, .remote_tarball => { - const json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| { - this.package_manager.log.addErrorFmt( - null, - logger.Loc.Empty, - this.package_manager.allocator, - "\"package.json\" for \"{s}\" failed to open: {s}", - .{ name, 
@errorName(err) }, - ) catch unreachable; - return error.InstallFailed; - }; - defer json_file.close(); - const json_stat = try json_file.stat(); - json_buf = try this.package_manager.allocator.alloc(u8, json_stat.size + 64); - json_len = try json_file.preadAll(json_buf, 0); + if (switch (this.resolution.tag) { + // TODO remove extracted files not matching any globs under "files" + .github, .local_tarball, .remote_tarball => true, + else => this.package_manager.lockfile.trusted_dependencies.contains(@truncate(u32, Semver.String.Builder.stringHash(name))), + }) { + const json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| { + this.package_manager.log.addErrorFmt( + null, + logger.Loc.Empty, + this.package_manager.allocator, + "\"package.json\" for \"{s}\" failed to open: {s}", + .{ name, @errorName(err) }, + ) catch unreachable; + return error.InstallFailed; + }; + defer json_file.close(); + const json_stat = try json_file.stat(); + json_buf = try this.package_manager.allocator.alloc(u8, json_stat.size + 64); + json_len = try json_file.preadAll(json_buf, 0); - json_path = bun.getFdPath( - json_file.handle, - &json_path_buf, - ) catch |err| { - this.package_manager.log.addErrorFmt( - null, - logger.Loc.Empty, - this.package_manager.allocator, - "\"package.json\" for \"{s}\" failed to resolve: {s}", - .{ name, @errorName(err) }, - ) catch unreachable; - return error.InstallFailed; - }; - // TODO remove extracted files not matching any globs under "files" - }, - else => {}, + json_path = bun.getFdPath( + json_file.handle, + &json_path_buf, + ) catch |err| { + this.package_manager.log.addErrorFmt( + null, + logger.Loc.Empty, + this.package_manager.allocator, + "\"package.json\" for \"{s}\" failed to resolve: {s}", + .{ name, @errorName(err) }, + ) catch unreachable; + return error.InstallFailed; + }; } const ret_json_path = try FileSystem.instance.dirname_store.append(@TypeOf(json_path), json_path); diff --git a/src/install/install.zig b/src/install/install.zig index 88c8f653f..65f4a35e9 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -146,12 +146,12 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type pub fn init(buf: []const Type, in: []const Type) Slice { // if (comptime Environment.allow_assert) { - // std.debug.assert(@ptrToInt(buf.ptr) <= @ptrToInt(in.ptr)); - // std.debug.assert((@ptrToInt(in.ptr) + in.len) <= (@ptrToInt(buf.ptr) + buf.len)); + // std.debug.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr)); + // std.debug.assert((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len)); // } return Slice{ - .off = @truncate(u32, (@ptrToInt(in.ptr) - @ptrToInt(buf.ptr)) / @sizeOf(Type)), + .off = @truncate(u32, (@intFromPtr(in.ptr) - @intFromPtr(buf.ptr)) / @sizeOf(Type)), .len = @truncate(u32, in.len), }; } @@ -166,8 +166,8 @@ pub const ExternalStringList = ExternalSlice(ExternalString); pub const VersionSlice = ExternalSlice(Semver.Version); pub const ExternalStringMap = extern struct { - name: ExternalStringList = ExternalStringList{}, - value: ExternalStringList = ExternalStringList{}, + name: ExternalStringList = .{}, + value: ExternalStringList = .{}, }; pub const PackageNameHash = u64; @@ -183,7 +183,7 @@ pub const Aligner = struct { } pub inline fn skipAmount(comptime Type: type, pos: usize) usize { - return std.mem.alignForward(pos, @alignOf(Type)) - pos; + return std.mem.alignForward(usize, pos, @alignOf(Type)) - pos; } }; @@ -351,7 +351,7 @@ const NetworkTask = struct { }, ); 
header_builder.header_count = 1; - header_builder.content = GlobalStringBuilder{ .ptr = @intToPtr([*]u8, @ptrToInt(bun.span(default_headers_buf).ptr)), .len = default_headers_buf.len, .cap = default_headers_buf.len }; + header_builder.content = GlobalStringBuilder{ .ptr = @ptrFromInt([*]u8, @intFromPtr(bun.span(default_headers_buf).ptr)), .len = default_headers_buf.len, .cap = default_headers_buf.len }; } this.response_buffer = try MutableString.init(allocator, 0); @@ -467,19 +467,19 @@ pub const Features = struct { is_main: bool = false, optional_dependencies: bool = false, peer_dependencies: bool = true, - scripts: bool = false, + trusted_dependencies: bool = false, workspaces: bool = false, check_for_duplicate_dependencies: bool = false, pub fn behavior(this: Features) Behavior { var out: u8 = 0; - out |= @as(u8, @boolToInt(this.dependencies)) << 1; - out |= @as(u8, @boolToInt(this.optional_dependencies)) << 2; - out |= @as(u8, @boolToInt(this.dev_dependencies)) << 3; - out |= @as(u8, @boolToInt(this.peer_dependencies)) << 4; - out |= @as(u8, @boolToInt(this.workspaces)) << 5; - return @intToEnum(Behavior, out); + out |= @as(u8, @intFromBool(this.dependencies)) << 1; + out |= @as(u8, @intFromBool(this.optional_dependencies)) << 2; + out |= @as(u8, @intFromBool(this.dev_dependencies)) << 3; + out |= @as(u8, @intFromBool(this.peer_dependencies)) << 4; + out |= @as(u8, @intFromBool(this.workspaces)) << 5; + return @enumFromInt(Behavior, out); } pub const main = Features{ @@ -487,7 +487,7 @@ pub const Features = struct { .dev_dependencies = true, .is_main = true, .optional_dependencies = true, - .scripts = true, + .trusted_dependencies = true, .workspaces = true, }; @@ -499,7 +499,7 @@ pub const Features = struct { pub const workspace = Features{ .dev_dependencies = true, .optional_dependencies = true, - .scripts = true, + .trusted_dependencies = true, }; pub const link = Features{ @@ -541,7 +541,7 @@ const Task = struct { /// An ID that lets us register a callback without keeping the same pointer around pub const Id = struct { pub fn forNPMPackage(package_name: string, package_version: Semver.Version) u64 { - var hasher = std.hash.Wyhash.init(0); + var hasher = bun.Wyhash.init(0); hasher.update(package_name); hasher.update("@"); hasher.update(std.mem.asBytes(&package_version)); @@ -549,28 +549,28 @@ const Task = struct { } pub fn forBinLink(package_id: PackageID) u64 { - const hash = std.hash.Wyhash.hash(0, std.mem.asBytes(&package_id)); + const hash = bun.Wyhash.hash(0, std.mem.asBytes(&package_id)); return @as(u64, 1 << 61) | @as(u64, @truncate(u61, hash)); } pub fn forManifest(name: string) u64 { - return @as(u64, 2 << 61) | @as(u64, @truncate(u61, std.hash.Wyhash.hash(0, name))); + return @as(u64, 2 << 61) | @as(u64, @truncate(u61, bun.Wyhash.hash(0, name))); } pub fn forTarball(url: string) u64 { - var hasher = std.hash.Wyhash.init(0); + var hasher = bun.Wyhash.init(0); hasher.update(url); return @as(u64, 3 << 61) | @as(u64, @truncate(u61, hasher.final())); } pub fn forGitClone(url: string) u64 { - var hasher = std.hash.Wyhash.init(0); + var hasher = bun.Wyhash.init(0); hasher.update(url); return @as(u64, 4 << 61) | @as(u64, @truncate(u61, hasher.final())); } pub fn forGitCheckout(url: string, resolved: string) u64 { - var hasher = std.hash.Wyhash.init(0); + var hasher = bun.Wyhash.init(0); hasher.update(url); hasher.update("@"); hasher.update(resolved); @@ -1079,10 +1079,10 @@ const PackageInstall = struct { var stackpath: [bun.MAX_PATH_BYTES]u8 = undefined; while (try walker.next()) 
|entry| { switch (entry.kind) { - .Directory => { + .directory => { std.os.mkdirat(destination_dir_.dir.fd, entry.path, 0o755) catch {}; }, - .File => { + .file => { bun.copy(u8, &stackpath, entry.path); stackpath[entry.path.len] = 0; var path: [:0]u8 = stackpath[0..entry.path.len :0]; @@ -1190,7 +1190,7 @@ const PackageInstall = struct { ) !u32 { var real_file_count: u32 = 0; while (try walker.next()) |entry| { - if (entry.kind != .File) continue; + if (entry.kind != .file) continue; real_file_count += 1; var outfile = destination_dir_.createFile(entry.path, .{}) catch brk: { @@ -1268,10 +1268,10 @@ const PackageInstall = struct { var real_file_count: u32 = 0; while (try walker.next()) |entry| { switch (entry.kind) { - .Directory => { + .directory => { std.os.mkdirat(destination_dir_.dir.fd, entry.path, 0o755) catch {}; }, - .File => { + .file => { try std.os.linkat(entry.dir.dir.fd, entry.basename, destination_dir_.dir.fd, entry.path, 0); real_file_count += 1; }, @@ -1351,16 +1351,16 @@ const PackageInstall = struct { while (try walker.next()) |entry| { switch (entry.kind) { // directories are created - .Directory => { + .directory => { std.os.mkdirat(dest_dir_fd, entry.path, 0o755) catch {}; }, // but each file in the directory is a symlink - .File => { - @memcpy(remain.ptr, entry.path.ptr, entry.path.len); + .file => { + @memcpy(remain[0..entry.path.len], entry.path); remain[entry.path.len] = 0; var from_path = buf[0 .. cache_dir_offset + entry.path.len :0]; - @memcpy(dest_remaining.ptr, entry.path.ptr, entry.path.len); + @memcpy(dest_remaining[0..entry.path.len], entry.path); dest_remaining[entry.path.len] = 0; var to_path = dest_buf[0 .. dest_dir_offset + entry.path.len :0]; @@ -1718,9 +1718,8 @@ pub const PackageManager = struct { } pub fn wake(this: *PackageManager) void { - if (this.onWake.context != null) { - this.onWake.getHandler()(this.onWake.context.?, this); - return; + if (this.onWake.context) |ctx| { + this.onWake.getHandler()(ctx, this); } _ = this.wait_count.fetchAdd(1, .Monotonic); @@ -1743,60 +1742,81 @@ pub const PackageManager = struct { pub fn enqueueDependencyToRoot( this: *PackageManager, name: []const u8, - version_buf: []const u8, version: *const Dependency.Version, + version_buf: []const u8, behavior: Dependency.Behavior, ) DependencyToEnqueue { - const str_buf = this.lockfile.buffers.string_bytes.items; - for (this.lockfile.buffers.dependencies.items, 0..) |dependency, dependency_id| { - if (!strings.eqlLong(dependency.name.slice(str_buf), name, true)) continue; - if (!dependency.version.eql(version, str_buf, version_buf)) continue; - return switch (this.lockfile.buffers.resolutions.items[dependency_id]) { - invalid_package_id => .{ - .pending = @truncate(DependencyID, dependency_id), - }, - else => |resolution_id| .{ - .resolution = .{ - .resolution = this.lockfile.packages.items(.resolution)[resolution_id], - .package_id = resolution_id, - }, - }, + const dep_id = @truncate(DependencyID, brk: { + const str_buf = this.lockfile.buffers.string_bytes.items; + for (this.lockfile.buffers.dependencies.items, 0..) 
|dep, id| { + if (!strings.eqlLong(dep.name.slice(str_buf), name, true)) continue; + if (!dep.version.eql(version, str_buf, version_buf)) continue; + break :brk id; + } + + var builder = this.lockfile.stringBuilder(); + const dummy = Dependency{ + .name = String.init(name, name), + .name_hash = String.Builder.stringHash(name), + .version = version.*, + .behavior = behavior, }; - } + dummy.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder); - var builder = this.lockfile.stringBuilder(); - const dependency = Dependency{ - .name = String.init(name, name), - .name_hash = String.Builder.stringHash(name), - .version = version.*, - .behavior = behavior, - }; - dependency.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder); - - builder.allocate() catch |err| return .{ .failure = err }; - - const cloned_dependency = dependency.cloneWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder) catch unreachable; - builder.clamp(); - const index = @truncate(DependencyID, this.lockfile.buffers.dependencies.items.len); - this.lockfile.buffers.dependencies.append(this.allocator, cloned_dependency) catch unreachable; - this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable; - if (comptime Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len); - this.enqueueDependencyWithMainAndSuccessFn( - index, - &cloned_dependency, - invalid_package_id, - assignRootResolution, - failRootResolution, - ) catch |err| { - return .{ .failure = err }; - }; + builder.allocate() catch |err| return .{ .failure = err }; - const resolution_id = this.lockfile.buffers.resolutions.items[index]; + const dep = dummy.cloneWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder) catch unreachable; + builder.clamp(); + const index = this.lockfile.buffers.dependencies.items.len; + this.lockfile.buffers.dependencies.append(this.allocator, dep) catch unreachable; + this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable; + if (comptime Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len); + break :brk index; + }); - // check if we managed to synchronously resolve the dependency - if (resolution_id == invalid_package_id) return .{ .pending = index }; + if (this.lockfile.buffers.resolutions.items[dep_id] == invalid_package_id) { + this.enqueueDependencyWithMainAndSuccessFn( + dep_id, + &this.lockfile.buffers.dependencies.items[dep_id], + invalid_package_id, + assignRootResolution, + failRootResolution, + ) catch |err| { + return .{ .failure = err }; + }; + } + + const resolution_id = switch (this.lockfile.buffers.resolutions.items[dep_id]) { + invalid_package_id => brk: { + this.drainDependencyList(); + + switch (this.options.log_level) { + inline else => |log_level| { + if (log_level.showProgress()) this.startProgressBarIfNone(); + while (this.pending_tasks > 0) : (this.sleep()) { + this.runTasks( + void, + {}, + .{ + .onExtract = {}, + .onResolve = {}, + .onPackageManifestError = {}, + .onPackageDownloadError = {}, + }, + log_level, + ) catch |err| { + return .{ .failure = err }; + }; + } + }, + } + + break :brk this.lockfile.buffers.resolutions.items[dep_id]; + }, + // we managed to synchronously resolve the dependency + else => |pkg_id| pkg_id, + }; - this.drainDependencyList(); return .{ .resolution = .{ .resolution = 
this.lockfile.packages.items(.resolution)[resolution_id], @@ -1830,7 +1850,7 @@ pub const PackageManager = struct { const offset = this.preinstall_state.items.len; try this.preinstall_state.ensureTotalCapacity(this.allocator, count); this.preinstall_state.expandToCapacity(); - std.mem.set(PreinstallState, this.preinstall_state.items[offset..], PreinstallState.unknown); + @memset(this.preinstall_state.items[offset..], PreinstallState.unknown); } pub fn setPreinstallState(this: *PackageManager, package_id: PackageID, lockfile: *Lockfile, value: PreinstallState) void { @@ -2263,7 +2283,7 @@ pub const PackageManager = struct { var iter = dir.iterate(); while (try iter.next()) |entry| { - if (entry.kind != .Directory and entry.kind != .SymLink) continue; + if (entry.kind != .directory and entry.kind != .sym_link) continue; const name = entry.name; const sliced = SlicedString.init(name, name); const parsed = Semver.Version.parse(sliced, allocator); @@ -2305,7 +2325,7 @@ pub const PackageManager = struct { }; // TODO: make this fewer passes - std.sort.sort( + std.sort.block( Semver.Version, installed_versions.items, @as([]const u8, tags_buf.items), @@ -3546,7 +3566,34 @@ pub const PackageManager = struct { return package; }, - else => {}, + else => if (data.json_len > 0) { + const package_json_source = logger.Source.initPathString( + data.json_path, + data.json_buf[0..data.json_len], + ); + initializeStore(); + const json = json_parser.ParseJSONUTF8( + &package_json_source, + manager.log, + manager.allocator, + ) catch |err| { + if (comptime log_level != .silent) { + const string_buf = manager.lockfile.buffers.string_bytes.items; + Output.prettyErrorln("<r><red>error:<r> expected package.json in <b>{any}<r> to be a JSON file: {s}\n", .{ + resolution.fmtURL(&manager.options, string_buf), + @errorName(err), + }); + } + Global.crash(); + }; + var builder = manager.lockfile.stringBuilder(); + Lockfile.Package.Scripts.parseCount(manager.allocator, &builder, json); + builder.allocate() catch unreachable; + if (comptime Environment.allow_assert) std.debug.assert(package_id.* != invalid_package_id); + var scripts = manager.lockfile.packages.items(.scripts)[package_id.*]; + scripts.parseAlloc(manager.allocator, &builder, json); + scripts.filled = true; + }, } return null; @@ -3752,7 +3799,7 @@ pub const PackageManager = struct { if (comptime log_level.isVerbose()) { Output.prettyError(" ", .{}); - Output.printElapsed(@intToFloat(f64, task.http.elapsed) / std.time.ns_per_ms); + Output.printElapsed(@floatFromInt(f64, task.http.elapsed) / std.time.ns_per_ms); Output.prettyError("\n <d>Downloaded <r><green>{s}<r> versions\n", .{name.slice()}); Output.flush(); } @@ -3886,7 +3933,7 @@ pub const PackageManager = struct { if (comptime log_level.isVerbose()) { Output.prettyError(" ", .{}); - Output.printElapsed(@floatCast(f64, @intToFloat(f64, task.http.elapsed) / std.time.ns_per_ms)); + Output.printElapsed(@floatCast(f64, @floatFromInt(f64, task.http.elapsed) / std.time.ns_per_ms)); Output.prettyError(" <d>Downloaded <r><green>{s}<r> tarball\n", .{extract.name.slice()}); Output.flush(); } @@ -3910,10 +3957,10 @@ pub const PackageManager = struct { var task: Task = task_; if (task.log.msgs.items.len > 0) { - if (Output.enable_ansi_colors) { - try task.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try task.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + try 
task.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors); + }, } } @@ -4434,6 +4481,10 @@ pub const PackageManager = struct { this.remote_package_features.peer_dependencies = save; } + if (bun_install.exact) |exact| { + this.enable.exact_versions = exact; + } + if (bun_install.production) |production| { if (production) { this.local_package_features.dev_dependencies = false; @@ -4443,6 +4494,12 @@ pub const PackageManager = struct { } } + if (bun_install.frozen_lockfile) |frozen_lockfile| { + if (frozen_lockfile) { + this.enable.frozen_lockfile = true; + } + } + if (bun_install.save_optional) |save| { this.remote_package_features.optional_dependencies = save; this.local_package_features.optional_dependencies = save; @@ -4536,6 +4593,10 @@ pub const PackageManager = struct { this.scope.url = URL.parse(cli.registry); } + if (cli.exact) { + this.enable.exact_versions = true; + } + if (cli.token.len > 0) { this.scope.token = cli.token; } @@ -4676,6 +4737,10 @@ pub const PackageManager = struct { this.enable.frozen_lockfile = true; } + if (cli.frozen_lockfile) { + this.enable.frozen_lockfile = true; + } + if (cli.force) { this.enable.manifest_cache_control = false; this.enable.force_install = true; @@ -4718,6 +4783,8 @@ pub const PackageManager = struct { force_save_lockfile: bool = false, force_install: bool = false, + + exact_versions: bool = false, }; }; @@ -4765,6 +4832,7 @@ pub const PackageManager = struct { updates: []UpdateRequest, current_package_json: *JSAst.Expr, dependency_list: string, + exact_versions: bool, ) !void { const G = JSAst.G; @@ -4814,7 +4882,7 @@ pub const PackageManager = struct { var new_dependencies = try allocator.alloc(G.Property, dependencies.len + remaining - replacing); bun.copy(G.Property, new_dependencies, dependencies); - std.mem.set(G.Property, new_dependencies[dependencies.len..], G.Property{}); + @memset(new_dependencies[dependencies.len..], G.Property{}); outer: for (updates) |*update| { if (update.e_string != null) continue; @@ -4952,9 +5020,14 @@ pub const PackageManager = struct { if (update.e_string) |e_string| { e_string.data = switch (update.resolution.tag) { .npm => if (update.version.tag == .dist_tag and update.version.literal.isEmpty()) - std.fmt.allocPrint(allocator, "^{}", .{ - update.resolution.value.npm.version.fmt(update.version_buf), - }) catch unreachable + switch (exact_versions) { + false => std.fmt.allocPrint(allocator, "^{}", .{ + update.resolution.value.npm.version.fmt(update.version_buf), + }) catch unreachable, + true => std.fmt.allocPrint(allocator, "{}", .{ + update.resolution.value.npm.version.fmt(update.version_buf), + }) catch unreachable, + } else null, .uninitialized => switch (update.version.tag) { @@ -5257,6 +5330,8 @@ pub const PackageManager = struct { manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; manager.root_progress_node = manager.progress.start("", 0); manager.root_download_node = manager.root_progress_node.start(ProgressStrings.download(), 0); + } else { + manager.options.log_level = .default_no_progress; } if (!manager.options.enable.cache) { @@ -5298,36 +5373,31 @@ pub const PackageManager = struct { // When using bun, we only do staleness checks once per day ) -| std.time.s_per_day; - manager.lockfile = brk: { + if (root_dir.entries.hasComptimeQuery("bun.lockb")) { var buf: [bun.MAX_PATH_BYTES]u8 = undefined; + var parts = [_]string{ + "./bun.lockb", + }; + var lockfile_path = Path.joinAbsStringBuf( + Fs.FileSystem.instance.top_level_dir, + &buf, + 
&parts, + .auto, + ); + buf[lockfile_path.len] = 0; + var lockfile_path_z = buf[0..lockfile_path.len :0]; - if (root_dir.entries.hasComptimeQuery("bun.lockb")) { - var parts = [_]string{ - "./bun.lockb", - }; - var lockfile_path = Path.joinAbsStringBuf( - Fs.FileSystem.instance.top_level_dir, - &buf, - &parts, - .auto, - ); - buf[lockfile_path.len] = 0; - var lockfile_path_z = buf[0..lockfile_path.len :0]; - - const result = manager.lockfile.loadFromDisk( - allocator, - log, - lockfile_path_z, - ); - - if (result == .ok) { - break :brk result.ok; - } + switch (manager.lockfile.loadFromDisk( + allocator, + log, + lockfile_path_z, + )) { + .ok => |lockfile| manager.lockfile = lockfile, + else => try manager.lockfile.initEmpty(allocator), } - + } else { try manager.lockfile.initEmpty(allocator); - break :brk manager.lockfile; - }; + } return manager; } @@ -5438,7 +5508,7 @@ pub const PackageManager = struct { // create scope if specified if (name[0] == '@') { - if (std.mem.indexOfScalar(u8, name, '/')) |i| { + if (strings.indexOfChar(name, '/')) |i| { node_modules.dir.makeDir(name[0..i]) catch |err| brk: { if (err == error.PathAlreadyExists) break :brk; if (manager.options.log_level != .silent) @@ -5484,7 +5554,7 @@ pub const PackageManager = struct { // Done if (manager.options.log_level != .silent) Output.prettyln( - \\<r><green>Success!<r> Registered \"{[name]s}\" + \\<r><green>Success!<r> Registered "{[name]s}" \\ \\To use {[name]s} in a project, run: \\ <cyan>bun link {[name]s}<r> @@ -5503,11 +5573,7 @@ pub const PackageManager = struct { } else { // bun link lodash switch (manager.options.log_level) { - .default => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .default), - .verbose => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .verbose), - .silent => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .silent), - .default_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .default_no_progress), - .verbose_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, .link, .verbose_no_progress), + inline else => |log_level| try updatePackageJSONAndInstallWithManager(ctx, manager, .link, log_level), } } } @@ -5649,6 +5715,7 @@ pub const PackageManager = struct { clap.parseParam("--save Save to package.json") catch unreachable, clap.parseParam("--dry-run Don't install anything") catch unreachable, clap.parseParam("--lockfile <PATH> Store & load a lockfile at a specific filepath") catch unreachable, + clap.parseParam("--frozen-lockfile Disallow changes to lockfile") catch unreachable, clap.parseParam("-f, --force Always request the latest versions from the registry & reinstall all dependencies") catch unreachable, clap.parseParam("--cache-dir <PATH> Store & load cached data from a specific directory path") catch unreachable, clap.parseParam("--no-cache Ignore manifest cache entirely") catch unreachable, @@ -5680,6 +5747,7 @@ pub const PackageManager = struct { const add_params = install_params_ ++ [_]ParamType{ clap.parseParam("-d, --development Add dependency to \"devDependencies\"") catch unreachable, clap.parseParam("--optional Add dependency to \"optionalDependencies\"") catch unreachable, + clap.parseParam("--exact Add the exact version instead of the ^range") catch unreachable, clap.parseParam("<POS> ... 
\"name\" or \"name@version\" of packages to install") catch unreachable, }; @@ -5709,6 +5777,7 @@ pub const PackageManager = struct { yarn: bool = false, production: bool = false, + frozen_lockfile: bool = false, no_save: bool = false, dry_run: bool = false, force: bool = false, @@ -5729,6 +5798,8 @@ pub const PackageManager = struct { no_optional: bool = false, omit: Omit = Omit{}, + exact: bool = false, + const Omit = struct { dev: bool = false, optional: bool = true, @@ -5777,6 +5848,7 @@ pub const PackageManager = struct { var cli = CommandLineArguments{}; cli.yarn = args.flag("--yarn"); cli.production = args.flag("--production"); + cli.frozen_lockfile = args.flag("--frozen-lockfile"); cli.no_progress = args.flag("--no-progress"); cli.dry_run = args.flag("--dry-run"); cli.global = args.flag("--global"); @@ -5806,6 +5878,7 @@ pub const PackageManager = struct { if (comptime subcommand == .add) { cli.development = args.flag("--development"); cli.optional = args.flag("--optional"); + cli.exact = args.flag("--exact"); } // for (args.options("--omit")) |omit| { @@ -6006,11 +6079,7 @@ pub const PackageManager = struct { } switch (manager.options.log_level) { - .default => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .default), - .verbose => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .verbose), - .silent => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .silent), - .default_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .default_no_progress), - .verbose_no_progress => try updatePackageJSONAndInstallWithManager(ctx, manager, op, .verbose_no_progress), + inline else => |log_level| try updatePackageJSONAndInstallWithManager(ctx, manager, op, log_level), } } @@ -6140,13 +6209,12 @@ pub const PackageManager = struct { ) !void { if (ctx.log.errors > 0) { if (comptime log_level != .silent) { - if (Output.enable_ansi_colors) { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; - } else { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {}; + }, } } - Global.crash(); } @@ -6170,10 +6238,10 @@ pub const PackageManager = struct { initializeStore(); var current_package_json = json_parser.ParseJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| { - if (Output.enable_ansi_colors) { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; - } else { - ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {}; + }, } if (err == error.ParserError and ctx.log.errors > 0) { @@ -6267,7 +6335,13 @@ pub const PackageManager = struct { manager.to_remove = updates; }, .link, .add, .update => { - try PackageJSONEditor.edit(ctx.allocator, updates, ¤t_package_json, dependency_list); + try PackageJSONEditor.edit( + ctx.allocator, + updates, + ¤t_package_json, + dependency_list, + manager.options.enable.exact_versions, + ); manager.package_json_updates = updates; }, else => {}, @@ -6300,7 +6374,7 @@ pub const PackageManager = struct { // haha unless defer if (auto_free) bun.default_allocator.free(old_ast_nodes); - try installWithManager(ctx, manager, 
new_package_json_source, log_level); + try manager.installWithManager(ctx, new_package_json_source, log_level); if (op == .update or op == .add or op == .link) { for (manager.package_json_updates) |update| { @@ -6320,7 +6394,13 @@ pub const PackageManager = struct { return; }; - try PackageJSONEditor.edit(ctx.allocator, updates, ¤t_package_json, dependency_list); + try PackageJSONEditor.edit( + ctx.allocator, + updates, + ¤t_package_json, + dependency_list, + manager.options.enable.exact_versions, + ); var buffer_writer_two = try JSPrinter.BufferWriter.init(ctx.allocator); try buffer_writer_two.buffer.list.ensureTotalCapacity(ctx.allocator, new_package_json_source.len + 1); buffer_writer_two.append_newline = @@ -6372,7 +6452,7 @@ pub const PackageManager = struct { var iter: std.fs.IterableDir.Iterator = node_modules_bin.iterate(); iterator: while (iter.next() catch null) |entry| { switch (entry.kind) { - std.fs.IterableDir.Entry.Kind.SymLink => { + std.fs.IterableDir.Entry.Kind.sym_link => { // any symlinks which we are unable to open are assumed to be dangling // note that using access won't work here, because access doesn't resolve symlinks @@ -6422,11 +6502,7 @@ pub const PackageManager = struct { }; try switch (manager.options.log_level) { - .default => installWithManager(ctx, manager, package_json_contents, .default), - .verbose => installWithManager(ctx, manager, package_json_contents, .verbose), - .silent => installWithManager(ctx, manager, package_json_contents, .silent), - .default_no_progress => installWithManager(ctx, manager, package_json_contents, .default_no_progress), - .verbose_no_progress => installWithManager(ctx, manager, package_json_contents, .verbose_no_progress), + inline else => |log_level| manager.installWithManager(ctx, package_json_contents, log_level), }; } @@ -6547,7 +6623,7 @@ pub const PackageManager = struct { if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { installer.cache_dir_subpath = "."; } else { - @memcpy(&this.folder_path_buf, folder.ptr, folder.len); + @memcpy(this.folder_path_buf[0..folder.len], folder); this.folder_path_buf[folder.len] = 0; installer.cache_dir_subpath = this.folder_path_buf[0..folder.len :0]; } @@ -6567,7 +6643,7 @@ pub const PackageManager = struct { if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { installer.cache_dir_subpath = "."; } else { - @memcpy(&this.folder_path_buf, folder.ptr, folder.len); + @memcpy(this.folder_path_buf[0..folder.len], folder); this.folder_path_buf[folder.len] = 0; installer.cache_dir_subpath = this.folder_path_buf[0..folder.len :0]; } @@ -6580,10 +6656,10 @@ pub const PackageManager = struct { const args = .{ name, @errorName(err) }; if (comptime log_level.showProgress()) { - if (Output.enable_ansi_colors) { - this.progress.log(comptime Output.prettyFmt(fmt, true), args); - } else { - this.progress.log(comptime Output.prettyFmt(fmt, false), args); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, } } else { Output.prettyErrorln(fmt, args); @@ -6608,16 +6684,16 @@ pub const PackageManager = struct { const global_link_dir = this.manager.globalLinkDirPath() catch unreachable; var ptr = &this.folder_path_buf; var remain: []u8 = this.folder_path_buf[0..]; - @memcpy(ptr, global_link_dir.ptr, global_link_dir.len); + @memcpy(ptr[0..global_link_dir.len], global_link_dir); remain = remain[global_link_dir.len..]; if (global_link_dir[global_link_dir.len - 1] != 
std.fs.path.sep) { remain[0] = std.fs.path.sep; remain = remain[1..]; } - @memcpy(remain.ptr, folder.ptr, folder.len); + @memcpy(remain[0..folder.len], folder); remain = remain[folder.len..]; remain[0] = 0; - const len = @ptrToInt(remain.ptr) - @ptrToInt(ptr); + const len = @intFromPtr(remain.ptr) - @intFromPtr(ptr); installer.cache_dir_subpath = this.folder_path_buf[0..len :0]; installer.cache_dir = directory; } @@ -6626,7 +6702,7 @@ pub const PackageManager = struct { } const needs_install = this.force_install or this.skip_verify_installed_version_number or !installer.verify(resolution, buf); - this.summary.skipped += @as(u32, @boolToInt(!needs_install)); + this.summary.skipped += @as(u32, @intFromBool(!needs_install)); if (needs_install) { const result: PackageInstall.Result = switch (resolution.tag) { @@ -6637,7 +6713,7 @@ pub const PackageManager = struct { switch (result) { .success => { const is_duplicate = this.successfully_installed.isSet(package_id); - this.summary.success += @as(u32, @boolToInt(!is_duplicate)); + this.summary.success += @as(u32, @intFromBool(!is_duplicate)); this.successfully_installed.set(package_id); if (comptime log_level.showProgress()) { @@ -6686,10 +6762,10 @@ pub const PackageManager = struct { const args = .{ alias, @errorName(err) }; if (comptime log_level.showProgress()) { - if (Output.enable_ansi_colors) { - this.progress.log(comptime Output.prettyFmt(fmt, true), args); - } else { - this.progress.log(comptime Output.prettyFmt(fmt, false), args); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, } } else { Output.prettyErrorln(fmt, args); @@ -6705,61 +6781,56 @@ pub const PackageManager = struct { } } - var scripts = this.lockfile.packages.items(.scripts)[package_id]; - if (scripts.hasAny()) { - var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - const path_str = Path.joinAbsString( - bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable, - &[_]string{destination_dir_subpath}, - .posix, - ); + if (resolution.tag == .workspace or this.lockfile.trusted_dependencies.contains(@truncate(u32, String.Builder.stringHash(name)))) { + var scripts = this.lockfile.packages.items(.scripts)[package_id]; + if (scripts.hasAny()) { + var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + const path_str = Path.joinAbsString( + bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable, + &[_]string{destination_dir_subpath}, + .posix, + ); - scripts.enqueue(this.lockfile, buf, path_str); - } else if (!scripts.filled and switch (resolution.tag) { - .folder => Features.folder.scripts, - .npm => Features.npm.scripts, - .git, .github, .gitlab, .local_tarball, .remote_tarball => Features.tarball.scripts, - .symlink => Features.link.scripts, - .workspace => Features.workspace.scripts, - else => false, - }) { - var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - const path_str = Path.joinAbsString( - bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable, - &[_]string{destination_dir_subpath}, - .posix, - ); + scripts.enqueue(this.lockfile, buf, path_str); + } else if (!scripts.filled) { + var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + const path_str = Path.joinAbsString( + bun.getFdPath(this.node_modules_folder.dir.fd, &path_buf) catch unreachable, + &[_]string{destination_dir_subpath}, + .posix, + ); - scripts.enqueueFromPackageJSON( - this.manager.log, - this.lockfile, - this.node_modules_folder.dir, - 
destination_dir_subpath, - path_str, - ) catch |err| { - if (comptime log_level != .silent) { - const fmt = "\n<r><red>error:<r> failed to parse life-cycle scripts for <b>{s}<r>: {s}\n"; - const args = .{ name, @errorName(err) }; + scripts.enqueueFromPackageJSON( + this.manager.log, + this.lockfile, + this.node_modules_folder.dir, + destination_dir_subpath, + path_str, + ) catch |err| { + if (comptime log_level != .silent) { + const fmt = "\n<r><red>error:<r> failed to parse life-cycle scripts for <b>{s}<r>: {s}\n"; + const args = .{ name, @errorName(err) }; - if (comptime log_level.showProgress()) { - if (Output.enable_ansi_colors) { - this.progress.log(comptime Output.prettyFmt(fmt, true), args); + if (comptime log_level.showProgress()) { + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, + } } else { - this.progress.log(comptime Output.prettyFmt(fmt, false), args); + Output.prettyErrorln(fmt, args); } - } else { - Output.prettyErrorln(fmt, args); } - } - if (this.manager.options.enable.fail_early) { - Global.exit(1); - } + if (this.manager.options.enable.fail_early) { + Global.exit(1); + } - Output.flush(); - this.summary.fail += 1; - return; - }; + Output.flush(); + this.summary.fail += 1; + return; + }; + } } }, .fail => |cause| { @@ -7014,7 +7085,10 @@ pub const PackageManager = struct { ) !PackageInstall.Summary { var lockfile = lockfile_; if (!this.options.local_package_features.dev_dependencies) { - lockfile = try lockfile.maybeCloneFilteringRootPackages(this.options.local_package_features); + lockfile = try lockfile.maybeCloneFilteringRootPackages( + this.options.local_package_features, + this.options.enable.exact_versions, + ); } var root_node: *Progress.Node = undefined; @@ -7228,10 +7302,10 @@ pub const PackageManager = struct { const args = .{ name, @errorName(err) }; if (comptime log_level.showProgress()) { - if (Output.enable_ansi_colors) { - this.progress.log(comptime Output.prettyFmt(fmt, true), args); - } else { - this.progress.log(comptime Output.prettyFmt(fmt, false), args); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, } } else { Output.prettyErrorln(fmt, args); @@ -7249,10 +7323,10 @@ pub const PackageManager = struct { const args = .{lockfile.str(&names[package_id])}; if (comptime log_level.showProgress()) { - if (Output.enable_ansi_colors) { - this.progress.log(comptime Output.prettyFmt(fmt, true), args); - } else { - this.progress.log(comptime Output.prettyFmt(fmt, false), args); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, } } else { Output.prettyErrorln(fmt, args); @@ -7299,8 +7373,8 @@ pub const PackageManager = struct { } fn installWithManager( - ctx: Command.Context, manager: *PackageManager, + ctx: Command.Context, package_json_contents: string, comptime log_level: Options.LogLevel, ) !void { @@ -7313,7 +7387,7 @@ pub const PackageManager = struct { manager.options.lockfile_path, ) else - Lockfile.LoadFromDiskResult{ .not_found = {} }; + .{ .not_found = {} }; var root = Lockfile.Package{}; var needs_new_lockfile = load_lockfile_result != .ok or (load_lockfile_result.ok.buffers.dependencies.items.len == 0 and manager.package_json_updates.len > 0); // this defaults to false @@ -7347,10 +7421,10 @@ pub const 
PackageManager = struct { } if (ctx.log.errors > 0) { - if (Output.enable_ansi_colors) { - try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors); + }, } } Output.flush(); @@ -7390,7 +7464,7 @@ pub const PackageManager = struct { ); } var mapping = try manager.lockfile.allocator.alloc(PackageID, maybe_root.dependencies.len); - std.mem.set(PackageID, mapping, invalid_package_id); + @memset(mapping, invalid_package_id); manager.summary = try Package.Diff.generate( ctx.allocator, @@ -7443,8 +7517,8 @@ pub const PackageManager = struct { var resolutions = manager.lockfile.buffers.resolutions.items.ptr[off .. off + len]; // It is too easy to accidentally undefined memory - std.mem.set(PackageID, resolutions, invalid_package_id); - std.mem.set(Dependency, dependencies, Dependency{}); + @memset(resolutions, invalid_package_id); + @memset(dependencies, Dependency{}); manager.lockfile.buffers.dependencies.items = manager.lockfile.buffers.dependencies.items.ptr[0 .. off + len]; manager.lockfile.buffers.resolutions.items = manager.lockfile.buffers.resolutions.items.ptr[0 .. off + len]; @@ -7554,10 +7628,10 @@ pub const PackageManager = struct { } } - if (Output.enable_ansi_colors) { - try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + try manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors); + }, } if (manager.log.hasErrors()) Global.crash(); @@ -7565,7 +7639,11 @@ pub const PackageManager = struct { const needs_clean_lockfile = had_any_diffs or needs_new_lockfile or manager.package_json_updates.len > 0; var did_meta_hash_change = needs_clean_lockfile; if (needs_clean_lockfile) { - manager.lockfile = try manager.lockfile.cleanWithLogger(manager.package_json_updates, manager.log); + manager.lockfile = try manager.lockfile.cleanWithLogger( + manager.package_json_updates, + manager.log, + manager.options.enable.exact_versions, + ); } if (manager.lockfile.packages.len > 0) { @@ -7718,10 +7796,10 @@ pub const PackageManager = struct { .successfully_installed = install_summary.successfully_installed, }; - if (Output.enable_ansi_colors) { - try Lockfile.Printer.Tree.print(&printer, Output.WriterType, Output.writer(), true); - } else { - try Lockfile.Printer.Tree.print(&printer, Output.WriterType, Output.writer(), false); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + try Lockfile.Printer.Tree.print(&printer, Output.WriterType, Output.writer(), enable_ansi_colors); + }, } if (!did_meta_hash_change) { diff --git a/src/install/integrity.zig b/src/install/integrity.zig index e54d9b7e1..4634c2dfd 100644 --- a/src/install/integrity.zig +++ b/src/install/integrity.zig @@ -41,7 +41,7 @@ pub const Integrity = extern struct { var i: usize = 0; { - std.mem.set(u8, &integrity.value, 0); + @memset(&integrity.value, 0); } while (i < end) { @@ -111,13 +111,13 @@ pub const Integrity = extern struct { _, pub inline fn isSupported(this: Tag) bool { - return @enumToInt(this) >= @enumToInt(Tag.sha1) and @enumToInt(this) <= @enumToInt(Tag.sha512); + return 
@intFromEnum(this) >= @intFromEnum(Tag.sha1) and @intFromEnum(this) <= @intFromEnum(Tag.sha512); } pub fn parse(buf: []const u8) Tag { const Matcher = strings.ExactSizeMatcher(8); - const i = std.mem.indexOfScalar(u8, buf[0..@min(buf.len, 7)], '-') orelse return Tag.unknown; + const i = strings.indexOfChar(buf[0..@min(buf.len, 7)], '-') orelse return Tag.unknown; return switch (Matcher.match(buf[0..i])) { Matcher.case("sha1") => Tag.sha1, diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 5c27c1a69..843381228 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -85,6 +85,7 @@ const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8; const zero_hash = std.mem.zeroes(MetaHash); const NameHashMap = std.ArrayHashMapUnmanaged(u32, String, ArrayIdentityContext, false); +const NameHashSet = std.ArrayHashMapUnmanaged(u32, void, ArrayIdentityContext, false); // Serialized data /// The version of the lockfile format, intended to prevent data corruption for format changes. @@ -103,6 +104,7 @@ allocator: Allocator, scratch: Scratch = .{}, scripts: Scripts = .{}, +trusted_dependencies: NameHashSet = .{}, workspace_paths: NameHashMap = .{}, const Stream = std.io.FixedBufferStream([]u8); @@ -113,15 +115,15 @@ pub const Scripts = struct { cwd: string, script: string, }; - const StringArrayList = std.ArrayListUnmanaged(Entry); + const Entries = std.ArrayListUnmanaged(Entry); const RunCommand = @import("../cli/run_command.zig").RunCommand; - preinstall: StringArrayList = .{}, - install: StringArrayList = .{}, - postinstall: StringArrayList = .{}, - preprepare: StringArrayList = .{}, - prepare: StringArrayList = .{}, - postprepare: StringArrayList = .{}, + preinstall: Entries = .{}, + install: Entries = .{}, + postinstall: Entries = .{}, + preprepare: Entries = .{}, + prepare: Entries = .{}, + postprepare: Entries = .{}, pub fn hasAny(this: *Scripts) bool { inline for (Package.Scripts.Hooks) |hook| { @@ -195,6 +197,7 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: Allocator, log: *log this.format = FormatVersion.current; this.scripts = .{}; + this.trusted_dependencies = .{}; this.workspace_paths = .{}; Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| { @@ -343,31 +346,23 @@ pub const Tree = struct { dependencies: []const Dependency, resolution_lists: []const Lockfile.DependencyIDSlice, queue: Lockfile.TreeFiller, - log: ?*logger.Log = null, - old_lockfile: ?*Lockfile = null, + log: *logger.Log, + old_lockfile: *Lockfile, pub fn maybeReportError(this: *Builder, comptime fmt: string, args: anytype) void { - var log = this.log orelse return; - if (this.old_lockfile == null) return; - - log.addErrorFmt(null, logger.Loc.Empty, this.allocator, fmt, args) catch {}; + this.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, fmt, args) catch {}; } pub fn buf(this: *const Builder) []const u8 { - var lockfile = this.old_lockfile orelse return ""; - return lockfile.buffers.string_bytes.items; + return this.old_lockfile.buffers.string_bytes.items; } pub fn packageName(this: *Builder, id: PackageID) String.Formatter { - var lockfile = this.old_lockfile orelse return undefined; - - return lockfile.packages.items(.name)[id].fmt(lockfile.buffers.string_bytes.items); + return this.old_lockfile.packages.items(.name)[id].fmt(this.old_lockfile.buffers.string_bytes.items); } pub fn packageVersion(this: *Builder, id: PackageID) Resolution.Formatter { - var lockfile = 
this.old_lockfile orelse return undefined; - - return lockfile.packages.items(.resolution)[id].fmt(lockfile.buffers.string_bytes.items); + return this.old_lockfile.packages.items(.resolution)[id].fmt(this.old_lockfile.buffers.string_bytes.items); } pub const Entry = struct { @@ -499,15 +494,14 @@ pub const Tree = struct { if (dep.name_hash != dependency.name_hash) continue; if (builder.resolutions[dep_id] != package_id) { if (as_defined and !dep.behavior.isPeer()) { - if (builder.log != null) - builder.maybeReportError("Package \"{}@{}\" has a dependency loop\n Resolution: \"{}@{}\"\n Dependency: \"{}@{}\"", .{ - builder.packageName(package_id), - builder.packageVersion(package_id), - builder.packageName(builder.resolutions[dep_id]), - builder.packageVersion(builder.resolutions[dep_id]), - dependency.name.fmt(builder.buf()), - dependency.version.literal.fmt(builder.buf()), - }); + builder.maybeReportError("Package \"{}@{}\" has a dependency loop\n Resolution: \"{}@{}\"\n Dependency: \"{}@{}\"", .{ + builder.packageName(package_id), + builder.packageVersion(package_id), + builder.packageName(builder.resolutions[dep_id]), + builder.packageVersion(builder.resolutions[dep_id]), + dependency.name.fmt(builder.buf()), + dependency.version.literal.fmt(builder.buf()), + }); return error.DependencyLoop; } // ignore versioning conflicts caused by peer dependencies @@ -544,6 +538,7 @@ pub const Tree = struct { pub fn maybeCloneFilteringRootPackages( old: *Lockfile, features: Features, + exact_versions: bool, ) !*Lockfile { const old_root_dependenices_list = old.packages.items(.dependencies)[0]; var old_root_resolutions = old.packages.items(.resolutions)[0]; @@ -561,10 +556,10 @@ pub fn maybeCloneFilteringRootPackages( if (!any_changes) return old; - return try old.clean(&.{}); + return try old.clean(&.{}, exact_versions); } -fn preprocessUpdateRequests(old: *Lockfile, updates: []PackageManager.UpdateRequest) !void { +fn preprocessUpdateRequests(old: *Lockfile, updates: []PackageManager.UpdateRequest, exact_versions: bool) !void { const root_deps_list: Lockfile.DependencySlice = old.packages.items(.dependencies)[0]; if (@as(usize, root_deps_list.off) < old.buffers.dependencies.items.len) { var string_builder = old.stringBuilder(); @@ -581,7 +576,10 @@ fn preprocessUpdateRequests(old: *Lockfile, updates: []PackageManager.UpdateRequ if (dep.name_hash == String.Builder.stringHash(update.name)) { if (old_resolution > old.packages.len) continue; const res = resolutions_of_yore[old_resolution]; - const len = std.fmt.count("^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}); + const len = switch (exact_versions) { + false => std.fmt.count("^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}), + true => std.fmt.count("{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}), + }; if (len >= String.max_inline_len) { string_builder.cap += len; } @@ -609,7 +607,10 @@ fn preprocessUpdateRequests(old: *Lockfile, updates: []PackageManager.UpdateRequ if (dep.name_hash == String.Builder.stringHash(update.name)) { if (old_resolution > old.packages.len) continue; const res = resolutions_of_yore[old_resolution]; - var buf = std.fmt.bufPrint(&temp_buf, "^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}) catch break; + var buf = switch (exact_versions) { + false => std.fmt.bufPrint(&temp_buf, "^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}) catch break, + true => std.fmt.bufPrint(&temp_buf, "{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}) catch break, + }; const 
external_version = string_builder.append(ExternalString, buf); const sliced = external_version.value.sliced(old.buffers.string_bytes.items); dep.version = Dependency.parse( @@ -628,17 +629,36 @@ fn preprocessUpdateRequests(old: *Lockfile, updates: []PackageManager.UpdateRequ } } } -pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile { - return old.cleanWithLogger(updates, null); +pub fn clean( + old: *Lockfile, + updates: []PackageManager.UpdateRequest, + exact_versions: bool, +) !*Lockfile { + // This is wasteful, but we rarely log anything so it's fine. + var log = logger.Log.init(bun.default_allocator); + defer { + for (log.msgs.items) |*item| { + item.deinit(bun.default_allocator); + } + log.deinit(); + } + + return old.cleanWithLogger(updates, &log, exact_versions); } -pub fn cleanWithLogger(old: *Lockfile, updates: []PackageManager.UpdateRequest, log: ?*logger.Log) !*Lockfile { +pub fn cleanWithLogger( + old: *Lockfile, + updates: []PackageManager.UpdateRequest, + log: *logger.Log, + exact_versions: bool, +) !*Lockfile { + const old_trusted_dependencies = old.trusted_dependencies; const old_scripts = old.scripts; // We will only shrink the number of packages here. // never grow if (updates.len > 0) { - try old.preprocessUpdateRequests(updates); + try old.preprocessUpdateRequests(updates, exact_versions); } // Deduplication works like this @@ -693,8 +713,7 @@ pub fn cleanWithLogger(old: *Lockfile, updates: []PackageManager.UpdateRequest, const root = old.rootPackage() orelse return error.NoPackage; var package_id_mapping = try old.allocator.alloc(PackageID, old.packages.len); - std.mem.set( - PackageID, + @memset( package_id_mapping, invalid_package_id, ); @@ -739,6 +758,7 @@ pub fn cleanWithLogger(old: *Lockfile, updates: []PackageManager.UpdateRequest, } } } + new.trusted_dependencies = old_trusted_dependencies; new.scripts = old_scripts; return new; } @@ -781,7 +801,7 @@ const Cloner = struct { mapping: []PackageID, trees: Tree.List = Tree.List{}, trees_count: u32 = 1, - log: ?*logger.Log = null, + log: *logger.Log, pub fn flush(this: *Cloner) anyerror!void { const max_package_id = this.old.packages.len; @@ -910,10 +930,10 @@ pub const Printer = struct { }), } if (log.errors > 0) { - if (Output.enable_ansi_colors) { - try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true); - } else { - try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false); + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + try log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors); + }, } } Global.crash(); @@ -998,7 +1018,7 @@ pub const Printer = struct { const dependencies_buffer: []const Dependency = this.lockfile.buffers.dependencies.items; const string_buf = this.lockfile.buffers.string_bytes.items; var id_map = try default_allocator.alloc(DependencyID, this.updates.len); - std.mem.set(DependencyID, id_map, invalid_package_id); + @memset(id_map, invalid_package_id); defer if (id_map.len > 0) default_allocator.free(id_map); visited.set(0); @@ -1211,12 +1231,12 @@ pub const Printer = struct { } var dependency_versions = requested_version_start[0..j]; - if (dependency_versions.len > 1) std.sort.insertionSort(Dependency.Version, dependency_versions, string_buf, Dependency.Version.isLessThan); + if (dependency_versions.len > 1) std.sort.insertion(Dependency.Version, dependency_versions, string_buf, Dependency.Version.isLessThan); try requested_versions.put(i, dependency_versions); } } - 
std.sort.sort( + std.sort.block( PackageID, alphabetized_names, Lockfile.Package.Alphabetizer{ @@ -1494,6 +1514,7 @@ pub fn initEmpty(this: *Lockfile, allocator: Allocator) !void { .allocator = allocator, .scratch = Scratch.init(allocator), .scripts = .{}, + .trusted_dependencies = .{}, .workspace_paths = .{}, }; } @@ -1908,11 +1929,11 @@ pub const Package = extern struct { field: string, behavior: Behavior, - pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = @intToEnum(Behavior, Behavior.normal) }; - pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @intToEnum(Behavior, Behavior.dev) }; - pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @intToEnum(Behavior, Behavior.optional) }; - pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @intToEnum(Behavior, Behavior.peer) }; - pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @intToEnum(Behavior, Behavior.workspace) }; + pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = @enumFromInt(Behavior, Behavior.normal) }; + pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @enumFromInt(Behavior, Behavior.dev) }; + pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @enumFromInt(Behavior, Behavior.optional) }; + pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @enumFromInt(Behavior, Behavior.peer) }; + pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @enumFromInt(Behavior, Behavior.workspace) }; }; pub inline fn isDisabled(this: *const Lockfile.Package) bool { @@ -2032,21 +2053,22 @@ pub const Package = extern struct { builder.clamp(); - cloner.trees_count += @as(u32, @boolToInt(old_resolutions.len > 0)); + cloner.trees_count += @as(u32, @intFromBool(old_resolutions.len > 0)); - for (old_resolutions, 0..) |old_resolution, i| { - if (old_resolution >= max_package_id) continue; + for (old_resolutions, resolutions, 0..) 
|old_resolution, *resolution, i| { + if (old_resolution >= max_package_id) { + resolution.* = invalid_package_id; + continue; + } const mapped = package_id_mapping[old_resolution]; - const resolve_id = new_package.resolutions.off + @intCast(PackageID, i); - if (mapped < max_package_id) { - resolutions[i] = mapped; + resolution.* = mapped; } else { try cloner.clone_queue.append(.{ .old_resolution = old_resolution, .parent = new_package.meta.id, - .resolve_id = resolve_id, + .resolve_id = new_package.resolutions.off + @intCast(PackageID, i), }); } } @@ -2108,7 +2130,7 @@ pub const Package = extern struct { if (comptime Environment.allow_assert) std.debug.assert(dependencies_list.items.len == resolutions_list.items.len); var dependencies: []Dependency = dependencies_list.items.ptr[dependencies_list.items.len..total_len]; - std.mem.set(Dependency, dependencies, Dependency{}); + @memset(dependencies, Dependency{}); const package_dependencies = package_json.dependencies.map.values(); const source_buf = package_json.dependencies.source_buf; @@ -2135,7 +2157,7 @@ pub const Package = extern struct { const new_length = package.dependencies.len + dependencies_list.items.len; - std.mem.set(PackageID, resolutions_list.items.ptr[package.dependencies.off .. package.dependencies.off + package.dependencies.len], invalid_package_id); + @memset(resolutions_list.items.ptr[package.dependencies.off .. package.dependencies.off + package.dependencies.len], invalid_package_id); dependencies_list.items = dependencies_list.items.ptr[0..new_length]; resolutions_list.items = resolutions_list.items.ptr[0..new_length]; @@ -2160,10 +2182,10 @@ pub const Package = extern struct { const dependency_groups = comptime brk: { var out_groups: [ - @as(usize, @boolToInt(features.dependencies)) + - @as(usize, @boolToInt(features.dev_dependencies)) + - @as(usize, @boolToInt(features.optional_dependencies)) + - @as(usize, @boolToInt(features.peer_dependencies)) + @as(usize, @intFromBool(features.dependencies)) + + @as(usize, @intFromBool(features.dev_dependencies)) + + @as(usize, @intFromBool(features.optional_dependencies)) + + @as(usize, @intFromBool(features.peer_dependencies)) ]DependencyGroup = undefined; var out_group_i: usize = 0; @@ -2252,7 +2274,7 @@ pub const Package = extern struct { if (comptime Environment.allow_assert) std.debug.assert(dependencies_list.items.len == resolutions_list.items.len); var dependencies = dependencies_list.items.ptr[dependencies_list.items.len..total_len]; - std.mem.set(Dependency, dependencies, .{}); + @memset(dependencies, .{}); total_dependencies_count = 0; inline for (dependency_groups) |group| { @@ -2323,7 +2345,7 @@ pub const Package = extern struct { const new_length = package.dependencies.len + dependencies_list.items.len; - std.mem.set(PackageID, resolutions_list.items.ptr[package.dependencies.off .. package.dependencies.off + package.dependencies.len], invalid_package_id); + @memset(resolutions_list.items.ptr[package.dependencies.off .. 
package.dependencies.off + package.dependencies.len], invalid_package_id); dependencies_list.items = dependencies_list.items.ptr[0..new_length]; resolutions_list.items = resolutions_list.items.ptr[0..new_length]; @@ -2408,7 +2430,7 @@ pub const Package = extern struct { }; pub fn hash(name: string, version: Semver.Version) u64 { - var hasher = std.hash.Wyhash.init(0); + var hasher = bun.Wyhash.init(0); hasher.update(name); hasher.update(std.mem.asBytes(&version)); return hasher.final(); @@ -2438,12 +2460,11 @@ pub const Package = extern struct { initializeStore(); const json = json_parser.ParseJSONUTF8(&source, log, allocator) catch |err| { - if (Output.enable_ansi_colors) { - log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; - } else { - log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {}; + }, } - Output.prettyErrorln("<r><red>{s}<r> parsing package.json in <b>\"{s}\"<r>", .{ @errorName(err), source.path.prettyDir() }); Global.crash(); }; @@ -2956,9 +2977,7 @@ pub const Package = extern struct { } } - if (comptime features.scripts) { - Package.Scripts.parseCount(allocator, &string_builder, json); - } + Package.Scripts.parseCount(allocator, &string_builder, json); if (comptime ResolverContext != void) { resolver.count(*Lockfile.StringBuilder, &string_builder, json); @@ -2966,11 +2985,11 @@ pub const Package = extern struct { const dependency_groups = comptime brk: { var out_groups: [ - @as(usize, @boolToInt(features.dependencies)) + - @as(usize, @boolToInt(features.dev_dependencies)) + - @as(usize, @boolToInt(features.optional_dependencies)) + - @as(usize, @boolToInt(features.peer_dependencies)) + - @as(usize, @boolToInt(features.workspaces)) + @as(usize, @intFromBool(features.dependencies)) + + @as(usize, @intFromBool(features.dev_dependencies)) + + @as(usize, @intFromBool(features.optional_dependencies)) + + @as(usize, @intFromBool(features.peer_dependencies)) + + @as(usize, @intFromBool(features.workspaces)) ]DependencyGroup = undefined; var out_group_i: usize = 0; @@ -3084,9 +3103,10 @@ pub const Package = extern struct { string_builder.count(key); string_builder.count(value); - // If it's a folder, pessimistically assume we will need a maximum path - if (Dependency.Version.Tag.infer(value) == .folder) { - string_builder.cap += bun.MAX_PATH_BYTES; + // If it's a folder or workspace, pessimistically assume we will need a maximum path + switch (Dependency.Version.Tag.infer(value)) { + .folder, .workspace => string_builder.cap += bun.MAX_PATH_BYTES, + else => {}, } } total_dependencies_count += @truncate(u32, obj.properties.len); @@ -3113,6 +3133,37 @@ pub const Package = extern struct { } } + if (comptime features.trusted_dependencies) { + if (json.asProperty("trustedDependencies")) |q| { + switch (q.expr.data) { + .e_array => |arr| { + try lockfile.trusted_dependencies.ensureUnusedCapacity(allocator, arr.items.len); + for (arr.slice()) |item| { + const name = item.asString(allocator) orelse { + log.addErrorFmt(&source, q.loc, allocator, + \\trustedDependencies expects an array of strings, e.g. 
+ \\"trustedDependencies": [ + \\ "package_name" + \\] + , .{}) catch {}; + return error.InvalidPackageJSON; + }; + lockfile.trusted_dependencies.putAssumeCapacity(@truncate(u32, String.Builder.stringHash(name)), {}); + } + }, + else => { + log.addErrorFmt(&source, q.loc, allocator, + \\trustedDependencies expects an array of strings, e.g. + \\"trustedDependencies": [ + \\ "package_name" + \\] + , .{}) catch {}; + return error.InvalidPackageJSON; + }, + } + } + } + try string_builder.allocate(); try lockfile.buffers.dependencies.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); try lockfile.buffers.resolutions.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); @@ -3233,9 +3284,7 @@ pub const Package = extern struct { } } - if (comptime features.scripts) { - package.scripts.parseAlloc(allocator, &string_builder, json); - } + package.scripts.parseAlloc(allocator, &string_builder, json); package.scripts.filled = true; // It is allowed for duplicate dependencies to exist in optionalDependencies and regular dependencies @@ -3320,7 +3369,7 @@ pub const Package = extern struct { } } - std.sort.sort( + std.sort.block( Dependency, package_dependencies[0..total_dependencies_count], lockfile.buffers.string_bytes.items, @@ -3332,7 +3381,7 @@ pub const Package = extern struct { package.resolutions = @bitCast(@TypeOf(package.resolutions), package.dependencies); - std.mem.set(PackageID, lockfile.buffers.resolutions.items.ptr[off..total_len], invalid_package_id); + @memset(lockfile.buffers.resolutions.items.ptr[off..total_len], invalid_package_id); const new_len = off + total_dependencies_count; lockfile.buffers.dependencies.items = lockfile.buffers.dependencies.items.ptr[0..new_len]; @@ -3397,7 +3446,7 @@ pub const Package = extern struct { } }; var trash: i32 = undefined; // workaround for stage1 compiler bug - std.sort.sort(Data, &data, &trash, Sort.lessThan); + std.sort.block(Data, &data, &trash, Sort.lessThan); var sizes_bytes: [fields.len]usize = undefined; var field_indexes: [fields.len]usize = undefined; var Types: [fields.len]type = undefined; @@ -3492,10 +3541,10 @@ pub const Package = extern struct { var bytes = std.mem.sliceAsBytes(sliced.items(@field(Lockfile.Package.List.Field, field.name))); const end_pos = stream.pos + bytes.len; if (end_pos <= end_at) { - @memcpy(bytes.ptr, stream.buffer[stream.pos..].ptr, bytes.len); + @memcpy(bytes, stream.buffer[stream.pos..][0..bytes.len]); stream.pos = end_pos; } else if (comptime strings.eqlComptime(field.name, "scripts")) { - @memset(bytes.ptr, 0, bytes.len); + @memset(bytes, 0); } else { return error.@"Lockfile validation failed: invalid package list range"; } @@ -3511,6 +3560,7 @@ pub fn deinit(this: *Lockfile) void { this.packages.deinit(this.allocator); this.string_pool.deinit(); this.scripts.deinit(this.allocator); + this.trusted_dependencies.deinit(this.allocator); this.workspace_paths.deinit(this.allocator); } @@ -3564,7 +3614,7 @@ const Buffers = struct { } }; var trash: i32 = undefined; // workaround for stage1 compiler bug - std.sort.sort(Data, &data, &trash, Sort.lessThan); + std.sort.block(Data, &data, &trash, Sort.lessThan); var sizes_bytes: [fields.len]usize = undefined; var names: [fields.len][]const u8 = undefined; var types: [fields.len]type = undefined; @@ -3808,7 +3858,7 @@ pub const Serializer = struct { var writer = stream.writer(); try writer.writeAll(header_bytes); - try writer.writeIntLittle(u32, @enumToInt(this.format)); + try writer.writeIntLittle(u32, @intFromEnum(this.format)); try 
writer.writeAll(&this.meta_hash); @@ -3840,7 +3890,7 @@ pub const Serializer = struct { } var format = try reader.readIntLittle(u32); - if (format != @enumToInt(Lockfile.FormatVersion.current)) { + if (format != @intFromEnum(Lockfile.FormatVersion.current)) { return error.@"Outdated lockfile version"; } @@ -3928,7 +3978,7 @@ fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash } } - std.sort.sort( + std.sort.block( PackageID, alphabetized_names, Lockfile.Package.Alphabetizer{ diff --git a/src/install/npm.zig b/src/install/npm.zig index c01e6ee1f..6edb6dcb4 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -80,14 +80,14 @@ pub const Registry = struct { url.path = pathname; } - while (std.mem.lastIndexOfScalar(u8, pathname, ':')) |colon| { + while (strings.lastIndexOfChar(pathname, ':')) |colon| { var segment = pathname[colon + 1 ..]; pathname = pathname[0..colon]; if (pathname.len > 1 and pathname[pathname.len - 1] == '/') { pathname = pathname[0 .. pathname.len - 1]; } - const eql_i = std.mem.indexOfScalar(u8, segment, '=') orelse continue; + const eql_i = strings.indexOfChar(segment, '=') orelse continue; var value = segment[eql_i + 1 ..]; segment = segment[0..eql_i]; @@ -121,9 +121,9 @@ pub const Registry = struct { if (registry.username.len > 0 and registry.password.len > 0 and auth.len == 0) { var output_buf = try allocator.alloc(u8, registry.username.len + registry.password.len + 1 + std.base64.standard.Encoder.calcSize(registry.username.len + registry.password.len + 1)); var input_buf = output_buf[0 .. registry.username.len + registry.password.len + 1]; - @memcpy(input_buf.ptr, registry.username.ptr, registry.username.len); + @memcpy(input_buf[0..registry.username.len], registry.username); input_buf[registry.username.len] = ':'; - @memcpy(input_buf[registry.username.len + 1 ..].ptr, registry.password.ptr, registry.password.len); + @memcpy(input_buf[registry.username.len + 1 ..][0..registry.password.len], registry.password); output_buf = output_buf[input_buf.len..]; auth = std.base64.standard.Encoder.encode(output_buf, input_buf); break :outer; @@ -259,9 +259,9 @@ pub const OperatingSystem = enum(u16) { pub fn isMatch(this: OperatingSystem) bool { if (comptime Environment.isLinux) { - return (@enumToInt(this) & linux) != 0; + return (@intFromEnum(this) & linux) != 0; } else if (comptime Environment.isMac) { - return (@enumToInt(this) & darwin) != 0; + return (@intFromEnum(this) & darwin) != 0; } else { return false; } @@ -282,7 +282,7 @@ pub const OperatingSystem = enum(u16) { if (str.len == 0) { return this_; } - const this = @enumToInt(this_); + const this = @intFromEnum(this_); const is_not = str[0] == '!'; const offset: usize = if (str[0] == '!') 1 else 0; @@ -290,9 +290,9 @@ pub const OperatingSystem = enum(u16) { const field: u16 = NameMap.get(str[offset..]) orelse return this_; if (is_not) { - return @intToEnum(OperatingSystem, this & ~field); + return @enumFromInt(OperatingSystem, this & ~field); } else { - return @intToEnum(OperatingSystem, this | field); + return @enumFromInt(OperatingSystem, this | field); } } }; @@ -334,9 +334,9 @@ pub const Architecture = enum(u16) { pub fn isMatch(this: Architecture) bool { if (comptime Environment.isAarch64) { - return (@enumToInt(this) & arm64) != 0; + return (@intFromEnum(this) & arm64) != 0; } else if (comptime Environment.isX64) { - return (@enumToInt(this) & x64) != 0; + return (@intFromEnum(this) & x64) != 0; } else { return false; } @@ -346,7 +346,7 @@ pub const Architecture = enum(u16) { if 
(str.len == 0) { return this_; } - const this = @enumToInt(this_); + const this = @intFromEnum(this_); const is_not = str[0] == '!'; const offset: usize = if (str[0] == '!') 1 else 0; @@ -355,9 +355,9 @@ pub const Architecture = enum(u16) { const field: u16 = NameMap.get(input) orelse return this_; if (is_not) { - return @intToEnum(Architecture, this & ~field); + return @enumFromInt(Architecture, this & ~field); } else { - return @intToEnum(Architecture, this | field); + return @enumFromInt(Architecture, this | field); } } }; @@ -496,7 +496,7 @@ pub const PackageManifest = struct { } }; var trash: i32 = undefined; // workaround for stage1 compiler bug - std.sort.sort(Data, &data, &trash, Sort.lessThan); + std.sort.block(Data, &data, &trash, Sort.lessThan); var sizes_bytes: [fields.len]usize = undefined; var names: [fields.len][]const u8 = undefined; for (data, 0..) |elem, i| { @@ -571,7 +571,7 @@ pub const PackageManifest = struct { } pub fn save(this: *const PackageManifest, tmpdir: std.fs.IterableDir, cache_dir: std.fs.IterableDir) !void { - const file_id = std.hash.Wyhash.hash(0, this.name()); + const file_id = bun.Wyhash.hash(0, this.name()); var dest_path_buf: [512 + 64]u8 = undefined; var out_path_buf: ["-18446744073709551615".len + ".npm".len + 1]u8 = undefined; var dest_path_stream = std.io.fixedBufferStream(&dest_path_buf); @@ -588,7 +588,7 @@ pub const PackageManifest = struct { } pub fn load(allocator: std.mem.Allocator, cache_dir: std.fs.IterableDir, package_name: string) !?PackageManifest { - const file_id = std.hash.Wyhash.hash(0, package_name); + const file_id = bun.Wyhash.hash(0, package_name); var file_path_buf: [512 + 64]u8 = undefined; const hex_fmt = bun.fmt.hexIntLower(file_id); var file_path = try std.fmt.bufPrintZ(&file_path_buf, "{any}.npm", .{hex_fmt}); @@ -630,7 +630,7 @@ pub const PackageManifest = struct { inline for (sizes.fields) |field_name| { if (comptime strings.eqlComptime(field_name, "pkg")) { - pkg_stream.pos = std.mem.alignForward(pkg_stream.pos, @alignOf(Npm.NpmPackage)); + pkg_stream.pos = std.mem.alignForward(usize, pkg_stream.pos, @alignOf(Npm.NpmPackage)); var reader = pkg_stream.reader(); package_manifest.pkg = try reader.readStruct(NpmPackage); } else { @@ -847,11 +847,11 @@ pub const PackageManifest = struct { for (versions) |prop| { const version_name = prop.key.?.asString(allocator) orelse continue; - if (std.mem.indexOfScalar(u8, version_name, '-') != null) { + if (strings.indexOfChar(version_name, '-') != null) { pre_versions_len += 1; extern_string_count += 1; } else { - extern_string_count += @as(usize, @boolToInt(std.mem.indexOfScalar(u8, version_name, '+') != null)); + extern_string_count += @as(usize, @intFromBool(strings.indexOfChar(version_name, '+') != null)); release_versions_len += 1; } @@ -862,7 +862,7 @@ pub const PackageManifest = struct { if (tarball_prop.data == .e_string) { const tarball = tarball_prop.data.e_string.slice(allocator); string_builder.count(tarball); - tarball_urls_count += @as(usize, @boolToInt(tarball.len > 0)); + tarball_urls_count += @as(usize, @intFromBool(tarball.len > 0)); } } } @@ -961,19 +961,19 @@ pub const PackageManifest = struct { if (versioned_packages.len > 0) { var versioned_packages_bytes = std.mem.sliceAsBytes(versioned_packages); - @memset(versioned_packages_bytes.ptr, 0, versioned_packages_bytes.len); + @memset(versioned_packages_bytes, 0); } if (all_semver_versions.len > 0) { var all_semver_versions_bytes = std.mem.sliceAsBytes(all_semver_versions); - @memset(all_semver_versions_bytes.ptr, 
0, all_semver_versions_bytes.len); + @memset(all_semver_versions_bytes, 0); } if (all_extern_strings.len > 0) { var all_extern_strings_bytes = std.mem.sliceAsBytes(all_extern_strings); - @memset(all_extern_strings_bytes.ptr, 0, all_extern_strings_bytes.len); + @memset(all_extern_strings_bytes, 0); } if (version_extern_strings.len > 0) { var version_extern_strings_bytes = std.mem.sliceAsBytes(version_extern_strings); - @memset(version_extern_strings_bytes.ptr, 0, version_extern_strings_bytes.len); + @memset(version_extern_strings_bytes, 0); } var versioned_package_releases = versioned_packages[0..release_versions_len]; @@ -998,7 +998,7 @@ pub const PackageManifest = struct { var string_buf: string = ""; if (string_builder.ptr) |ptr| { // 0 it out for better determinism - @memset(ptr, 0, string_builder.cap); + @memset(ptr[0..string_builder.cap], 0); string_buf = ptr[0..string_builder.cap]; } @@ -1253,8 +1253,8 @@ pub const PackageManifest = struct { var this_names = dependency_names[0..count]; var this_versions = dependency_values[0..count]; - var name_hasher = std.hash.Wyhash.init(0); - var version_hasher = std.hash.Wyhash.init(0); + var name_hasher = bun.Wyhash.init(0); + var version_hasher = bun.Wyhash.init(0); const is_peer = comptime strings.eqlComptime(pair.prop, "peerDependencies"); @@ -1495,7 +1495,7 @@ pub const PackageManifest = struct { if (src.len > 0) { var dst = std.mem.sliceAsBytes(all_extern_strings[all_extern_strings.len - extern_strings.len ..]); std.debug.assert(dst.len >= src.len); - @memcpy(dst.ptr, src.ptr, src.len); + @memcpy(dst[0..src.len], src); } all_extern_strings = all_extern_strings[0 .. all_extern_strings.len - extern_strings.len]; diff --git a/src/install/repository.zig b/src/install/repository.zig index c4b68d9be..6546481e9 100644 --- a/src/install/repository.zig +++ b/src/install/repository.zig @@ -94,7 +94,7 @@ pub const Repository = extern struct { if (!formatter.repository.resolved.isEmpty()) { try writer.writeAll("#"); var resolved = formatter.repository.resolved.slice(formatter.buf); - if (std.mem.lastIndexOfScalar(u8, resolved, '-')) |i| { + if (strings.lastIndexOfChar(resolved, '-')) |i| { resolved = resolved[i + 1 ..]; } try writer.writeAll(resolved); diff --git a/src/install/resolution.zig b/src/install/resolution.zig index 1bd88e78d..b1adb3d80 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -20,7 +20,7 @@ pub const Resolution = extern struct { rhs_buf: []const u8, ) std.math.Order { if (lhs.tag != rhs.tag) { - return std.math.order(@enumToInt(lhs.tag), @enumToInt(rhs.tag)); + return std.math.order(@intFromEnum(lhs.tag), @intFromEnum(rhs.tag)); } return switch (lhs.tag) { diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig index 90a2a414c..0ece2be7f 100644 --- a/src/install/resolvers/folder_resolver.zig +++ b/src/install/resolvers/folder_resolver.zig @@ -30,7 +30,7 @@ pub const FolderResolution = union(Tag) { } pub fn hash(normalized_path: string) u64 { - return std.hash.Wyhash.hash(0, normalized_path); + return bun.hash(normalized_path); } fn NewResolver(comptime tag: Resolution.Tag) type { @@ -95,9 +95,9 @@ pub const FolderResolution = union(Tag) { .global, .cache_folder => { const path = if (global_or_relative == .global) global_or_relative.global else global_or_relative.cache_folder; if (path.len > 0) { - const offset = path.len -| @as(usize, @boolToInt(path[path.len -| 1] == std.fs.path.sep)); + const offset = path.len -| @as(usize, @intFromBool(path[path.len -| 1] == 
std.fs.path.sep)); if (offset > 0) - @memcpy(remain.ptr, path.ptr, offset); + @memcpy(remain[0..offset], path[0..offset]); remain = remain[offset..]; if (normalized.len > 0) { if ((path[path.len - 1] != std.fs.path.sep) and (normalized[0] != std.fs.path.sep)) { diff --git a/src/install/semver.zig b/src/install/semver.zig index 9fd6b9c8d..c2730e7c7 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -263,7 +263,7 @@ pub const String = extern struct { std.debug.assert(bun.isSliceInBuffer(in, buf)); return Pointer{ - .off = @truncate(u32, @ptrToInt(in.ptr) - @ptrToInt(buf.ptr)), + .off = @truncate(u32, @intFromPtr(in.ptr) - @intFromPtr(buf.ptr)), .len = @truncate(u32, in.len), }; } @@ -311,7 +311,7 @@ pub const String = extern struct { pub const StringPool = std.HashMap(u64, String, IdentityContext(u64), 80); pub inline fn stringHash(buf: []const u8) u64 { - return std.hash.Wyhash.hash(0, buf); + return bun.Wyhash.hash(0, buf); } pub inline fn count(this: *Builder, slice_: string) void { @@ -515,7 +515,7 @@ pub const ExternalString = extern struct { pub inline fn from(in: string) ExternalString { return ExternalString{ .value = String.init(in, in), - .hash = std.hash.Wyhash.hash(0, in), + .hash = bun.Wyhash.hash(0, in), }; } @@ -552,15 +552,15 @@ pub const BigExternalString = extern struct { return BigExternalString{ .off = 0, .len = @truncate(u32, in.len), - .hash = std.hash.Wyhash.hash(0, in), + .hash = bun.Wyhash.hash(0, in), }; } pub inline fn init(buf: string, in: string, hash: u64) BigExternalString { - std.debug.assert(@ptrToInt(buf.ptr) <= @ptrToInt(in.ptr) and ((@ptrToInt(in.ptr) + in.len) <= (@ptrToInt(buf.ptr) + buf.len))); + std.debug.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr) and ((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len))); return BigExternalString{ - .off = @truncate(u32, @ptrToInt(in.ptr) - @ptrToInt(buf.ptr)), + .off = @truncate(u32, @intFromPtr(in.ptr) - @intFromPtr(buf.ptr)), .len = @truncate(u32, in.len), .hash = hash, }; @@ -580,19 +580,19 @@ pub const SlicedString = struct { } pub inline fn external(this: SlicedString) ExternalString { - if (comptime Environment.allow_assert) std.debug.assert(@ptrToInt(this.buf.ptr) <= @ptrToInt(this.slice.ptr) and ((@ptrToInt(this.slice.ptr) + this.slice.len) <= (@ptrToInt(this.buf.ptr) + this.buf.len))); + if (comptime Environment.allow_assert) std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len))); - return ExternalString.init(this.buf, this.slice, std.hash.Wyhash.hash(0, this.slice)); + return ExternalString.init(this.buf, this.slice, bun.Wyhash.hash(0, this.slice)); } pub inline fn value(this: SlicedString) String { - if (comptime Environment.allow_assert) std.debug.assert(@ptrToInt(this.buf.ptr) <= @ptrToInt(this.slice.ptr) and ((@ptrToInt(this.slice.ptr) + this.slice.len) <= (@ptrToInt(this.buf.ptr) + this.buf.len))); + if (comptime Environment.allow_assert) std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len))); return String.init(this.buf, this.slice); } pub inline fn sub(this: SlicedString, input: string) SlicedString { - std.debug.assert(@ptrToInt(this.buf.ptr) <= @ptrToInt(this.buf.ptr) and ((@ptrToInt(input.ptr) + input.len) <= (@ptrToInt(this.buf.ptr) + this.buf.len))); + std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.buf.ptr) 
and ((@intFromPtr(input.ptr) + input.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len))); return SlicedString{ .buf = this.buf, .slice = input }; } }; @@ -678,7 +678,7 @@ pub const Version = extern struct { .build = this.tag.build.hash, }; const bytes = std.mem.asBytes(&hashable); - return std.hash.Wyhash.hash(0, bytes); + return bun.Wyhash.hash(0, bytes); } pub const Formatter = struct { @@ -1814,7 +1814,7 @@ pub const Query = struct { }; if (!hyphenate) i = rollback; - i += @as(usize, @boolToInt(!hyphenate)); + i += @as(usize, @intFromBool(!hyphenate)); if (hyphenate) { const second_parsed = Version.parse(sliced.sub(input[i..]), allocator); |
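The hunks above repeat two mechanical migrations for Zig 0.11: the builtin renames (@ptrToInt/@enumToInt/@boolToInt become @intFromPtr/@intFromEnum/@intFromBool, std.mem.set and the three-argument @memcpy/@memset become the slice-based forms, std.sort.sort becomes std.sort.block), and the replacement of duplicated if (Output.enable_ansi_colors) branches with a switch whose inline else prong makes the flag comptime-known. The sketch below is illustrative only and not part of this commit; it assumes Zig 0.11 semantics, and prettyFmt is a stand-in for bun's Output.prettyFmt rather than the real API.

const std = @import("std");

// Stand-in for Output.prettyFmt: picks a format string at comptime.
fn prettyFmt(comptime fmt: []const u8, comptime enable_ansi_colors: bool) []const u8 {
    return if (enable_ansi_colors) "\x1b[31m" ++ fmt ++ "\x1b[0m" else fmt;
}

pub fn main() void {
    var buf: [8]u8 = undefined;
    const src = "bun";

    // std.mem.set(u8, &buf, 0) and @memset(ptr, 0, len) become the slice form.
    @memset(&buf, 0);
    // @memcpy(dest.ptr, src.ptr, len) becomes two equal-length slices.
    @memcpy(buf[0..src.len], src);

    // std.sort.sort / std.sort.insertionSort become std.sort.block / std.sort.insertion.
    var nums = [_]u32{ 3, 1, 2 };
    std.sort.block(u32, &nums, {}, std.sort.asc(u32));

    // @enumToInt(e) becomes @intFromEnum(e); the pointer and bool casts follow
    // the same naming, as in the Integrity.Tag.isSupported hunk above.
    const Tag = enum(u8) { sha1 = 1, sha512 = 4 };
    std.debug.assert(@intFromEnum(Tag.sha512) >= @intFromEnum(Tag.sha1));

    // A runtime bool promoted to a comptime-known value via `inline else`,
    // replacing the duplicated if/else branches around Output.prettyFmt.
    const enable_ansi_colors = std.io.getStdOut().isTty();
    switch (enable_ansi_colors) {
        inline else => |colors| {
            std.debug.print(comptime prettyFmt("copied: {s}\n", colors), .{buf[0..src.len]});
        },
    }
}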