Diffstat (limited to 'src/install/lockfile.zig')
-rw-r--r--  src/install/lockfile.zig  148
1 file changed, 81 insertions(+), 67 deletions(-)
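
Most hunks below are a mechanical migration to Zig 0.11's single-argument cast builtins (@bitCast, @intCast, @truncate, @enumFromInt, @splat): the result type now comes from @as or from the destination, instead of being passed as the builtin's first argument. A minimal standalone sketch of the pattern, assuming Zig 0.11; the helpers packId and lenToPackageId are hypothetical and not taken from this diff:

const std = @import("std");

// Zig 0.10: @bitCast([4]u8, id); Zig 0.11: the result type flows in from @as.
fn packId(id: u32) [4]u8 {
    return @as([4]u8, @bitCast(id));
}

// Zig 0.10: @truncate(u32, len); Zig 0.11: @truncate takes one operand.
fn lenToPackageId(len: usize) u32 {
    return @as(u32, @truncate(len));
}

pub fn main() void {
    std.debug.print("{any} {d}\n", .{ packId(513), lenToPackageId(70000) });
}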
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 202cfddc0..cf0ee8267 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -226,23 +226,23 @@ pub const Tree = struct {
pub fn toExternal(this: Tree) External {
var out = External{};
- out[0..4].* = @bitCast(Id, this.id);
- out[4..8].* = @bitCast(Id, this.dependency_id);
- out[8..12].* = @bitCast(Id, this.parent);
- out[12..16].* = @bitCast(u32, this.dependencies.off);
- out[16..20].* = @bitCast(u32, this.dependencies.len);
+ out[0..4].* = @as(Id, @bitCast(this.id));
+ out[4..8].* = @as(Id, @bitCast(this.dependency_id));
+ out[8..12].* = @as(Id, @bitCast(this.parent));
+ out[12..16].* = @as(u32, @bitCast(this.dependencies.off));
+ out[16..20].* = @as(u32, @bitCast(this.dependencies.len));
if (out.len != 20) @compileError("Tree.External is not 20 bytes");
return out;
}
pub fn toTree(out: External) Tree {
return .{
- .id = @bitCast(Id, out[0..4].*),
- .dependency_id = @bitCast(Id, out[4..8].*),
- .parent = @bitCast(Id, out[8..12].*),
+ .id = @as(Id, @bitCast(out[0..4].*)),
+ .dependency_id = @as(Id, @bitCast(out[4..8].*)),
+ .parent = @as(Id, @bitCast(out[8..12].*)),
.dependencies = .{
- .off = @bitCast(u32, out[12..16].*),
- .len = @bitCast(u32, out[16..20].*),
+ .off = @as(u32, @bitCast(out[12..16].*)),
+ .len = @as(u32, @bitCast(out[16..20].*)),
},
};
}
@@ -306,7 +306,7 @@ pub const Tree = struct {
if (tree.id > 0) {
var depth_buf_len: usize = 1;
- while (parent_id > 0 and parent_id < @intCast(Id, this.trees.len)) {
+ while (parent_id > 0 and parent_id < @as(Id, @intCast(this.trees.len))) {
this.depth_stack[depth_buf_len] = parent_id;
parent_id = this.trees[parent_id].parent;
depth_buf_len += 1;
@@ -374,7 +374,7 @@ pub const Tree = struct {
/// Flatten the multi-dimensional ArrayList of package IDs into a single easily serializable array
pub fn clean(this: *Builder) !DependencyIDList {
- const end = @truncate(Id, this.list.len);
+ const end = @as(Id, @truncate(this.list.len));
var i: Id = 0;
var total: u32 = 0;
var trees = this.list.items(.tree);
@@ -389,7 +389,7 @@ pub const Tree = struct {
for (trees, dependencies) |*tree, *child| {
if (tree.dependencies.len > 0) {
- const len = @truncate(PackageID, child.items.len);
+ const len = @as(PackageID, @truncate(child.items.len));
next.off += next.len;
next.len = len;
tree.dependencies = next;
@@ -419,7 +419,7 @@ pub const Tree = struct {
try builder.list.append(builder.allocator, .{
.tree = .{
.parent = this.id,
- .id = @truncate(Id, builder.list.len),
+ .id = @as(Id, @truncate(builder.list.len)),
.dependency_id = dependency_id,
},
.dependencies = .{},
@@ -430,7 +430,7 @@ pub const Tree = struct {
const dependency_lists = list_slice.items(.dependencies);
const next: *Tree = &trees[builder.list.len - 1];
const name_hashes: []const PackageNameHash = builder.name_hashes;
- const max_package_id = @truncate(PackageID, name_hashes.len);
+ const max_package_id = @as(PackageID, @truncate(name_hashes.len));
var dep_id = resolution_list.off;
const end = dep_id + resolution_list.len;
@@ -545,7 +545,7 @@ pub fn maybeCloneFilteringRootPackages(
const root_dependencies = old_root_dependenices_list.get(old.buffers.dependencies.items);
var resolutions = old_root_resolutions.mut(old.buffers.resolutions.items);
var any_changes = false;
- const end = @truncate(PackageID, old.packages.len);
+ const end = @as(PackageID, @truncate(old.packages.len));
for (root_dependencies, resolutions) |dependency, *resolution| {
if (!dependency.behavior.isEnabled(features) and resolution.* < end) {
@@ -1022,7 +1022,7 @@ pub const Printer = struct {
defer if (id_map.len > 0) default_allocator.free(id_map);
visited.set(0);
- const end = @truncate(PackageID, resolved.len);
+ const end = @as(PackageID, @truncate(resolved.len));
if (this.successfully_installed) |installed| {
var dep_id = resolutions_list[0].off;
@@ -1039,7 +1039,7 @@ pub const Printer = struct {
if (update.failed) return;
if (update.matches(dependency, string_buf)) {
if (dependency_id.* == invalid_package_id) {
- dependency_id.* = @truncate(DependencyID, dep_id);
+ dependency_id.* = @as(DependencyID, @truncate(dep_id));
}
continue :outer;
@@ -1076,7 +1076,7 @@ pub const Printer = struct {
if (update.failed) return;
if (update.matches(dependency, string_buf)) {
if (dependency_id.* == invalid_package_id) {
- dependency_id.* = @truncate(DependencyID, dep_id);
+ dependency_id.* = @as(DependencyID, @truncate(dep_id));
}
continue :outer;
@@ -1202,7 +1202,7 @@ pub const Printer = struct {
var requested_versions = RequestedVersion.init(this.lockfile.allocator);
var all_requested_versions = try this.lockfile.allocator.alloc(Dependency.Version, resolutions_buffer.len);
defer this.lockfile.allocator.free(all_requested_versions);
- const package_count = @truncate(PackageID, names.len);
+ const package_count = @as(PackageID, @truncate(names.len));
var alphabetized_names = try this.lockfile.allocator.alloc(PackageID, package_count - 1);
defer this.lockfile.allocator.free(alphabetized_names);
@@ -1393,7 +1393,7 @@ pub fn verifyResolutions(this: *Lockfile, local_features: Features, remote_featu
const dependency_lists: []const DependencySlice = this.packages.items(.dependencies);
const dependencies_buffer = this.buffers.dependencies.items;
const resolutions_buffer = this.buffers.resolutions.items;
- const end = @truncate(PackageID, this.packages.len);
+ const end = @as(PackageID, @truncate(this.packages.len));
var any_failed = false;
const string_buf = this.buffers.string_bytes.items;
@@ -1403,7 +1403,7 @@ pub fn verifyResolutions(this: *Lockfile, local_features: Features, remote_featu
for (resolution_list.get(resolutions_buffer), dependency_list.get(dependencies_buffer)) |package_id, failed_dep| {
if (package_id < end) continue;
if (failed_dep.behavior.isPeer() or !failed_dep.behavior.isEnabled(
- if (root_list.contains(@truncate(PackageID, parent_id)))
+ if (root_list.contains(@as(PackageID, @truncate(parent_id))))
local_features
else
remote_features,
@@ -1446,7 +1446,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ) void {
tmpname_buf[0..8].* = "bunlock-".*;
var tmpfile = FileSystem.RealFS.Tmpfile{};
var secret: [32]u8 = undefined;
- std.mem.writeIntNative(u64, secret[0..8], @intCast(u64, std.time.milliTimestamp()));
+ std.mem.writeIntNative(u64, secret[0..8], @as(u64, @intCast(std.time.milliTimestamp())));
var base64_bytes: [64]u8 = undefined;
std.crypto.random.bytes(&base64_bytes);
@@ -1588,7 +1588,7 @@ pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !v
}
pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Package {
- const id = @truncate(PackageID, this.packages.len);
+ const id = @as(PackageID, @truncate(this.packages.len));
return try appendPackageWithID(this, package_, id);
}
@@ -1929,11 +1929,11 @@ pub const Package = extern struct {
field: string,
behavior: Behavior,
- pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = @enumFromInt(Behavior, Behavior.normal) };
- pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @enumFromInt(Behavior, Behavior.dev) };
- pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @enumFromInt(Behavior, Behavior.optional) };
- pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @enumFromInt(Behavior, Behavior.peer) };
- pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @enumFromInt(Behavior, Behavior.workspace) };
+ pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.normal)) };
+ pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.dev)) };
+ pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.optional)) };
+ pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.peer)) };
+ pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @as(Behavior, @enumFromInt(Behavior.workspace)) };
};
pub inline fn isDisabled(this: *const Lockfile.Package) bool {
@@ -1988,9 +1988,9 @@ pub const Package = extern struct {
try new.buffers.resolutions.ensureUnusedCapacity(new.allocator, old_dependencies.len);
try new.buffers.extern_strings.ensureUnusedCapacity(new.allocator, new_extern_string_count);
- const prev_len = @truncate(u32, new.buffers.dependencies.items.len);
- const end = prev_len + @truncate(u32, old_dependencies.len);
- const max_package_id = @truncate(PackageID, old.packages.len);
+ const prev_len = @as(u32, @truncate(new.buffers.dependencies.items.len));
+ const end = prev_len + @as(u32, @truncate(old_dependencies.len));
+ const max_package_id = @as(PackageID, @truncate(old.packages.len));
new.buffers.dependencies.items = new.buffers.dependencies.items.ptr[0..end];
new.buffers.resolutions.items = new.buffers.resolutions.items.ptr[0..end];
@@ -2001,7 +2001,7 @@ pub const Package = extern struct {
var dependencies: []Dependency = new.buffers.dependencies.items[prev_len..end];
var resolutions: []PackageID = new.buffers.resolutions.items[prev_len..end];
- const id = @truncate(PackageID, new.packages.len);
+ const id = @as(PackageID, @truncate(new.packages.len));
const new_package = try new.appendPackageWithID(
.{
.name = builder.appendWithHash(
@@ -2068,7 +2068,7 @@ pub const Package = extern struct {
try cloner.clone_queue.append(.{
.old_resolution = old_resolution,
.parent = new_package.meta.id,
- .resolve_id = new_package.resolutions.off + @intCast(PackageID, i),
+ .resolve_id = new_package.resolutions.off + @as(PackageID, @intCast(i)),
});
}
}
@@ -2150,8 +2150,8 @@ pub const Package = extern struct {
package.meta.arch = package_json.arch;
package.meta.os = package_json.os;
- package.dependencies.off = @truncate(u32, dependencies_list.items.len);
- package.dependencies.len = total_dependencies_count - @truncate(u32, dependencies.len);
+ package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len));
+ package.dependencies.len = total_dependencies_count - @as(u32, @truncate(dependencies.len));
package.resolutions.off = package.dependencies.off;
package.resolutions.len = package.dependencies.len;
@@ -2338,7 +2338,7 @@ pub const Package = extern struct {
package.meta.integrity = package_version.integrity;
- package.dependencies.off = @truncate(u32, dependencies_list.items.len);
+ package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len));
package.dependencies.len = total_dependencies_count;
package.resolutions.off = package.dependencies.off;
package.resolutions.len = package.dependencies.len;
@@ -2407,7 +2407,7 @@ pub const Package = extern struct {
};
if (to_deps[to_i].eql(from_dep, to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items)) {
- mapping[to_i] = @truncate(PackageID, i);
+ mapping[to_i] = @as(PackageID, @truncate(i));
continue;
}
@@ -2548,7 +2548,7 @@ pub const Package = extern struct {
),
);
defer dependency_version.value.workspace = path;
- var workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, @truncate(u32, external_name.hash));
+ var workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, @as(u32, @truncate(external_name.hash)));
if (workspace_entry.found_existing) {
const old_path = workspace_entry.value_ptr.*;
@@ -2931,7 +2931,7 @@ pub const Package = extern struct {
.values = workspace_names.values(),
});
- return @truncate(u32, workspace_names.count());
+ return @as(u32, @truncate(workspace_names.count()));
}
fn parseWithJSON(
@@ -3128,7 +3128,7 @@ pub const Package = extern struct {
else => {},
}
}
- total_dependencies_count += @truncate(u32, obj.properties.len);
+ total_dependencies_count += @as(u32, @truncate(obj.properties.len));
},
else => {
if (group.behavior.isWorkspace()) {
@@ -3167,7 +3167,7 @@ pub const Package = extern struct {
, .{}) catch {};
return error.InvalidPackageJSON;
};
- lockfile.trusted_dependencies.putAssumeCapacity(@truncate(u32, String.Builder.stringHash(name)), {});
+ lockfile.trusted_dependencies.putAssumeCapacity(@as(u32, @truncate(String.Builder.stringHash(name))), {});
}
},
else => {
@@ -3313,7 +3313,7 @@ pub const Package = extern struct {
}
total_dependencies_count = 0;
- const in_workspace = lockfile.workspace_paths.contains(@truncate(u32, package.name_hash));
+ const in_workspace = lockfile.workspace_paths.contains(@as(u32, @truncate(package.name_hash)));
inline for (dependency_groups) |group| {
if (group.behavior.isWorkspace()) {
@@ -3354,7 +3354,7 @@ pub const Package = extern struct {
var tag: ?Dependency.Version.Tag = null;
var workspace_path: ?String = null;
- if (lockfile.workspace_paths.get(@truncate(u32, external_name.hash))) |path| {
+ if (lockfile.workspace_paths.get(@as(u32, @truncate(external_name.hash)))) |path| {
tag = .workspace;
workspace_path = path;
}
@@ -3395,10 +3395,10 @@ pub const Package = extern struct {
Dependency.isLessThan,
);
- package.dependencies.off = @truncate(u32, off);
- package.dependencies.len = @truncate(u32, total_dependencies_count);
+ package.dependencies.off = @as(u32, @truncate(off));
+ package.dependencies.len = @as(u32, @truncate(total_dependencies_count));
- package.resolutions = @bitCast(@TypeOf(package.resolutions), package.dependencies);
+ package.resolutions = @as(@TypeOf(package.resolutions), @bitCast(package.dependencies));
@memset(lockfile.buffers.resolutions.items.ptr[off..total_len], invalid_package_id);
@@ -3458,14 +3458,20 @@ pub const Package = extern struct {
.alignment = if (@sizeOf(field_info.type) == 0) 1 else field_info.alignment,
};
}
- const Sort = struct {
- fn lessThan(trash: *i32, comptime lhs: Data, comptime rhs: Data) bool {
- _ = trash;
- return lhs.alignment > rhs.alignment;
+ const SortContext = struct {
+ data: []Data,
+ pub fn swap(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) void {
+ const tmp = ctx.data[lhs];
+ ctx.data[lhs] = ctx.data[rhs];
+ ctx.data[rhs] = tmp;
+ }
+ pub fn lessThan(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) bool {
+ return ctx.data[lhs].alignment > ctx.data[rhs].alignment;
}
};
- var trash: i32 = undefined; // workaround for stage1 compiler bug
- std.sort.block(Data, &data, &trash, Sort.lessThan);
+ std.sort.insertionContext(0, fields.len, SortContext{
+ .data = &data,
+ });
var sizes_bytes: [fields.len]usize = undefined;
var field_indexes: [fields.len]usize = undefined;
var Types: [fields.len]type = undefined;
@@ -3485,7 +3491,7 @@ pub const Package = extern struct {
pub fn byteSize(list: Lockfile.Package.List) usize {
const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes;
- const capacity_vector = @splat(sizes.bytes.len, list.len);
+ const capacity_vector: @Vector(sizes.bytes.len, usize) = @splat(list.len);
return @reduce(.Add, capacity_vector * sizes_vector);
}
@@ -3626,14 +3632,22 @@ const Buffers = struct {
.type = field_info.type.Slice,
};
}
- const Sort = struct {
- fn lessThan(trash: *i32, comptime lhs: Data, comptime rhs: Data) bool {
- _ = trash;
- return lhs.alignment > rhs.alignment;
+
+ const SortContext = struct {
+ data: []Data,
+ pub fn swap(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) void {
+ const tmp = ctx.data[lhs];
+ ctx.data[lhs] = ctx.data[rhs];
+ ctx.data[rhs] = tmp;
+ }
+ pub fn lessThan(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) bool {
+ return ctx.data[lhs].alignment > ctx.data[rhs].alignment;
}
};
- var trash: i32 = undefined; // workaround for stage1 compiler bug
- std.sort.block(Data, &data, &trash, Sort.lessThan);
+
+ std.sort.insertionContext(0, fields.len, SortContext{
+ .data = &data,
+ });
var sizes_bytes: [fields.len]usize = undefined;
var names: [fields.len][]const u8 = undefined;
var types: [fields.len]type = undefined;
@@ -3673,7 +3687,7 @@ const Buffers = struct {
const misaligned = std.mem.bytesAsSlice(PointerType, stream.buffer[start_pos..end_pos]);
return ArrayList{
- .items = try allocator.dupe(PointerType, @alignCast(@alignOf([*]PointerType), misaligned.ptr)[0..misaligned.len]),
+ .items = try allocator.dupe(PointerType, @as([*]PointerType, @alignCast(misaligned.ptr))[0..misaligned.len]),
.capacity = misaligned.len,
};
}
@@ -3774,7 +3788,7 @@ const Buffers = struct {
if (visited.isSet(dep_id)) continue;
visited.set(dep_id);
}
- return @truncate(DependencyID, dep_id);
+ return @as(DependencyID, @truncate(dep_id));
}
},
}
@@ -3940,16 +3954,16 @@ pub const Serializer = struct {
{
lockfile.package_index = PackageIndex.Map.initContext(allocator, .{});
lockfile.string_pool = StringPool.initContext(allocator, .{});
- try lockfile.package_index.ensureTotalCapacity(@truncate(u32, lockfile.packages.len));
+ try lockfile.package_index.ensureTotalCapacity(@as(u32, @truncate(lockfile.packages.len)));
const slice = lockfile.packages.slice();
const name_hashes = slice.items(.name_hash);
const resolutions = slice.items(.resolution);
for (name_hashes, resolutions, 0..) |name_hash, resolution, id| {
- try lockfile.getOrPutID(@truncate(PackageID, id), name_hash);
+ try lockfile.getOrPutID(@as(PackageID, @truncate(id)), name_hash);
switch (resolution.tag) {
.workspace => {
- try lockfile.workspace_paths.put(allocator, @truncate(u32, name_hash), resolution.value.workspace);
+ try lockfile.workspace_paths.put(allocator, @as(u32, @truncate(name_hash)), resolution.value.workspace);
},
else => {},
}
@@ -3986,13 +4000,13 @@ fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash
while (i + 16 < this.packages.len) : (i += 16) {
comptime var j: usize = 0;
inline while (j < 16) : (j += 1) {
- alphabetized_names[(i + j) - 1] = @truncate(PackageID, (i + j));
+ alphabetized_names[(i + j) - 1] = @as(PackageID, @truncate((i + j)));
string_builder.fmtCount("{s}@{}\n", .{ names[i + j].slice(bytes), resolutions[i + j].fmt(bytes) });
}
}
while (i < this.packages.len) : (i += 1) {
- alphabetized_names[i - 1] = @truncate(PackageID, i);
+ alphabetized_names[i - 1] = @as(PackageID, @truncate(i));
string_builder.fmtCount("{s}@{}\n", .{ names[i].slice(bytes), resolutions[i].fmt(bytes) });
}
}
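
The two Sort → SortContext hunks above drop the stage1-era std.sort.block call (with its dummy *i32 "trash" context) in favor of Zig 0.11's std.sort.insertionContext, which sorts a range of indices through a caller-supplied context exposing lessThan and swap. A self-contained sketch of that API, assuming Zig 0.11; the names sortDescending and Ctx are hypothetical:

const std = @import("std");

fn sortDescending(values: []u32) void {
    const Ctx = struct {
        data: []u32,
        pub fn lessThan(ctx: @This(), lhs: usize, rhs: usize) bool {
            // Descending order, mirroring the alignment sort in the diff.
            return ctx.data[lhs] > ctx.data[rhs];
        }
        pub fn swap(ctx: @This(), lhs: usize, rhs: usize) void {
            std.mem.swap(u32, &ctx.data[lhs], &ctx.data[rhs]);
        }
    };
    // insertionContext sorts the index range [0, len) through the context's
    // lessThan/swap; no element type or comparator function is passed.
    std.sort.insertionContext(0, values.len, Ctx{ .data = values });
}

test "sorts descending" {
    var values = [_]u32{ 3, 1, 4, 1, 5 };
    sortDescending(&values);
    try std.testing.expectEqualSlices(u32, &[_]u32{ 5, 4, 3, 1, 1 }, &values);
}

Because insertionContext needs no allocator and never names an element type, the same pattern also works in comptime code, which is how the hunks above order struct fields by alignment.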