Diffstat (limited to 'src/install')
-rw-r--r--  src/install/bin.zig             |   6
-rw-r--r--  src/install/dependency.zig      |  28
-rw-r--r--  src/install/extract_tarball.zig |   2
-rw-r--r--  src/install/install.zig         |  90
-rw-r--r--  src/install/integrity.zig       |   2
-rw-r--r--  src/install/lockfile.zig        | 148
-rw-r--r--  src/install/npm.zig             |  32
-rw-r--r--  src/install/semver.zig          |  62
8 files changed, 192 insertions, 178 deletions
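
The change repeated across the hunks below is the migration to Zig 0.11's single-argument cast builtins: the destination type moves out of builtins such as @truncate, @intCast, @bitCast, @enumFromInt and @ptrFromInt and is instead supplied by the surrounding result type, usually an enclosing @as. A minimal, self-contained sketch of the before/after shape, assuming a Zig 0.11 compiler (the names here are illustrative only, not taken from the diff):

    const std = @import("std");

    pub fn main() void {
        const count: usize = 300;
        // Zig <= 0.10 spelled the destination type inside the builtin:
        //   const small = @truncate(u32, count);
        // Zig 0.11 builtins take only the operand; the result type comes
        // from the surrounding context, here an explicit @as:
        const small = @as(u32, @truncate(count));
        std.debug.print("{d}\n", .{small});
    }
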
diff --git a/src/install/bin.zig b/src/install/bin.zig
index f8117c1e8..adbd5f4d3 100644
--- a/src/install/bin.zig
+++ b/src/install/bin.zig
@@ -56,7 +56,7 @@ pub const Bin = extern struct {
for (list) |*extern_string| {
builder.count(extern_string.slice(buf));
}
- return @truncate(u32, list.len);
+ return @as(u32, @truncate(list.len));
},
else => {},
}
@@ -461,7 +461,7 @@ pub const Bin = extern struct {
var dir = std.fs.Dir{ .fd = this.package_installed_node_modules };
var joined = Path.joinStringBuf(&target_buf, &parts, .auto);
- @ptrFromInt([*]u8, @intFromPtr(joined.ptr))[joined.len] = 0;
+ @as([*]u8, @ptrFromInt(@intFromPtr(joined.ptr)))[joined.len] = 0;
var joined_: [:0]const u8 = joined.ptr[0..joined.len :0];
var child_dir = bun.openDir(dir, joined_) catch |err| {
this.err = err;
@@ -613,7 +613,7 @@ pub const Bin = extern struct {
var dir = std.fs.Dir{ .fd = this.package_installed_node_modules };
var joined = Path.joinStringBuf(&target_buf, &parts, .auto);
- @ptrFromInt([*]u8, @intFromPtr(joined.ptr))[joined.len] = 0;
+ @as([*]u8, @ptrFromInt(@intFromPtr(joined.ptr)))[joined.len] = 0;
var joined_: [:0]const u8 = joined.ptr[0..joined.len :0];
var child_dir = bun.openDir(dir, joined_) catch |err| {
this.err = err;
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index 6ef02bbfc..8578e998a 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -146,8 +146,8 @@ pub fn toDependency(
};
return Dependency{
.name = name,
- .name_hash = @bitCast(u64, this[8..16].*),
- .behavior = @enumFromInt(Dependency.Behavior, this[16]),
+ .name_hash = @as(u64, @bitCast(this[8..16].*)),
+ .behavior = @as(Dependency.Behavior, @enumFromInt(this[16])),
.version = Dependency.Version.toVersion(name, this[17..this.len].*, ctx),
};
}
@@ -155,7 +155,7 @@ pub fn toDependency(
pub fn toExternal(this: Dependency) External {
var bytes: External = undefined;
bytes[0..this.name.bytes.len].* = this.name.bytes;
- bytes[8..16].* = @bitCast([8]u8, this.name_hash);
+ bytes[8..16].* = @as([8]u8, @bitCast(this.name_hash));
bytes[16] = @intFromEnum(this.behavior);
bytes[17..bytes.len].* = this.version.toExternal();
return bytes;
@@ -265,7 +265,7 @@ pub const Version = struct {
ctx: Dependency.Context,
) Dependency.Version {
const slice = String{ .bytes = bytes[1..9].* };
- const tag = @enumFromInt(Dependency.Version.Tag, bytes[0]);
+ const tag = @as(Dependency.Version.Tag, @enumFromInt(bytes[0]));
const sliced = &slice.sliced(ctx.buffer);
return Dependency.parseWithTag(
ctx.allocator,
@@ -920,41 +920,41 @@ pub const Behavior = enum(u8) {
pub inline fn setNormal(this: Behavior, value: bool) Behavior {
if (value) {
- return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.normal);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.normal));
} else {
- return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.normal);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.normal));
}
}
pub inline fn setOptional(this: Behavior, value: bool) Behavior {
if (value) {
- return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.optional);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.optional));
} else {
- return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.optional);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.optional));
}
}
pub inline fn setDev(this: Behavior, value: bool) Behavior {
if (value) {
- return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.dev);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.dev));
} else {
- return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.dev);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.dev));
}
}
pub inline fn setPeer(this: Behavior, value: bool) Behavior {
if (value) {
- return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.peer);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.peer));
} else {
- return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.peer);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.peer));
}
}
pub inline fn setWorkspace(this: Behavior, value: bool) Behavior {
if (value) {
- return @enumFromInt(Behavior, @intFromEnum(this) | Behavior.workspace);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) | Behavior.workspace));
} else {
- return @enumFromInt(Behavior, @intFromEnum(this) & ~Behavior.workspace);
+ return @as(Behavior, @enumFromInt(@intFromEnum(this) & ~Behavior.workspace));
}
}
diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig
index 3be00853f..a533a92a7 100644
--- a/src/install/extract_tarball.zig
+++ b/src/install/extract_tarball.zig
@@ -341,7 +341,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
if (switch (this.resolution.tag) {
// TODO remove extracted files not matching any globs under "files"
.github, .local_tarball, .remote_tarball => true,
- else => this.package_manager.lockfile.trusted_dependencies.contains(@truncate(u32, Semver.String.Builder.stringHash(name))),
+ else => this.package_manager.lockfile.trusted_dependencies.contains(@as(u32, @truncate(Semver.String.Builder.stringHash(name)))),
}) {
const json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| {
this.package_manager.log.addErrorFmt(
diff --git a/src/install/install.zig b/src/install/install.zig
index 65f4a35e9..a71ad6a5a 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -151,8 +151,8 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type
// }
return Slice{
- .off = @truncate(u32, (@intFromPtr(in.ptr) - @intFromPtr(buf.ptr)) / @sizeOf(Type)),
- .len = @truncate(u32, in.len),
+ .off = @as(u32, @truncate((@intFromPtr(in.ptr) - @intFromPtr(buf.ptr)) / @sizeOf(Type))),
+ .len = @as(u32, @truncate(in.len)),
};
}
};
@@ -346,12 +346,12 @@ const NetworkTask = struct {
try header_builder.entries.append(
allocator,
.{
- .name = .{ .offset = 0, .length = @truncate(u32, "Accept".len) },
- .value = .{ .offset = "Accept".len, .length = @truncate(u32, default_headers_buf.len - "Accept".len) },
+ .name = .{ .offset = 0, .length = @as(u32, @truncate("Accept".len)) },
+ .value = .{ .offset = "Accept".len, .length = @as(u32, @truncate(default_headers_buf.len - "Accept".len)) },
},
);
header_builder.header_count = 1;
- header_builder.content = GlobalStringBuilder{ .ptr = @ptrFromInt([*]u8, @intFromPtr(bun.span(default_headers_buf).ptr)), .len = default_headers_buf.len, .cap = default_headers_buf.len };
+ header_builder.content = GlobalStringBuilder{ .ptr = @as([*]u8, @ptrFromInt(@intFromPtr(bun.span(default_headers_buf).ptr))), .len = default_headers_buf.len, .cap = default_headers_buf.len };
}
this.response_buffer = try MutableString.init(allocator, 0);
@@ -479,7 +479,7 @@ pub const Features = struct {
out |= @as(u8, @intFromBool(this.dev_dependencies)) << 3;
out |= @as(u8, @intFromBool(this.peer_dependencies)) << 4;
out |= @as(u8, @intFromBool(this.workspaces)) << 5;
- return @enumFromInt(Behavior, out);
+ return @as(Behavior, @enumFromInt(out));
}
pub const main = Features{
@@ -545,28 +545,28 @@ const Task = struct {
hasher.update(package_name);
hasher.update("@");
hasher.update(std.mem.asBytes(&package_version));
- return @as(u64, 0 << 61) | @as(u64, @truncate(u61, hasher.final()));
+ return @as(u64, 0 << 61) | @as(u64, @as(u61, @truncate(hasher.final())));
}
pub fn forBinLink(package_id: PackageID) u64 {
const hash = bun.Wyhash.hash(0, std.mem.asBytes(&package_id));
- return @as(u64, 1 << 61) | @as(u64, @truncate(u61, hash));
+ return @as(u64, 1 << 61) | @as(u64, @as(u61, @truncate(hash)));
}
pub fn forManifest(name: string) u64 {
- return @as(u64, 2 << 61) | @as(u64, @truncate(u61, bun.Wyhash.hash(0, name)));
+ return @as(u64, 2 << 61) | @as(u64, @as(u61, @truncate(bun.Wyhash.hash(0, name))));
}
pub fn forTarball(url: string) u64 {
var hasher = bun.Wyhash.init(0);
hasher.update(url);
- return @as(u64, 3 << 61) | @as(u64, @truncate(u61, hasher.final()));
+ return @as(u64, 3 << 61) | @as(u64, @as(u61, @truncate(hasher.final())));
}
pub fn forGitClone(url: string) u64 {
var hasher = bun.Wyhash.init(0);
hasher.update(url);
- return @as(u64, 4 << 61) | @as(u64, @truncate(u61, hasher.final()));
+ return @as(u64, 4 << 61) | @as(u64, @as(u61, @truncate(hasher.final())));
}
pub fn forGitCheckout(url: string, resolved: string) u64 {
@@ -574,7 +574,7 @@ const Task = struct {
hasher.update(url);
hasher.update("@");
hasher.update(resolved);
- return @as(u64, 5 << 61) | @as(u64, @truncate(u61, hasher.final()));
+ return @as(u64, 5 << 61) | @as(u64, @as(u61, @truncate(hasher.final())));
}
};
@@ -1408,7 +1408,7 @@ const PackageInstall = struct {
const rc = Syscall.system.open(path, @as(u32, std.os.O.PATH | 0), @as(u32, 0));
switch (Syscall.getErrno(rc)) {
.SUCCESS => {
- const fd = @intCast(std.os.fd_t, rc);
+ const fd = @as(std.os.fd_t, @intCast(rc));
_ = Syscall.system.close(fd);
return false;
},
@@ -1746,7 +1746,7 @@ pub const PackageManager = struct {
version_buf: []const u8,
behavior: Dependency.Behavior,
) DependencyToEnqueue {
- const dep_id = @truncate(DependencyID, brk: {
+ const dep_id = @as(DependencyID, @truncate(brk: {
const str_buf = this.lockfile.buffers.string_bytes.items;
for (this.lockfile.buffers.dependencies.items, 0..) |dep, id| {
if (!strings.eqlLong(dep.name.slice(str_buf), name, true)) continue;
@@ -1772,7 +1772,7 @@ pub const PackageManager = struct {
this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable;
if (comptime Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len);
break :brk index;
- });
+ }));
if (this.lockfile.buffers.resolutions.items[dep_id] == invalid_package_id) {
this.enqueueDependencyWithMainAndSuccessFn(
@@ -2806,7 +2806,7 @@ pub const PackageManager = struct {
tmpname_buf[0..8].* = "tmplock-".*;
var tmpfile = FileSystem.RealFS.Tmpfile{};
var secret: [32]u8 = undefined;
- std.mem.writeIntNative(u64, secret[0..8], @intCast(u64, std.time.milliTimestamp()));
+ std.mem.writeIntNative(u64, secret[0..8], @as(u64, @intCast(std.time.milliTimestamp())));
var base64_bytes: [64]u8 = undefined;
std.crypto.random.bytes(&base64_bytes);
@@ -3340,8 +3340,8 @@ pub const PackageManager = struct {
pub fn scheduleTasks(manager: *PackageManager) usize {
const count = manager.task_batch.len + manager.network_resolve_batch.len + manager.network_tarball_batch.len;
- manager.pending_tasks += @truncate(u32, count);
- manager.total_tasks += @truncate(u32, count);
+ manager.pending_tasks += @as(u32, @truncate(count));
+ manager.total_tasks += @as(u32, @truncate(count));
manager.thread_pool.schedule(manager.task_batch);
manager.network_resolve_batch.push(manager.network_tarball_batch);
HTTP.http_thread.schedule(manager.network_resolve_batch);
@@ -3799,7 +3799,7 @@ pub const PackageManager = struct {
if (comptime log_level.isVerbose()) {
Output.prettyError(" ", .{});
- Output.printElapsed(@floatFromInt(f64, task.http.elapsed) / std.time.ns_per_ms);
+ Output.printElapsed(@as(f64, @floatFromInt(task.http.elapsed)) / std.time.ns_per_ms);
Output.prettyError("\n <d>Downloaded <r><green>{s}<r> versions\n", .{name.slice()});
Output.flush();
}
@@ -3811,7 +3811,7 @@ pub const PackageManager = struct {
entry.value_ptr.* = manifest;
if (timestamp_this_tick == null) {
- timestamp_this_tick = @truncate(u32, @intCast(u64, @max(0, std.time.timestamp()))) +| 300;
+ timestamp_this_tick = @as(u32, @truncate(@as(u64, @intCast(@max(0, std.time.timestamp()))))) +| 300;
}
entry.value_ptr.*.pkg.public_max_age = timestamp_this_tick.?;
@@ -3933,7 +3933,7 @@ pub const PackageManager = struct {
if (comptime log_level.isVerbose()) {
Output.prettyError(" ", .{});
- Output.printElapsed(@floatCast(f64, @floatFromInt(f64, task.http.elapsed) / std.time.ns_per_ms));
+ Output.printElapsed(@as(f64, @floatCast(@as(f64, @floatFromInt(task.http.elapsed)) / std.time.ns_per_ms)));
Output.prettyError(" <d>Downloaded <r><green>{s}<r> tarball\n", .{extract.name.slice()});
Output.flush();
}
@@ -5219,7 +5219,7 @@ pub const PackageManager = struct {
Output.flush();
}
- var cpu_count = @truncate(u32, ((try std.Thread.getCpuCount()) + 1));
+ var cpu_count = @as(u32, @truncate(((try std.Thread.getCpuCount()) + 1)));
if (env.map.get("GOMAXPROCS")) |max_procs| {
if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count_| {
@@ -5275,7 +5275,7 @@ pub const PackageManager = struct {
ctx.install,
);
- manager.timestamp_for_manifest_cache_control = @truncate(u32, @intCast(u64, @max(std.time.timestamp(), 0)));
+ manager.timestamp_for_manifest_cache_control = @as(u32, @truncate(@as(u64, @intCast(@max(std.time.timestamp(), 0)))));
return manager;
}
@@ -5290,7 +5290,7 @@ pub const PackageManager = struct {
PackageManager.verbose_install = true;
}
- var cpu_count = @truncate(u32, ((try std.Thread.getCpuCount()) + 1));
+ var cpu_count = @as(u32, @truncate(((try std.Thread.getCpuCount()) + 1)));
if (env.map.get("GOMAXPROCS")) |max_procs| {
if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count_| {
@@ -5360,15 +5360,15 @@ pub const PackageManager = struct {
bun_install,
);
- manager.timestamp_for_manifest_cache_control = @truncate(
+ manager.timestamp_for_manifest_cache_control = @as(
u32,
- @intCast(
+ @truncate(@as(
u64,
- @max(
+ @intCast(@max(
std.time.timestamp(),
0,
- ),
- ),
+ )),
+ )),
// When using "bun install", we check for updates with a 300 second cache.
// When using bun, we only do staleness checks once per day
) -| std.time.s_per_day;
@@ -6101,15 +6101,15 @@ pub const PackageManager = struct {
if (manager.options.positionals.len == 1) {
var examples_to_print: [3]string = undefined;
- const off = @intCast(u64, std.time.milliTimestamp());
+ const off = @as(u64, @intCast(std.time.milliTimestamp()));
switch (op) {
.update, .add => {
const filler = @import("../cli.zig").HelpCommand.packages_to_add_filler;
- examples_to_print[0] = filler[@intCast(usize, (off) % filler.len)];
- examples_to_print[1] = filler[@intCast(usize, (off + 1) % filler.len)];
- examples_to_print[2] = filler[@intCast(usize, (off + 2) % filler.len)];
+ examples_to_print[0] = filler[@as(usize, @intCast((off) % filler.len))];
+ examples_to_print[1] = filler[@as(usize, @intCast((off + 1) % filler.len))];
+ examples_to_print[2] = filler[@as(usize, @intCast((off + 2) % filler.len))];
Output.prettyErrorln(
\\
@@ -6145,9 +6145,9 @@ pub const PackageManager = struct {
.remove => {
const filler = @import("../cli.zig").HelpCommand.packages_to_remove_filler;
- examples_to_print[0] = filler[@intCast(usize, (off) % filler.len)];
- examples_to_print[1] = filler[@intCast(usize, (off + 1) % filler.len)];
- examples_to_print[2] = filler[@intCast(usize, (off + 2) % filler.len)];
+ examples_to_print[0] = filler[@as(usize, @intCast((off) % filler.len))];
+ examples_to_print[1] = filler[@as(usize, @intCast((off + 1) % filler.len))];
+ examples_to_print[2] = filler[@as(usize, @intCast((off + 2) % filler.len))];
Output.prettyErrorln(
\\
@@ -6308,7 +6308,7 @@ pub const PackageManager = struct {
const changed = new_len != dependencies.len;
if (changed) {
- query.expr.data.e_object.properties.len = @truncate(u32, new_len);
+ query.expr.data.e_object.properties.len = @as(u32, @truncate(new_len));
// If the dependencies list is now empty, remove it from the package.json
// since we're swapRemove, we have to re-sort it
@@ -6781,7 +6781,7 @@ pub const PackageManager = struct {
}
}
- if (resolution.tag == .workspace or this.lockfile.trusted_dependencies.contains(@truncate(u32, String.Builder.stringHash(name)))) {
+ if (resolution.tag == .workspace or this.lockfile.trusted_dependencies.contains(@as(u32, @truncate(String.Builder.stringHash(name))))) {
var scripts = this.lockfile.packages.items(.scripts)[package_id];
if (scripts.hasAny()) {
var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
@@ -7497,8 +7497,8 @@ pub const PackageManager = struct {
new_dep.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
}
- const off = @truncate(u32, manager.lockfile.buffers.dependencies.items.len);
- const len = @truncate(u32, new_dependencies.len);
+ const off = @as(u32, @truncate(manager.lockfile.buffers.dependencies.items.len));
+ const len = @as(u32, @truncate(new_dependencies.len));
var packages = manager.lockfile.packages.slice();
var dep_lists = packages.items(.dependencies);
var resolution_lists = packages.items(.resolutions);
@@ -7536,7 +7536,7 @@ pub const PackageManager = struct {
if (manager.summary.add > 0 or manager.summary.update > 0) {
var remaining = mapping;
var dependency_i: PackageID = off;
- const changes = @truncate(PackageID, mapping.len);
+ const changes = @as(PackageID, @truncate(mapping.len));
_ = manager.getCacheDirectory();
_ = manager.getTemporaryDirectory();
@@ -7763,7 +7763,7 @@ pub const PackageManager = struct {
}
if (needs_new_lockfile) {
- manager.summary.add = @truncate(u32, manager.lockfile.packages.len);
+ manager.summary.add = @as(u32, @truncate(manager.lockfile.packages.len));
}
if (manager.options.do.save_yarn_lock) {
@@ -7812,9 +7812,9 @@ pub const PackageManager = struct {
// it's confusing when it shows 3 packages and says it installed 1
Output.pretty("\n <green>{d}<r> packages<r> installed ", .{@max(
install_summary.success,
- @truncate(
+ @as(
u32,
- manager.package_json_updates.len,
+ @truncate(manager.package_json_updates.len),
),
)});
Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp());
@@ -7838,7 +7838,7 @@ pub const PackageManager = struct {
} else if (install_summary.skipped > 0 and install_summary.fail == 0 and manager.package_json_updates.len == 0) {
Output.pretty("\n", .{});
- const count = @truncate(PackageID, manager.lockfile.packages.len);
+ const count = @as(PackageID, @truncate(manager.lockfile.packages.len));
if (count != install_summary.skipped) {
Output.pretty("Checked <green>{d} installs<r> across {d} packages <d>(no changes)<r> ", .{
install_summary.skipped,
diff --git a/src/install/integrity.zig b/src/install/integrity.zig
index 4634c2dfd..dd11140de 100644
--- a/src/install/integrity.zig
+++ b/src/install/integrity.zig
@@ -61,7 +61,7 @@ pub const Integrity = extern struct {
});
// parse hex integer
- integrity.value[out_i] = @truncate(u8, x0 << 4 | x1);
+ integrity.value[out_i] = @as(u8, @truncate(x0 << 4 | x1));
out_i += 1;
i += 1;
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 202cfddc0..cf0ee8267 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -226,23 +226,23 @@ pub const Tree = struct {
pub fn toExternal(this: Tree) External {
var out = External{};
- out[0..4].* = @bitCast(Id, this.id);
- out[4..8].* = @bitCast(Id, this.dependency_id);
- out[8..12].* = @bitCast(Id, this.parent);
- out[12..16].* = @bitCast(u32, this.dependencies.off);
- out[16..20].* = @bitCast(u32, this.dependencies.len);
+ out[0..4].* = @as(Id, @bitCast(this.id));
+ out[4..8].* = @as(Id, @bitCast(this.dependency_id));
+ out[8..12].* = @as(Id, @bitCast(this.parent));
+ out[12..16].* = @as(u32, @bitCast(this.dependencies.off));
+ out[16..20].* = @as(u32, @bitCast(this.dependencies.len));
if (out.len != 20) @compileError("Tree.External is not 20 bytes");
return out;
}
pub fn toTree(out: External) Tree {
return .{
- .id = @bitCast(Id, out[0..4].*),
- .dependency_id = @bitCast(Id, out[4..8].*),
- .parent = @bitCast(Id, out[8..12].*),
+ .id = @as(Id, @bitCast(out[0..4].*)),
+ .dependency_id = @as(Id, @bitCast(out[4..8].*)),
+ .parent = @as(Id, @bitCast(out[8..12].*)),
.dependencies = .{
- .off = @bitCast(u32, out[12..16].*),
- .len = @bitCast(u32, out[16..20].*),
+ .off = @as(u32, @bitCast(out[12..16].*)),
+ .len = @as(u32, @bitCast(out[16..20].*)),
},
};
}
@@ -306,7 +306,7 @@ pub const Tree = struct {
if (tree.id > 0) {
var depth_buf_len: usize = 1;
- while (parent_id > 0 and parent_id < @intCast(Id, this.trees.len)) {
+ while (parent_id > 0 and parent_id < @as(Id, @intCast(this.trees.len))) {
this.depth_stack[depth_buf_len] = parent_id;
parent_id = this.trees[parent_id].parent;
depth_buf_len += 1;
@@ -374,7 +374,7 @@ pub const Tree = struct {
/// Flatten the multi-dimensional ArrayList of package IDs into a single easily serializable array
pub fn clean(this: *Builder) !DependencyIDList {
- const end = @truncate(Id, this.list.len);
+ const end = @as(Id, @truncate(this.list.len));
var i: Id = 0;
var total: u32 = 0;
var trees = this.list.items(.tree);
@@ -389,7 +389,7 @@ pub const Tree = struct {
for (trees, dependencies) |*tree, *child| {
if (tree.dependencies.len > 0) {
- const len = @truncate(PackageID, child.items.len);
+ const len = @as(PackageID, @truncate(child.items.len));
next.off += next.len;
next.len = len;
tree.dependencies = next;
@@ -419,7 +419,7 @@ pub const Tree = struct {
try builder.list.append(builder.allocator, .{
.tree = .{
.parent = this.id,
- .id = @truncate(Id, builder.list.len),
+ .id = @as(Id, @truncate(builder.list.len)),
.dependency_id = dependency_id,
},
.dependencies = .{},
@@ -430,7 +430,7 @@ pub const Tree = struct {
const dependency_lists = list_slice.items(.dependencies);
const next: *Tree = &trees[builder.list.len - 1];
const name_hashes: []const PackageNameHash = builder.name_hashes;
- const max_package_id = @truncate(PackageID, name_hashes.len);
+ const max_package_id = @as(PackageID, @truncate(name_hashes.len));
var dep_id = resolution_list.off;
const end = dep_id + resolution_list.len;
@@ -545,7 +545,7 @@ pub fn maybeCloneFilteringRootPackages(
const root_dependencies = old_root_dependenices_list.get(old.buffers.dependencies.items);
var resolutions = old_root_resolutions.mut(old.buffers.resolutions.items);
var any_changes = false;
- const end = @truncate(PackageID, old.packages.len);
+ const end = @as(PackageID, @truncate(old.packages.len));
for (root_dependencies, resolutions) |dependency, *resolution| {
if (!dependency.behavior.isEnabled(features) and resolution.* < end) {
@@ -1022,7 +1022,7 @@ pub const Printer = struct {
defer if (id_map.len > 0) default_allocator.free(id_map);
visited.set(0);
- const end = @truncate(PackageID, resolved.len);
+ const end = @as(PackageID, @truncate(resolved.len));
if (this.successfully_installed) |installed| {
var dep_id = resolutions_list[0].off;
@@ -1039,7 +1039,7 @@ pub const Printer = struct {
if (update.failed) return;
if (update.matches(dependency, string_buf)) {
if (dependency_id.* == invalid_package_id) {
- dependency_id.* = @truncate(DependencyID, dep_id);
+ dependency_id.* = @as(DependencyID, @truncate(dep_id));
}
continue :outer;
@@ -1076,7 +1076,7 @@ pub const Printer = struct {
if (update.failed) return;
if (update.matches(dependency, string_buf)) {
if (dependency_id.* == invalid_package_id) {
- dependency_id.* = @truncate(DependencyID, dep_id);
+ dependency_id.* = @as(DependencyID, @truncate(dep_id));
}
continue :outer;
@@ -1202,7 +1202,7 @@ pub const Printer = struct {
var requested_versions = RequestedVersion.init(this.lockfile.allocator);
var all_requested_versions = try this.lockfile.allocator.alloc(Dependency.Version, resolutions_buffer.len);
defer this.lockfile.allocator.free(all_requested_versions);
- const package_count = @truncate(PackageID, names.len);
+ const package_count = @as(PackageID, @truncate(names.len));
var alphabetized_names = try this.lockfile.allocator.alloc(PackageID, package_count - 1);
defer this.lockfile.allocator.free(alphabetized_names);
@@ -1393,7 +1393,7 @@ pub fn verifyResolutions(this: *Lockfile, local_features: Features, remote_featu
const dependency_lists: []const DependencySlice = this.packages.items(.dependencies);
const dependencies_buffer = this.buffers.dependencies.items;
const resolutions_buffer = this.buffers.resolutions.items;
- const end = @truncate(PackageID, this.packages.len);
+ const end = @as(PackageID, @truncate(this.packages.len));
var any_failed = false;
const string_buf = this.buffers.string_bytes.items;
@@ -1403,7 +1403,7 @@ pub fn verifyResolutions(this: *Lockfile, local_features: Features, remote_featu
for (resolution_list.get(resolutions_buffer), dependency_list.get(dependencies_buffer)) |package_id, failed_dep| {
if (package_id < end) continue;
if (failed_dep.behavior.isPeer() or !failed_dep.behavior.isEnabled(
- if (root_list.contains(@truncate(PackageID, parent_id)))
+ if (root_list.contains(@as(PackageID, @truncate(parent_id))))
local_features
else
remote_features,
@@ -1446,7 +1446,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ) void {
tmpname_buf[0..8].* = "bunlock-".*;
var tmpfile = FileSystem.RealFS.Tmpfile{};
var secret: [32]u8 = undefined;
- std.mem.writeIntNative(u64, secret[0..8], @intCast(u64, std.time.milliTimestamp()));
+ std.mem.writeIntNative(u64, secret[0..8], @as(u64, @intCast(std.time.milliTimestamp())));
var base64_bytes: [64]u8 = undefined;
std.crypto.random.bytes(&base64_bytes);
@@ -1588,7 +1588,7 @@ pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !v
}
pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Package {
- const id = @truncate(PackageID, this.packages.len);
+ const id = @as(PackageID, @truncate(this.packages.len));
return try appendPackageWithID(this, package_, id);
}
@@ -1929,11 +1929,11 @@ pub const Package = extern struct {
field: string,
behavior: Behavior,
- pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = @enumFromInt(Behavior, Behavior.normal) };
- pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @enumFromInt(Behavior, Behavior.dev) };
- pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @enumFromInt(Behavior, Behavior.optional) };
- pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @enumFromInt(Behavior, Behavior.peer) };
- pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @enumFromInt(Behavior, Behavior.workspace) };
+ pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.normal)) };
+ pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.dev)) };
+ pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.optional)) };
+ pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @as(Behavior, @enumFromInt(Behavior.peer)) };
+ pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @as(Behavior, @enumFromInt(Behavior.workspace)) };
};
pub inline fn isDisabled(this: *const Lockfile.Package) bool {
@@ -1988,9 +1988,9 @@ pub const Package = extern struct {
try new.buffers.resolutions.ensureUnusedCapacity(new.allocator, old_dependencies.len);
try new.buffers.extern_strings.ensureUnusedCapacity(new.allocator, new_extern_string_count);
- const prev_len = @truncate(u32, new.buffers.dependencies.items.len);
- const end = prev_len + @truncate(u32, old_dependencies.len);
- const max_package_id = @truncate(PackageID, old.packages.len);
+ const prev_len = @as(u32, @truncate(new.buffers.dependencies.items.len));
+ const end = prev_len + @as(u32, @truncate(old_dependencies.len));
+ const max_package_id = @as(PackageID, @truncate(old.packages.len));
new.buffers.dependencies.items = new.buffers.dependencies.items.ptr[0..end];
new.buffers.resolutions.items = new.buffers.resolutions.items.ptr[0..end];
@@ -2001,7 +2001,7 @@ pub const Package = extern struct {
var dependencies: []Dependency = new.buffers.dependencies.items[prev_len..end];
var resolutions: []PackageID = new.buffers.resolutions.items[prev_len..end];
- const id = @truncate(PackageID, new.packages.len);
+ const id = @as(PackageID, @truncate(new.packages.len));
const new_package = try new.appendPackageWithID(
.{
.name = builder.appendWithHash(
@@ -2068,7 +2068,7 @@ pub const Package = extern struct {
try cloner.clone_queue.append(.{
.old_resolution = old_resolution,
.parent = new_package.meta.id,
- .resolve_id = new_package.resolutions.off + @intCast(PackageID, i),
+ .resolve_id = new_package.resolutions.off + @as(PackageID, @intCast(i)),
});
}
}
@@ -2150,8 +2150,8 @@ pub const Package = extern struct {
package.meta.arch = package_json.arch;
package.meta.os = package_json.os;
- package.dependencies.off = @truncate(u32, dependencies_list.items.len);
- package.dependencies.len = total_dependencies_count - @truncate(u32, dependencies.len);
+ package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len));
+ package.dependencies.len = total_dependencies_count - @as(u32, @truncate(dependencies.len));
package.resolutions.off = package.dependencies.off;
package.resolutions.len = package.dependencies.len;
@@ -2338,7 +2338,7 @@ pub const Package = extern struct {
package.meta.integrity = package_version.integrity;
- package.dependencies.off = @truncate(u32, dependencies_list.items.len);
+ package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len));
package.dependencies.len = total_dependencies_count;
package.resolutions.off = package.dependencies.off;
package.resolutions.len = package.dependencies.len;
@@ -2407,7 +2407,7 @@ pub const Package = extern struct {
};
if (to_deps[to_i].eql(from_dep, to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items)) {
- mapping[to_i] = @truncate(PackageID, i);
+ mapping[to_i] = @as(PackageID, @truncate(i));
continue;
}
@@ -2548,7 +2548,7 @@ pub const Package = extern struct {
),
);
defer dependency_version.value.workspace = path;
- var workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, @truncate(u32, external_name.hash));
+ var workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, @as(u32, @truncate(external_name.hash)));
if (workspace_entry.found_existing) {
const old_path = workspace_entry.value_ptr.*;
@@ -2931,7 +2931,7 @@ pub const Package = extern struct {
.values = workspace_names.values(),
});
- return @truncate(u32, workspace_names.count());
+ return @as(u32, @truncate(workspace_names.count()));
}
fn parseWithJSON(
@@ -3128,7 +3128,7 @@ pub const Package = extern struct {
else => {},
}
}
- total_dependencies_count += @truncate(u32, obj.properties.len);
+ total_dependencies_count += @as(u32, @truncate(obj.properties.len));
},
else => {
if (group.behavior.isWorkspace()) {
@@ -3167,7 +3167,7 @@ pub const Package = extern struct {
, .{}) catch {};
return error.InvalidPackageJSON;
};
- lockfile.trusted_dependencies.putAssumeCapacity(@truncate(u32, String.Builder.stringHash(name)), {});
+ lockfile.trusted_dependencies.putAssumeCapacity(@as(u32, @truncate(String.Builder.stringHash(name))), {});
}
},
else => {
@@ -3313,7 +3313,7 @@ pub const Package = extern struct {
}
total_dependencies_count = 0;
- const in_workspace = lockfile.workspace_paths.contains(@truncate(u32, package.name_hash));
+ const in_workspace = lockfile.workspace_paths.contains(@as(u32, @truncate(package.name_hash)));
inline for (dependency_groups) |group| {
if (group.behavior.isWorkspace()) {
@@ -3354,7 +3354,7 @@ pub const Package = extern struct {
var tag: ?Dependency.Version.Tag = null;
var workspace_path: ?String = null;
- if (lockfile.workspace_paths.get(@truncate(u32, external_name.hash))) |path| {
+ if (lockfile.workspace_paths.get(@as(u32, @truncate(external_name.hash)))) |path| {
tag = .workspace;
workspace_path = path;
}
@@ -3395,10 +3395,10 @@ pub const Package = extern struct {
Dependency.isLessThan,
);
- package.dependencies.off = @truncate(u32, off);
- package.dependencies.len = @truncate(u32, total_dependencies_count);
+ package.dependencies.off = @as(u32, @truncate(off));
+ package.dependencies.len = @as(u32, @truncate(total_dependencies_count));
- package.resolutions = @bitCast(@TypeOf(package.resolutions), package.dependencies);
+ package.resolutions = @as(@TypeOf(package.resolutions), @bitCast(package.dependencies));
@memset(lockfile.buffers.resolutions.items.ptr[off..total_len], invalid_package_id);
@@ -3458,14 +3458,20 @@ pub const Package = extern struct {
.alignment = if (@sizeOf(field_info.type) == 0) 1 else field_info.alignment,
};
}
- const Sort = struct {
- fn lessThan(trash: *i32, comptime lhs: Data, comptime rhs: Data) bool {
- _ = trash;
- return lhs.alignment > rhs.alignment;
+ const SortContext = struct {
+ data: []Data,
+ pub fn swap(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) void {
+ const tmp = ctx.data[lhs];
+ ctx.data[lhs] = ctx.data[rhs];
+ ctx.data[rhs] = tmp;
+ }
+ pub fn lessThan(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) bool {
+ return ctx.data[lhs].alignment > ctx.data[rhs].alignment;
}
};
- var trash: i32 = undefined; // workaround for stage1 compiler bug
- std.sort.block(Data, &data, &trash, Sort.lessThan);
+ std.sort.insertionContext(0, fields.len, SortContext{
+ .data = &data,
+ });
var sizes_bytes: [fields.len]usize = undefined;
var field_indexes: [fields.len]usize = undefined;
var Types: [fields.len]type = undefined;
@@ -3485,7 +3491,7 @@ pub const Package = extern struct {
pub fn byteSize(list: Lockfile.Package.List) usize {
const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes;
- const capacity_vector = @splat(sizes.bytes.len, list.len);
+ const capacity_vector: @Vector(sizes.bytes.len, usize) = @splat(list.len);
return @reduce(.Add, capacity_vector * sizes_vector);
}
@@ -3626,14 +3632,22 @@ const Buffers = struct {
.type = field_info.type.Slice,
};
}
- const Sort = struct {
- fn lessThan(trash: *i32, comptime lhs: Data, comptime rhs: Data) bool {
- _ = trash;
- return lhs.alignment > rhs.alignment;
+
+ const SortContext = struct {
+ data: []Data,
+ pub fn swap(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) void {
+ const tmp = ctx.data[lhs];
+ ctx.data[lhs] = ctx.data[rhs];
+ ctx.data[rhs] = tmp;
+ }
+ pub fn lessThan(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) bool {
+ return ctx.data[lhs].alignment > ctx.data[rhs].alignment;
}
};
- var trash: i32 = undefined; // workaround for stage1 compiler bug
- std.sort.block(Data, &data, &trash, Sort.lessThan);
+
+ std.sort.insertionContext(0, fields.len, SortContext{
+ .data = &data,
+ });
var sizes_bytes: [fields.len]usize = undefined;
var names: [fields.len][]const u8 = undefined;
var types: [fields.len]type = undefined;
@@ -3673,7 +3687,7 @@ const Buffers = struct {
const misaligned = std.mem.bytesAsSlice(PointerType, stream.buffer[start_pos..end_pos]);
return ArrayList{
- .items = try allocator.dupe(PointerType, @alignCast(@alignOf([*]PointerType), misaligned.ptr)[0..misaligned.len]),
+ .items = try allocator.dupe(PointerType, @as([*]PointerType, @alignCast(misaligned.ptr))[0..misaligned.len]),
.capacity = misaligned.len,
};
}
@@ -3774,7 +3788,7 @@ const Buffers = struct {
if (visited.isSet(dep_id)) continue;
visited.set(dep_id);
}
- return @truncate(DependencyID, dep_id);
+ return @as(DependencyID, @truncate(dep_id));
}
},
}
@@ -3940,16 +3954,16 @@ pub const Serializer = struct {
{
lockfile.package_index = PackageIndex.Map.initContext(allocator, .{});
lockfile.string_pool = StringPool.initContext(allocator, .{});
- try lockfile.package_index.ensureTotalCapacity(@truncate(u32, lockfile.packages.len));
+ try lockfile.package_index.ensureTotalCapacity(@as(u32, @truncate(lockfile.packages.len)));
const slice = lockfile.packages.slice();
const name_hashes = slice.items(.name_hash);
const resolutions = slice.items(.resolution);
for (name_hashes, resolutions, 0..) |name_hash, resolution, id| {
- try lockfile.getOrPutID(@truncate(PackageID, id), name_hash);
+ try lockfile.getOrPutID(@as(PackageID, @truncate(id)), name_hash);
switch (resolution.tag) {
.workspace => {
- try lockfile.workspace_paths.put(allocator, @truncate(u32, name_hash), resolution.value.workspace);
+ try lockfile.workspace_paths.put(allocator, @as(u32, @truncate(name_hash)), resolution.value.workspace);
},
else => {},
}
@@ -3986,13 +4000,13 @@ fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash
while (i + 16 < this.packages.len) : (i += 16) {
comptime var j: usize = 0;
inline while (j < 16) : (j += 1) {
- alphabetized_names[(i + j) - 1] = @truncate(PackageID, (i + j));
+ alphabetized_names[(i + j) - 1] = @as(PackageID, @truncate((i + j)));
string_builder.fmtCount("{s}@{}\n", .{ names[i + j].slice(bytes), resolutions[i + j].fmt(bytes) });
}
}
while (i < this.packages.len) : (i += 1) {
- alphabetized_names[i - 1] = @truncate(PackageID, i);
+ alphabetized_names[i - 1] = @as(PackageID, @truncate(i));
string_builder.fmtCount("{s}@{}\n", .{ names[i].slice(bytes), resolutions[i].fmt(bytes) });
}
}
diff --git a/src/install/npm.zig b/src/install/npm.zig
index 6edb6dcb4..0a25fe636 100644
--- a/src/install/npm.zig
+++ b/src/install/npm.zig
@@ -204,7 +204,7 @@ pub const Registry = struct {
package_name,
newly_last_modified,
new_etag,
- @truncate(u32, @intCast(u64, @max(0, std.time.timestamp()))) + 300,
+ @as(u32, @truncate(@as(u64, @intCast(@max(0, std.time.timestamp()))))) + 300,
)) |package| {
if (package_manager.options.enable.manifest_cache) {
PackageManifest.Serializer.save(&package, package_manager.getTemporaryDirectory(), package_manager.getCacheDirectory()) catch {};
@@ -231,7 +231,7 @@ const ExternVersionMap = extern struct {
pub fn findKeyIndex(this: ExternVersionMap, buf: []const Semver.Version, find: Semver.Version) ?u32 {
for (this.keys.get(buf), 0..) |key, i| {
if (key.eql(find)) {
- return @truncate(u32, i);
+ return @as(u32, @truncate(i));
}
}
@@ -290,9 +290,9 @@ pub const OperatingSystem = enum(u16) {
const field: u16 = NameMap.get(str[offset..]) orelse return this_;
if (is_not) {
- return @enumFromInt(OperatingSystem, this & ~field);
+ return @as(OperatingSystem, @enumFromInt(this & ~field));
} else {
- return @enumFromInt(OperatingSystem, this | field);
+ return @as(OperatingSystem, @enumFromInt(this | field));
}
}
};
@@ -355,9 +355,9 @@ pub const Architecture = enum(u16) {
const field: u16 = NameMap.get(input) orelse return this_;
if (is_not) {
- return @enumFromInt(Architecture, this & ~field);
+ return @as(Architecture, @enumFromInt(this & ~field));
} else {
- return @enumFromInt(Architecture, this | field);
+ return @as(Architecture, @enumFromInt(this | field));
}
}
};
@@ -536,7 +536,7 @@ pub const PackageManifest = struct {
stream.pos += Aligner.skipAmount(Type, stream.pos);
const result_bytes = stream.buffer[stream.pos..][0..byte_len];
- const result = @ptrCast([*]const Type, @alignCast(@alignOf([*]const Type), result_bytes.ptr))[0 .. result_bytes.len / @sizeOf(Type)];
+ const result = @as([*]const Type, @ptrCast(@alignCast(result_bytes.ptr)))[0 .. result_bytes.len / @sizeOf(Type)];
stream.pos += result_bytes.len;
return result;
}
@@ -577,7 +577,7 @@ pub const PackageManifest = struct {
var dest_path_stream = std.io.fixedBufferStream(&dest_path_buf);
var dest_path_stream_writer = dest_path_stream.writer();
const hex_fmt = bun.fmt.hexIntLower(file_id);
- const hex_timestamp = @intCast(usize, @max(std.time.milliTimestamp(), 0));
+ const hex_timestamp = @as(usize, @intCast(@max(std.time.milliTimestamp(), 0)));
const hex_timestamp_fmt = bun.fmt.hexIntLower(hex_timestamp);
try dest_path_stream_writer.print("{any}.npm-{any}", .{ hex_fmt, hex_timestamp_fmt });
try dest_path_stream_writer.writeByte(0);
@@ -1306,15 +1306,15 @@ pub const PackageManifest = struct {
}
if (optional_peer_dep_names.items.len == 0) {
- const names_hash_bytes = @bitCast([8]u8, this_names[i].hash);
+ const names_hash_bytes = @as([8]u8, @bitCast(this_names[i].hash));
name_hasher.update(&names_hash_bytes);
- const versions_hash_bytes = @bitCast([8]u8, this_versions[i].hash);
+ const versions_hash_bytes = @as([8]u8, @bitCast(this_versions[i].hash));
version_hasher.update(&versions_hash_bytes);
}
} else {
- const names_hash_bytes = @bitCast([8]u8, this_names[i].hash);
+ const names_hash_bytes = @as([8]u8, @bitCast(this_names[i].hash));
name_hasher.update(&names_hash_bytes);
- const versions_hash_bytes = @bitCast([8]u8, this_versions[i].hash);
+ const versions_hash_bytes = @as([8]u8, @bitCast(this_versions[i].hash));
version_hasher.update(&versions_hash_bytes);
}
@@ -1327,7 +1327,7 @@ pub const PackageManifest = struct {
var version_list = ExternalStringList.init(version_extern_strings, this_versions);
if (comptime is_peer) {
- package_version.optional_peer_dependencies_len = @truncate(u32, peer_dependency_len);
+ package_version.optional_peer_dependencies_len = @as(u32, @truncate(peer_dependency_len));
}
if (count > 0 and
@@ -1502,10 +1502,10 @@ pub const PackageManifest = struct {
}
result.pkg.string_lists_buf.off = 0;
- result.pkg.string_lists_buf.len = @truncate(u32, all_extern_strings.len);
+ result.pkg.string_lists_buf.len = @as(u32, @truncate(all_extern_strings.len));
result.pkg.versions_buf.off = 0;
- result.pkg.versions_buf.len = @truncate(u32, all_semver_versions.len);
+ result.pkg.versions_buf.len = @as(u32, @truncate(all_semver_versions.len));
result.versions = all_semver_versions;
result.external_strings = all_extern_strings;
@@ -1518,7 +1518,7 @@ pub const PackageManifest = struct {
result.string_buf = ptr[0..string_builder.len];
result.pkg.string_buf = BigExternalString{
.off = 0,
- .len = @truncate(u32, string_builder.len),
+ .len = @as(u32, @truncate(string_builder.len)),
.hash = 0,
};
}
diff --git a/src/install/semver.zig b/src/install/semver.zig
index c2730e7c7..1ec76b370 100644
--- a/src/install/semver.zig
+++ b/src/install/semver.zig
@@ -65,7 +65,7 @@ pub const String = extern struct {
if (out.isInline()) {
out.assertDefined();
} else {
- std.debug.assert(@bitCast(u64, out.slice(buf)[0..8].*) != undefined);
+ std.debug.assert(@as(u64, @bitCast(out.slice(buf)[0..8].*)) != undefined);
}
return out;
@@ -76,8 +76,8 @@ pub const String = extern struct {
pub fn isUndefined(this: *const String) bool {
var num: u64 = undefined;
- var bytes = @bitCast(u64, this.bytes);
- return @truncate(u63, bytes) == @truncate(u63, num);
+ var bytes = @as(u64, @bitCast(this.bytes));
+ return @as(u63, @truncate(bytes)) == @as(u63, @truncate(num));
}
pub const Formatter = struct {
@@ -153,7 +153,7 @@ pub const String = extern struct {
pub fn hash(ctx: ArrayHashContext, a: String) u32 {
const str = a.slice(ctx.a_buf);
- return @truncate(u32, bun.hash(str));
+ return @as(u32, @truncate(bun.hash(str)));
}
};
@@ -175,44 +175,44 @@ pub const String = extern struct {
// This should only happen for non-ascii strings that are exactly 8 bytes.
// so that's an edge-case
if ((in[max_inline_len - 1]) >= 128)
- @bitCast(String, (@as(
+ @as(String, @bitCast((@as(
u64,
0,
) | @as(
u64,
- @truncate(
+ @as(
max_addressable_space,
- @bitCast(
+ @truncate(@as(
u64,
- Pointer.init(buf, in),
- ),
+ @bitCast(Pointer.init(buf, in)),
+ )),
),
- )) | 1 << 63)
+ )) | 1 << 63))
else
String{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], in[7] } },
- else => @bitCast(
+ else => @as(
String,
- (@as(
+ @bitCast((@as(
u64,
0,
) | @as(
u64,
- @truncate(
+ @as(
max_addressable_space,
- @bitCast(
+ @truncate(@as(
u64,
- Pointer.init(buf, in),
- ),
+ @bitCast(Pointer.init(buf, in)),
+ )),
),
- )) | 1 << 63,
+ )) | 1 << 63),
),
};
}
pub fn eql(this: String, that: String, this_buf: []const u8, that_buf: []const u8) bool {
if (this.isInline() and that.isInline()) {
- return @bitCast(u64, this.bytes) == @bitCast(u64, that.bytes);
+ return @as(u64, @bitCast(this.bytes)) == @as(u64, @bitCast(that.bytes));
} else if (this.isInline() != that.isInline()) {
return false;
} else {
@@ -223,7 +223,7 @@ pub const String = extern struct {
}
pub inline fn isEmpty(this: String) bool {
- return @bitCast(u64, this.bytes) == @as(u64, 0);
+ return @as(u64, @bitCast(this.bytes)) == @as(u64, 0);
}
pub fn len(this: String) usize {
@@ -263,14 +263,14 @@ pub const String = extern struct {
std.debug.assert(bun.isSliceInBuffer(in, buf));
return Pointer{
- .off = @truncate(u32, @intFromPtr(in.ptr) - @intFromPtr(buf.ptr)),
- .len = @truncate(u32, in.len),
+ .off = @as(u32, @truncate(@intFromPtr(in.ptr) - @intFromPtr(buf.ptr))),
+ .len = @as(u32, @truncate(in.len)),
};
}
};
pub inline fn ptr(this: String) Pointer {
- return @bitCast(Pointer, @as(u64, @truncate(u63, @bitCast(u64, this))));
+ return @as(Pointer, @bitCast(@as(u64, @as(u63, @truncate(@as(u64, @bitCast(this)))))));
}
// String must be a pointer because we reference it as a slice. It will become a dead pointer if it is copied.
@@ -483,7 +483,7 @@ test "String works" {
world,
);
try std.testing.expectEqualStrings("hello", str.slice(buf));
- try std.testing.expectEqual(@bitCast(u64, str), @bitCast(u64, [8]u8{ 'h', 'e', 'l', 'l', 'o', 0, 0, 0 }));
+ try std.testing.expectEqual(@as(u64, @bitCast(str)), @as(u64, @bitCast([8]u8{ 'h', 'e', 'l', 'l', 'o', 0, 0, 0 })));
}
{
@@ -551,7 +551,7 @@ pub const BigExternalString = extern struct {
pub fn from(in: string) BigExternalString {
return BigExternalString{
.off = 0,
- .len = @truncate(u32, in.len),
+ .len = @as(u32, @truncate(in.len)),
.hash = bun.Wyhash.hash(0, in),
};
}
@@ -560,8 +560,8 @@ pub const BigExternalString = extern struct {
std.debug.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr) and ((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len)));
return BigExternalString{
- .off = @truncate(u32, @intFromPtr(in.ptr) - @intFromPtr(buf.ptr)),
- .len = @truncate(u32, in.len),
+ .off = @as(u32, @truncate(@intFromPtr(in.ptr) - @intFromPtr(buf.ptr))),
+ .len = @as(u32, @truncate(in.len)),
.hash = hash,
};
}
@@ -709,7 +709,7 @@ pub const Version = extern struct {
pub const HashContext = struct {
pub fn hash(_: @This(), lhs: Version) u32 {
- return @truncate(u32, lhs.hash());
+ return @as(u32, @truncate(lhs.hash()));
}
pub fn eql(_: @This(), lhs: Version, rhs: Version) bool {
@@ -871,7 +871,7 @@ pub const Version = extern struct {
state = State.none;
},
}
- result.len = @truncate(u32, i);
+ result.len = @as(u32, @truncate(i));
break;
},
'+' => {
@@ -919,7 +919,7 @@ pub const Version = extern struct {
state = State.none;
},
}
- result.len = @truncate(u32, i);
+ result.len = @as(u32, @truncate(i));
return result;
}
@@ -955,7 +955,7 @@ pub const Version = extern struct {
break;
}
- stopped_at = @intCast(i32, i);
+ stopped_at = @as(i32, @intCast(i));
switch (input[i]) {
' ' => {
is_done = true;
@@ -1104,7 +1104,7 @@ pub const Version = extern struct {
}
}
- result.stopped_at = @intCast(u32, i);
+ result.stopped_at = @as(u32, @intCast(i));
if (comptime RawType != void) {
result.version.raw = sliced_string.sub(input[0..i]).external();