Diffstat:
-rw-r--r--  build.zig                                        22
-rw-r--r--  misctools/fetch.zig                               4
-rw-r--r--  misctools/http_bench.zig                          4
-rw-r--r--  src/bun.js/api/bun.zig                           36
-rw-r--r--  src/bun.js/api/ffi.zig                           38
-rw-r--r--  src/bun.js/api/html_rewriter.zig                  4
-rw-r--r--  src/bun.js/api/transpiler.zig                     2
-rw-r--r--  src/bun.js/base.zig                              34
-rw-r--r--  src/bun.js/node/types.zig                         6
-rw-r--r--  src/bun.js/webcore/streams.zig                   16
-rw-r--r--  src/bun_js.zig                                    2
-rw-r--r--  src/bundler.zig                                  32
-rw-r--r--  src/bundler/entry_points.zig                      2
-rw-r--r--  src/bundler/generate_node_modules_bundle.zig     10
-rw-r--r--  src/bunfig.zig                                    4
-rw-r--r--  src/cache.zig                                     4
-rw-r--r--  src/cli.zig                                      16
-rw-r--r--  src/cli/create_command.zig                      126
-rw-r--r--  src/cli/install_completions_command.zig          20
-rw-r--r--  src/cli/package_manager_command.zig              18
-rw-r--r--  src/cli/run_command.zig                          42
-rw-r--r--  src/cli/test_command.zig                          6
-rw-r--r--  src/cli/upgrade_command.zig                      70
-rw-r--r--  src/comptime_string_map.zig                       2
-rw-r--r--  src/defines.zig                                   4
-rw-r--r--  src/deps/picohttp.zig                            10
-rw-r--r--  src/deps/zig-clap/clap.zig                       24
-rw-r--r--  src/env_loader.zig                                8
-rw-r--r--  src/feature_flags.zig                             2
-rw-r--r--  src/fs.zig                                       24
-rw-r--r--  src/global.zig                                   72
-rw-r--r--  src/http.zig                                    124
-rw-r--r--  src/http/websocket_http_client.zig                6
-rw-r--r--  src/http_client_async.zig                        20
-rw-r--r--  src/install/bin.zig                              27
-rw-r--r--  src/install/install.zig                          38
-rw-r--r--  src/install/npm.zig                               7
-rw-r--r--  src/io/io_darwin.zig                              2
-rw-r--r--  src/js_ast.zig                                   38
-rw-r--r--  src/js_lexer.zig                                 22
-rw-r--r--  src/js_parser.zig                                17
-rw-r--r--  src/libarchive/libarchive.zig                     8
-rw-r--r--  src/linker.zig                                    2
-rw-r--r--  src/main.zig                                      4
-rw-r--r--  src/napi/napi.zig                                 4
-rw-r--r--  src/node_module_bundle.zig                        4
-rw-r--r--  src/panic_handler.zig                             6
-rw-r--r--  src/report.zig                                    8
-rw-r--r--  src/sync.zig                                      2
-rw-r--r--  src/toml/toml_lexer.zig                           8
-rw-r--r--  src/walker_skippable.zig                          2

51 files changed, 549 insertions(+), 464 deletions(-)
diff --git a/build.zig b/build.zig
index 37d73f3ef..fc9d82f22 100644
--- a/build.zig
+++ b/build.zig
@@ -187,7 +187,7 @@ const BunBuildOptions = struct {
var output_dir: []const u8 = "";
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
var file = std.fs.cwd().openFile(filepath, .{ .mode = .read_only }) catch |err| {
- std.debug.panic("error: {any} opening {any}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) });
+ std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) });
};
file.close();
@@ -202,7 +202,7 @@ fn updateRuntime() anyerror!void {
);
const runtime_version_file = std.fs.cwd().createFile("src/runtime.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/runtime.version", .{});
defer runtime_version_file.close();
- runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
+ runtime_version_file.writer().print("{any}", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&runtime_hash))}) catch unreachable;
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
const fallback_hash = std.hash.Wyhash.hash(
0,
@@ -211,7 +211,7 @@ fn updateRuntime() anyerror!void {
const fallback_version_file = std.fs.cwd().createFile("src/fallback.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/fallback.version", .{});
- fallback_version_file.writer().print("{x}", .{fallback_hash}) catch unreachable;
+ fallback_version_file.writer().print("{any}", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&fallback_hash))}) catch unreachable;
fallback_version_file.close();
}
@@ -274,7 +274,7 @@ pub fn build(b: *std.build.Builder) !void {
if (std.os.getenv("OUTPUT_DIR")) |output_dir_| {
output_dir = output_dir_;
} else {
- const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{any}{any}", .{ bin_label, triplet });
+ const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
output_dir = b.pathFromRoot(output_dir_base);
}
@@ -304,9 +304,9 @@ pub fn build(b: *std.build.Builder) !void {
else .{ .major = 0, .minor = 0, .patch = 0 };
// exe.want_lto = true;
- defer b.default_step.dependOn(&b.addLog("Output: {any}/{any}\n", .{ output_dir, bun_executable_name }).step);
+ defer b.default_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step);
defer b.default_step.dependOn(&b.addLog(
- "Build {any} v{} - v{}\n",
+ "Build {s} v{} - v{}\n",
.{
triplet,
min_version,
@@ -377,7 +377,7 @@ pub fn build(b: *std.build.Builder) !void {
{
obj_step.dependOn(&b.addLog(
- "Build {any} v{} - v{} ({any})\n",
+ "Build {s} v{} - v{} ({s})\n",
.{
triplet,
min_version,
@@ -415,7 +415,7 @@ pub fn build(b: *std.build.Builder) !void {
obj.link_function_sections = true;
}
- var log_step = b.addLog("Destination: {any}/{any}\n", .{ output_dir, bun_executable_name });
+ var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name });
log_step.step.dependOn(&obj.step);
}
@@ -515,7 +515,7 @@ pub fn build(b: *std.build.Builder) !void {
try linkObjectFiles(b, headers_obj, target);
{
- var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{any} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{"bun"});
+ var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{"bun"});
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "–––---\n\n" ++ "\x1b[0m", .{});
headers_step.dependOn(&before.step);
headers_step.dependOn(&headers_obj.step);
@@ -539,7 +539,7 @@ pub fn build(b: *std.build.Builder) !void {
try test_.packages.appendSlice(children);
}
- var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{any} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name});
+ var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name});
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "–––---\n\n" ++ "\x1b[0m", .{});
headers_step.dependOn(&before.step);
headers_step.dependOn(&test_.step);
@@ -557,6 +557,8 @@ pub var original_make_fn: ?fn (step: *std.build.Step) anyerror!void = null;
pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, target: anytype) !void {
if (target.getOsTag() == .freestanding)
return;
+
+
var dirs_to_search = std.BoundedArray([]const u8, 32).init(0) catch unreachable;
const arm_brew_prefix: []const u8 = "/opt/homebrew";
const x86_brew_prefix: []const u8 = "/usr/local";
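
(Note, not part of the commit: most hunks above and below swap `{any}` for `{s}` when printing strings. In the Zig std.fmt of this era, `{s}` renders a `[]const u8` as text, while `{any}` falls back to the generic debug formatter and prints the slice's individual byte values; the runtime/fallback version files additionally switch from `{x}` on a `u64` to hex-encoding its bytes with `std.fmt.fmtSliceHexLower`. A minimal standalone sketch of both behaviors, assuming a Zig 0.10-era standard library:)

const std = @import("std");

pub fn main() !void {
    const stdout = std.io.getStdOut().writer();
    const name: []const u8 = "bun";

    // {s} prints the slice as text; {any} uses the generic debug formatter,
    // which prints the individual byte values of the slice instead.
    try stdout.print("as string: {s}\n", .{name}); // as string: bun
    try stdout.print("as any:    {any}\n", .{name}); // as any:    { 98, 117, 110 }

    // Hex-encoding a u64 hash byte-by-byte, as src/runtime.version and
    // src/fallback.version are now written (byte order follows host memory layout).
    const hash: u64 = std.hash.Wyhash.hash(0, "example");
    try stdout.print("hash: {any}\n", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&hash))});
}
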
diff --git a/misctools/fetch.zig b/misctools/fetch.zig
index 2828f0a99..0b990572f 100644
--- a/misctools/fetch.zig
+++ b/misctools/fetch.zig
@@ -114,12 +114,12 @@ pub const Arguments = struct {
var absolute_path_ = file_path_buf[0..absolute_path_len :0];
var body_file = std.fs.openFileAbsoluteZ(absolute_path_, .{ .mode = .read_only }) catch |err| {
- Output.printErrorln("<r><red>{any}<r> opening file {any}", .{ @errorName(err), absolute_path });
+ Output.printErrorln("<r><red>{s}<r> opening file {s}", .{ @errorName(err), absolute_path });
Global.exit(1);
};
var file_contents = body_file.readToEndAlloc(allocator, try body_file.getEndPos()) catch |err| {
- Output.printErrorln("<r><red>{any}<r> reading file {any}", .{ @errorName(err), absolute_path });
+ Output.printErrorln("<r><red>{s}<r> reading file {s}", .{ @errorName(err), absolute_path });
Global.exit(1);
};
body_string = file_contents;
diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig
index 5064a7da7..11f0f23fa 100644
--- a/misctools/http_bench.zig
+++ b/misctools/http_bench.zig
@@ -120,12 +120,12 @@ pub const Arguments = struct {
var absolute_path_ = file_path_buf[0..absolute_path_len :0];
var body_file = std.fs.openFileAbsoluteZ(absolute_path_, .{ .mode = .read_only }) catch |err| {
- Output.printErrorln("<r><red>{any}<r> opening file {any}", .{ @errorName(err), absolute_path });
+ Output.printErrorln("<r><red>{s}<r> opening file {s}", .{ @errorName(err), absolute_path });
Global.exit(1);
};
var file_contents = body_file.readToEndAlloc(allocator, try body_file.getEndPos()) catch |err| {
- Output.printErrorln("<r><red>{any}<r> reading file {any}", .{ @errorName(err), absolute_path });
+ Output.printErrorln("<r><red>{s}<r> reading file {s}", .{ @errorName(err), absolute_path });
Global.exit(1);
};
body_string = file_contents;
diff --git a/src/bun.js/api/bun.zig b/src/bun.js/api/bun.zig
index e37ed0862..f8f7b55a8 100644
--- a/src/bun.js/api/bun.zig
+++ b/src/bun.js/api/bun.zig
@@ -307,7 +307,7 @@ pub fn registerMacro(
}
if (!arguments[1].?.value().isCell() or !arguments[1].?.value().isCallable(ctx.vm())) {
- JSError(getAllocator(ctx), "Macro must be a function. Received: {any}", .{@tagName(js.JSValueGetType(ctx, arguments[1]))}, ctx, exception);
+ JSError(getAllocator(ctx), "Macro must be a function. Received: {s}", .{@tagName(js.JSValueGetType(ctx, arguments[1]))}, ctx, exception);
return js.JSValueMakeUndefined(ctx);
}
@@ -591,26 +591,26 @@ pub fn readFileAsStringCallback(
) js.JSValueRef {
const path = buf_z.ptr[0..buf_z.len];
var file = std.fs.cwd().openFileZ(buf_z, .{ .mode = .read_only }) catch |err| {
- JSError(getAllocator(ctx), "Opening file {any} for path: \"{any}\"", .{ @errorName(err), path }, ctx, exception);
+ JSError(getAllocator(ctx), "Opening file {s} for path: \"{s}\"", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
defer file.close();
const stat = file.stat() catch |err| {
- JSError(getAllocator(ctx), "Getting file size {any} for \"{any}\"", .{ @errorName(err), path }, ctx, exception);
+ JSError(getAllocator(ctx), "Getting file size {s} for \"{s}\"", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
if (stat.kind != .File) {
- JSError(getAllocator(ctx), "Can't read a {any} as a string (\"{any}\")", .{ @tagName(stat.kind), path }, ctx, exception);
+ JSError(getAllocator(ctx), "Can't read a {s} as a string (\"{s}\")", .{ @tagName(stat.kind), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
}
var contents_buf = VirtualMachine.vm.allocator.alloc(u8, stat.size + 2) catch unreachable; // OOM
defer VirtualMachine.vm.allocator.free(contents_buf);
const contents_len = file.readAll(contents_buf) catch |err| {
- JSError(getAllocator(ctx), "{any} reading file (\"{any}\")", .{ @errorName(err), path }, ctx, exception);
+ JSError(getAllocator(ctx), "{s} reading file (\"{s}\")", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
@@ -631,26 +631,26 @@ pub fn readFileAsBytesCallback(
const path = buf_z.ptr[0..buf_z.len];
var file = std.fs.cwd().openFileZ(buf_z, .{ .mode = .read_only }) catch |err| {
- JSError(getAllocator(ctx), "Opening file {any} for path: \"{any}\"", .{ @errorName(err), path }, ctx, exception);
+ JSError(getAllocator(ctx), "Opening file {s} for path: \"{s}\"", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
defer file.close();
const stat = file.stat() catch |err| {
- JSError(getAllocator(ctx), "Getting file size {any} for \"{any}\"", .{ @errorName(err), path }, ctx, exception);
+ JSError(getAllocator(ctx), "Getting file size {s} for \"{s}\"", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
if (stat.kind != .File) {
- JSError(getAllocator(ctx), "Can't read a {any} as a string (\"{any}\")", .{ @tagName(stat.kind), path }, ctx, exception);
+ JSError(getAllocator(ctx), "Can't read a {s} as a string (\"{s}\")", .{ @tagName(stat.kind), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
}
var contents_buf = VirtualMachine.vm.allocator.alloc(u8, stat.size + 2) catch unreachable; // OOM
errdefer VirtualMachine.vm.allocator.free(contents_buf);
const contents_len = file.readAll(contents_buf) catch |err| {
- JSError(getAllocator(ctx), "{any} reading file (\"{any}\")", .{ @errorName(err), path }, ctx, exception);
+ JSError(getAllocator(ctx), "{s} reading file (\"{s}\")", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
@@ -743,7 +743,7 @@ pub fn openInEditor(
editor_choice = edit.editor;
if (editor_choice == null) {
edit.* = prev;
- JSError(getAllocator(ctx), "Could not find editor \"{any}\"", .{sliced.slice()}, ctx, exception);
+ JSError(getAllocator(ctx), "Could not find editor \"{s}\"", .{sliced.slice()}, ctx, exception);
return js.JSValueMakeUndefined(ctx);
} else if (edit.name.ptr == edit.path.ptr) {
edit.name = bun.default_allocator.dupe(u8, edit.path) catch unreachable;
@@ -778,7 +778,7 @@ pub fn openInEditor(
}
editor.open(edit.path, path, line, column, bun.default_allocator) catch |err| {
- JSC.JSError(bun.default_allocator, "Opening editor failed {any}", .{@errorName(err)}, ctx, exception);
+ JSC.JSError(bun.default_allocator, "Opening editor failed {s}", .{@errorName(err)}, ctx, exception);
return null;
};
@@ -1049,7 +1049,7 @@ pub fn readAllStdinSync(
var stdin = std.io.getStdIn();
var result = stdin.readToEndAlloc(allocator, std.math.maxInt(u32)) catch |err| {
- JSError(undefined, "{any} reading stdin", .{@errorName(err)}, ctx, exception);
+ JSError(undefined, "{s} reading stdin", .{@errorName(err)}, ctx, exception);
return null;
};
var out = ZigString.init(result);
@@ -1375,7 +1375,7 @@ pub const Crypto = struct {
switch (string_or_buffer) {
.string => |str| {
const encoding = JSC.Node.Encoding.from(str) orelse {
- globalThis.throwInvalidArguments("Unknown encoding: {any}", .{str});
+ globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str});
return JSC.JSValue.zero;
};
@@ -1428,7 +1428,7 @@ pub const Crypto = struct {
switch (string_or_buffer) {
.string => |str| {
const encoding = JSC.Node.Encoding.from(str) orelse {
- globalThis.throwInvalidArguments("Unknown encoding: {any}", .{str});
+ globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str});
return JSC.JSValue.zero;
};
@@ -2124,19 +2124,19 @@ pub const Unsafe = struct {
// defer getAllocator(ctx).destroy(lockfile);
// switch (cause.step) {
// .open_file => {
-// JSError(undefined, "error opening lockfile: {any}", .{
+// JSError(undefined, "error opening lockfile: {s}", .{
// @errorName(cause.value),
// }, ctx, exception);
// return null;
// },
// .parse_file => {
-// JSError(undefined, "error parsing lockfile: {any}", .{
+// JSError(undefined, "error parsing lockfile: {s}", .{
// @errorName(cause.value),
// }, ctx, exception);
// return null;
// },
// .read_file => {
-// JSError(undefined, "error reading lockfile: {any}", .{
+// JSError(undefined, "error reading lockfile: {s}", .{
// @errorName(cause.value),
// }, ctx, exception);
// return null;
@@ -2934,7 +2934,7 @@ pub const FFI = struct {
}
const array_buffer = value.asArrayBuffer(globalThis) orelse {
- return JSC.toInvalidArguments("Expected ArrayBufferView but received {any}", .{@tagName(value.jsType())}, globalThis);
+ return JSC.toInvalidArguments("Expected ArrayBufferView but received {s}", .{@tagName(value.jsType())}, globalThis);
};
if (array_buffer.len == 0) {
diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig
index 7d97d50dc..53ced79b3 100644
--- a/src/bun.js/api/ffi.zig
+++ b/src/bun.js/api/ffi.zig
@@ -331,7 +331,7 @@ pub const FFI = struct {
// optional if the user passed "ptr"
if (function.symbol_from_dynamic_library == null) {
var resolved_symbol = dylib.lookup(*anyopaque, function_name) orelse {
- const ret = JSC.toInvalidArguments("Symbol \"{any}\" not found in \"{any}\"", .{ std.mem.span(function_name), name_slice.slice() }, global);
+ const ret = JSC.toInvalidArguments("Symbol \"{s}\" not found in \"{s}\"", .{ std.mem.span(function_name), name_slice.slice() }, global);
for (symbols.values()) |*value| {
allocator.free(bun.constStrToU8(std.mem.span(value.base_name.?)));
value.arg_types.clearAndFree(allocator);
@@ -345,7 +345,7 @@ pub const FFI = struct {
}
function.compile(allocator) catch |err| {
- const ret = JSC.toInvalidArguments("{any} when compiling symbol \"{any}\" in \"{any}\"", .{
+ const ret = JSC.toInvalidArguments("{s} when compiling symbol \"{s}\" in \"{s}\"", .{
std.mem.span(@errorName(err)),
std.mem.span(function_name),
name_slice.slice(),
@@ -434,7 +434,7 @@ pub const FFI = struct {
const function_name = function.base_name.?;
if (function.symbol_from_dynamic_library == null) {
- const ret = JSC.toInvalidArguments("Symbol for \"{any}\" not found", .{std.mem.span(function_name)}, global);
+ const ret = JSC.toInvalidArguments("Symbol for \"{s}\" not found", .{std.mem.span(function_name)}, global);
for (symbols.values()) |*value| {
allocator.free(bun.constStrToU8(std.mem.span(value.base_name.?)));
value.arg_types.clearAndFree(allocator);
@@ -444,7 +444,7 @@ pub const FFI = struct {
}
function.compile(allocator) catch |err| {
- const ret = JSC.toInvalidArguments("{any} when compiling symbol \"{any}\"", .{
+ const ret = JSC.toInvalidArguments("{s} when compiling symbol \"{s}\"", .{
std.mem.span(@errorName(err)),
std.mem.span(function_name),
}, global);
@@ -544,7 +544,7 @@ pub const FFI = struct {
defer type_name.deinit();
abi_types.appendAssumeCapacity(ABIType.label.get(type_name.slice()) orelse {
abi_types.clearAndFree(allocator);
- return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown type {any}", .{type_name.slice()}, global);
+ return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown type {s}", .{type_name.slice()}, global);
});
}
}
@@ -576,7 +576,7 @@ pub const FFI = struct {
defer ret_slice.deinit();
return_type = ABIType.label.get(ret_slice.slice()) orelse {
abi_types.clearAndFree(allocator);
- return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown return type {any}", .{ret_slice.slice()}, global);
+ return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown return type {s}", .{ret_slice.slice()}, global);
};
}
@@ -624,7 +624,7 @@ pub const FFI = struct {
const value = symbols_iter.value;
if (value.isEmptyOrUndefinedOrNull()) {
- return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Expected an object for key \"{any}\"", .{prop}, global);
+ return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Expected an object for key \"{s}\"", .{prop}, global);
}
var function: Function = .{};
@@ -1143,7 +1143,7 @@ pub const FFI = struct {
try this.return_type.typename(writer);
try writer.writeAll(" return_value = ");
}
- try writer.print("{any}(", .{std.mem.span(this.base_name.?)});
+ try writer.print("{s}(", .{std.mem.span(this.base_name.?)});
first = true;
arg_buf[0..3].* = "arg".*;
for (this.arg_types.items) |arg, i| {
@@ -1296,7 +1296,7 @@ pub const FFI = struct {
const len = inner_buf.len + 1;
inner_buf = inner_buf_[0..len];
inner_buf[0] = '_';
- try writer.print("return {any}", .{this.return_type.toCExact(inner_buf)});
+ try writer.print("return {s}", .{this.return_type.toCExact(inner_buf)});
}
try writer.writeAll(";\n}\n\n");
@@ -1449,7 +1449,7 @@ pub const FFI = struct {
},
.char, .int8_t, .uint8_t, .int16_t, .uint16_t, .int32_t, .uint32_t => {
if (self.exact)
- try writer.print("({any})", .{std.mem.span(@tagName(self.tag))});
+ try writer.print("({s})", .{std.mem.span(@tagName(self.tag))});
try writer.writeAll("JSVALUE_TO_INT32(");
},
@@ -1498,31 +1498,31 @@ pub const FFI = struct {
switch (self.tag) {
.void => {},
.bool => {
- try writer.print("BOOLEAN_TO_JSVALUE({any})", .{self.symbol});
+ try writer.print("BOOLEAN_TO_JSVALUE({s})", .{self.symbol});
},
.char, .int8_t, .uint8_t, .int16_t, .uint16_t, .int32_t => {
- try writer.print("INT32_TO_JSVALUE((int32_t){any})", .{self.symbol});
+ try writer.print("INT32_TO_JSVALUE((int32_t){s})", .{self.symbol});
},
.uint32_t, .i64_fast => {
- try writer.print("INT64_TO_JSVALUE(JS_GLOBAL_OBJECT, (int64_t){any})", .{self.symbol});
+ try writer.print("INT64_TO_JSVALUE(JS_GLOBAL_OBJECT, (int64_t){s})", .{self.symbol});
},
.int64_t => {
- try writer.print("INT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {any})", .{self.symbol});
+ try writer.print("INT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {s})", .{self.symbol});
},
.u64_fast => {
- try writer.print("UINT64_TO_JSVALUE(JS_GLOBAL_OBJECT, {any})", .{self.symbol});
+ try writer.print("UINT64_TO_JSVALUE(JS_GLOBAL_OBJECT, {s})", .{self.symbol});
},
.uint64_t => {
- try writer.print("UINT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {any})", .{self.symbol});
+ try writer.print("UINT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {s})", .{self.symbol});
},
.function, .cstring, .ptr => {
- try writer.print("PTR_TO_JSVALUE({any})", .{self.symbol});
+ try writer.print("PTR_TO_JSVALUE({s})", .{self.symbol});
},
.double => {
- try writer.print("DOUBLE_TO_JSVALUE({any})", .{self.symbol});
+ try writer.print("DOUBLE_TO_JSVALUE({s})", .{self.symbol});
},
.float => {
- try writer.print("FLOAT_TO_JSVALUE({any})", .{self.symbol});
+ try writer.print("FLOAT_TO_JSVALUE({s})", .{self.symbol});
},
}
}
diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig
index 252915f56..a3632bf32 100644
--- a/src/bun.js/api/html_rewriter.zig
+++ b/src/bun.js/api/html_rewriter.zig
@@ -735,7 +735,7 @@ const DocumentHandler = struct {
.Object, .ProxyObject, .Cell, .FinalObject => {},
else => |kind| {
JSC.throwInvalidArguments(
- "Expected object but received {any}",
+ "Expected object but received {s}",
.{@as(string, @tagName(kind))},
global,
exception,
@@ -886,7 +886,7 @@ const ElementHandler = struct {
.Object, .ProxyObject, .Cell, .FinalObject => {},
else => |kind| {
JSC.throwInvalidArguments(
- "Expected object but received {any}",
+ "Expected object but received {s}",
.{@as(string, @tagName(kind))},
global,
exception,
diff --git a/src/bun.js/api/transpiler.zig b/src/bun.js/api/transpiler.zig
index 75b5866bf..eba9ef7aa 100644
--- a/src/bun.js/api/transpiler.zig
+++ b/src/bun.js/api/transpiler.zig
@@ -667,7 +667,7 @@ fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allo
var key = try key_.toOwnedSlice(bun.default_allocator);
if (!JSLexer.isIdentifier(key)) {
- JSC.throwInvalidArguments("\"{any}\" is not a valid ECMAScript identifier", .{key}, ctx, exception);
+ JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{key}, ctx, exception);
bun.default_allocator.free(key);
return transpiler;
}
diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig
index 259a04934..b014b2430 100644
--- a/src/bun.js/base.zig
+++ b/src/bun.js/base.zig
@@ -273,7 +273,7 @@ pub const To = struct {
if (comptime Info == .Struct) {
if (comptime @hasDecl(Type, "Class") and @hasDecl(Type.Class, "isJavaScriptCoreClass")) {
if (comptime !@hasDecl(Type, "finalize")) {
- @compileError(comptime std.fmt.comptimePrint("JSC class {any} must implement finalize to prevent memory leaks", .{Type.Class.name}));
+ @compileError(comptime std.fmt.comptimePrint("JSC class {s} must implement finalize to prevent memory leaks", .{Type.Class.name}));
}
if (comptime !@hasDecl(Type, "toJS")) {
@@ -583,12 +583,12 @@ pub const d = struct {
}
if (no_type) {
- buf = buf ++ printIndented("{any}({any});\n", .{
+ buf = buf ++ printIndented("{s}({s});\n", .{
func.name,
args,
}, indent);
} else {
- buf = buf ++ printIndented("{any}({any}): {any};\n", .{
+ buf = buf ++ printIndented("{s}({s}): {s};\n", .{
func.name,
args,
func.@"return",
@@ -618,12 +618,12 @@ pub const d = struct {
}
if (no_type) {
- buf = buf ++ printIndented("function {any}({any});\n", .{
+ buf = buf ++ printIndented("function {s}({s});\n", .{
func.name,
args,
}, indent);
} else {
- buf = buf ++ printIndented("function {any}({any}): {any};\n", .{
+ buf = buf ++ printIndented("function {s}({s}): {s};\n", .{
func.name,
args,
func.@"return",
@@ -676,7 +676,7 @@ pub const d = struct {
if (klass.global) {
buf = buf ++ printIndented("declare global {{\n", .{}, indent);
} else {
- buf = buf ++ printIndented("declare module \"{any}\" {{\n", .{klass.path}, indent);
+ buf = buf ++ printIndented("declare module \"{s}\" {{\n", .{klass.path}, indent);
}
indent += indent_level;
@@ -734,9 +734,9 @@ pub const d = struct {
const qualifier = if (!klass.default_export) "export " else "";
if (klass.interface) {
- buf = buf ++ printIndented("export interface {any} {{\n", .{klass.name}, indent);
+ buf = buf ++ printIndented("export interface {s} {{\n", .{klass.name}, indent);
} else {
- buf = buf ++ printIndented("{any}class {any} {{\n", .{ qualifier, klass.name }, indent);
+ buf = buf ++ printIndented("{s}class {s} {{\n", .{ qualifier, klass.name }, indent);
}
indent += indent_level;
@@ -781,7 +781,7 @@ pub const d = struct {
buf = buf ++ printIndented("}}\n", .{}, indent);
if (klass.default_export) {
- buf = buf ++ printIndented("export = {any};\n", .{klass.name}, indent);
+ buf = buf ++ printIndented("export = {s};\n", .{klass.name}, indent);
}
break :brk;
@@ -797,14 +797,14 @@ pub const d = struct {
const first = splitter.next() orelse break :brk;
const second = splitter.next() orelse {
- buf = buf ++ printIndented("/** {any} */\n", .{std.mem.trim(u8, first, " ")}, indent);
+ buf = buf ++ printIndented("/** {s} */\n", .{std.mem.trim(u8, first, " ")}, indent);
break :brk;
};
buf = buf ++ printIndented("/**\n", .{}, indent);
- buf = buf ++ printIndented(" * {any}\n", .{std.mem.trim(u8, first, " ")}, indent);
- buf = buf ++ printIndented(" * {any}\n", .{std.mem.trim(u8, second, " ")}, indent);
+ buf = buf ++ printIndented(" * {s}\n", .{std.mem.trim(u8, first, " ")}, indent);
+ buf = buf ++ printIndented(" * {s}\n", .{std.mem.trim(u8, second, " ")}, indent);
while (splitter.next()) |line| {
- buf = buf ++ printIndented(" * {any}\n", .{std.mem.trim(u8, line, " ")}, indent);
+ buf = buf ++ printIndented(" * {s}\n", .{std.mem.trim(u8, line, " ")}, indent);
}
buf = buf ++ printIndented("*/\n", .{}, indent);
}
@@ -1441,7 +1441,7 @@ pub fn NewClassWithInstanceType(
try writer.writeAll(" ");
}
- try writer.print("{any} ", .{GetterNameFormatter{ .index = i }});
+ try writer.print("{s} ", .{GetterNameFormatter{ .index = i }});
k = 0;
@@ -1473,7 +1473,7 @@ pub fn NewClassWithInstanceType(
try writer.writeAll(" ");
}
- try writer.print("{any} ", .{FunctionNameFormatter{ .index = i }});
+ try writer.print("{s} ", .{FunctionNameFormatter{ .index = i }});
k = 0;
while (k < middle_padding - name_.len) : (k += 1) {
@@ -3278,7 +3278,7 @@ pub fn DOMCall(
try writer.writeAll("JSC::DOMJIT::Effect::forPure(),\n ");
} else if (effect.writes[0] == DOMEffect.pure.writes[0]) {
try writer.print(
- "JSC::DOMJIT::Effect::forReadKinds(JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}),\n ",
+ "JSC::DOMJIT::Effect::forReadKinds(JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}),\n ",
.{
@tagName(effect.reads[0]),
@tagName(effect.reads[1]),
@@ -3288,7 +3288,7 @@ pub fn DOMCall(
);
} else if (effect.reads[0] == DOMEffect.pure.reads[0]) {
try writer.print(
- "JSC::DOMJIT::Effect::forWriteKinds(JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}),\n ",
+ "JSC::DOMJIT::Effect::forWriteKinds(JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}),\n ",
.{
@tagName(effect.writes[0]),
@tagName(effect.writes[1]),
diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig
index bb7ef85ca..89d466c4e 100644
--- a/src/bun.js/node/types.zig
+++ b/src/bun.js/node/types.zig
@@ -1564,7 +1564,7 @@ pub const Path = struct {
if (name_.isEmpty()) {
return JSC.ZigString.Empty.toValue(globalThis);
}
- const out = std.fmt.allocPrint(allocator, "{any}{any}", .{ name_, ext }) catch unreachable;
+ const out = std.fmt.allocPrint(allocator, "{s}{s}", .{ name_, ext }) catch unreachable;
defer allocator.free(out);
return JSC.ZigString.init(out).withEncoding().toValueGC(globalThis);
@@ -1573,13 +1573,13 @@ pub const Path = struct {
if (insert_separator) {
const separator = if (!isWindows) "/" else "\\";
if (name_with_ext.isEmpty()) {
- const out = std.fmt.allocPrint(allocator, "{}{any}{}{}", .{ dir, separator, name_, ext }) catch unreachable;
+ const out = std.fmt.allocPrint(allocator, "{}{s}{}{}", .{ dir, separator, name_, ext }) catch unreachable;
defer allocator.free(out);
return JSC.ZigString.init(out).withEncoding().toValueGC(globalThis);
}
{
- const out = std.fmt.allocPrint(allocator, "{}{any}{}", .{
+ const out = std.fmt.allocPrint(allocator, "{}{s}{}", .{
dir,
separator,
name_with_ext,
diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig
index 6d4fcb6bf..49e214bb7 100644
--- a/src/bun.js/webcore/streams.zig
+++ b/src/bun.js/webcore/streams.zig
@@ -3162,6 +3162,7 @@ pub const FileBlobLoader = struct {
resume this.concurrent.read_frame;
}
+ const ScheduleReadFrame = @Frame(scheduleRead);
pub fn scheduleRead(this: *FileBlobLoader) void {
if (comptime Environment.isMac) {
var remaining = this.buf[this.concurrent.read..];
@@ -3209,14 +3210,15 @@ pub const FileBlobLoader = struct {
null,
);
- suspend {
- var _frame = @frame();
- var this_frame = bun.default_allocator.create(std.meta.Child(@TypeOf(_frame))) catch unreachable;
- this_frame.* = _frame.*;
- this.concurrent.read_frame = this_frame;
- }
+ // suspend {
+ // var _frame = @frame();
+ // var this_frame = bun.allocateFrame(ScheduleReadFrame) catch unreachable;
+ // this_frame.* = _frame.*;
+ // this.concurrent.read_frame = this_frame;
+ // }
+ unreachable;
- scheduleMainThreadTask(this);
+ // scheduleMainThreadTask(this);
}
pub fn onJSThread(task_ctx: *anyopaque) void {
diff --git a/src/bun_js.zig b/src/bun_js.zig
index 8f05eb560..d406033a1 100644
--- a/src/bun_js.zig
+++ b/src/bun_js.zig
@@ -101,7 +101,7 @@ pub const Run = struct {
null,
logger.Loc.Empty,
run.vm.allocator,
- "BUN_CONFIG_MAX_HTTP_REQUESTS value \"{any}\" is not a valid integer between 1 and 65535",
+ "BUN_CONFIG_MAX_HTTP_REQUESTS value \"{s}\" is not a valid integer between 1 and 65535",
.{max_http_requests},
) catch unreachable;
break :load;
diff --git a/src/bundler.zig b/src/bundler.zig
index f9ff22a38..b8a32b5a5 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -253,13 +253,13 @@ pub const PluginRunner = struct {
if (static_namespace) {
return Fs.Path.initWithNamespace(
- std.fmt.allocPrint(this.allocator, "{any}", .{file_path}) catch unreachable,
+ std.fmt.allocPrint(this.allocator, "{s}", .{file_path}) catch unreachable,
user_namespace.slice(),
);
} else {
return Fs.Path.initWithNamespace(
- std.fmt.allocPrint(this.allocator, "{any}", .{file_path}) catch unreachable,
- std.fmt.allocPrint(this.allocator, "{any}", .{user_namespace}) catch unreachable,
+ std.fmt.allocPrint(this.allocator, "{s}", .{file_path}) catch unreachable,
+ std.fmt.allocPrint(this.allocator, "{s}", .{user_namespace}) catch unreachable,
);
}
}
@@ -347,7 +347,7 @@ pub const PluginRunner = struct {
// Our super slow way of cloning the string into memory owned by JSC
var combined_string = std.fmt.allocPrint(
this.allocator,
- "{any}:{any}",
+ "{s}:{s}",
.{ user_namespace, file_path },
) catch unreachable;
const out = JSC.ZigString.init(combined_string).toValueGC(this.global_object).getZigString(this.global_object);
@@ -411,13 +411,13 @@ pub const Bundler = struct {
};
if (has_dot_slash_form) {
- bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{any} resolving \"{any}\". Did you mean: \"./{any}\"", .{
+ bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving \"{s}\". Did you mean: \"./{s}\"", .{
@errorName(err),
entry_point,
entry_point,
}) catch unreachable;
} else {
- bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{any} resolving \"{any}\" (entry point)", .{ @errorName(err), entry_point }) catch unreachable;
+ bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving \"{s}\" (entry point)", .{ @errorName(err), entry_point }) catch unreachable;
}
return err;
@@ -965,7 +965,7 @@ pub const Bundler = struct {
var file: std.fs.File = undefined;
if (Outstream == Dir) {
- const output_dir = outstream;
+ const output_dir = outstream.dir;
if (std.fs.path.dirname(file_path.pretty)) |dirname| {
try output_dir.makePath(dirname);
@@ -1033,7 +1033,7 @@ pub const Bundler = struct {
file_op.is_tmpdir = false;
if (Outstream == Dir) {
- file_op.dir = outstream.fd;
+ file_op.dir = outstream.dir.fd;
if (bundler.fs.fs.needToCloseFiles()) {
file.close();
@@ -1091,7 +1091,7 @@ pub const Bundler = struct {
file_op.is_tmpdir = false;
if (Outstream == Dir) {
- file_op.dir = outstream.fd;
+ file_op.dir = outstream.dir.fd;
if (bundler.fs.fs.needToCloseFiles()) {
file.close();
@@ -1374,7 +1374,7 @@ pub const Bundler = struct {
true,
file_descriptor,
) catch |err| {
- bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{any} reading \"{any}\"", .{ @errorName(err), path.text }) catch {};
+ bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} reading \"{s}\"", .{ @errorName(err), path.text }) catch {};
return null;
};
input_fd = entry.fd;
@@ -1513,7 +1513,7 @@ pub const Bundler = struct {
null,
logger.Loc.Empty,
bundler.allocator,
- "Invalid wasm file \"{any}\" (missing magic header)",
+ "Invalid wasm file \"{s}\" (missing magic header)",
.{path.text},
) catch {};
return null;
@@ -1528,7 +1528,7 @@ pub const Bundler = struct {
}
},
.css => {},
- else => Global.panic("Unsupported loader {any} for path: {any}", .{ loader, source.path.text }),
+ else => Global.panic("Unsupported loader {any} for path: {s}", .{ loader, source.path.text }),
}
return null;
@@ -1682,12 +1682,12 @@ pub const Bundler = struct {
}
const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| {
- Output.prettyError("Error resolving \"{any}\": {any}\n", .{ entry, @errorName(err) });
+ Output.prettyError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) });
continue;
};
if (result.pathConst() == null) {
- Output.prettyError("\"{any}\" is disabled due to \"browser\" field in package.json.\n", .{
+ Output.prettyError("\"{s}\" is disabled due to \"browser\" field in package.json.\n", .{
entry,
});
continue;
@@ -1767,10 +1767,10 @@ pub const Bundler = struct {
};
}
} else {
- const output_dir = bundler.options.output_dir_handle orelse {
+ const output_dir: Dir = .{.dir = bundler.options.output_dir_handle orelse {
Output.printError("Invalid or missing output directory.", .{});
Global.crash();
- };
+ }};
if (load_from_routes) {
if (bundler.options.framework) |*framework| {
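
(Note, not part of the commit: the `.dir` accesses introduced above, and the `std.fs.IterableDir` initializers in src/cli/create_command.zig further down, track a std.fs change in this Zig version: directory iteration moved off `std.fs.Dir` onto a separate `std.fs.IterableDir` wrapper whose plain `Dir` handle lives in a `dir` field. A minimal standalone sketch of that pattern, assuming a Zig 0.10-era standard library; the paths used are illustrative:)

const std = @import("std");

pub fn main() !void {
    // Opening for iteration now yields an IterableDir wrapper...
    var iterable = try std.fs.cwd().openIterableDir(".", .{});
    defer iterable.close();

    // ...while ordinary file operations go through the wrapped Dir in `.dir`.
    iterable.dir.access("build.zig", .{}) catch {};

    var it = iterable.iterate();
    while (try it.next()) |entry| {
        std.debug.print("{s}\n", .{entry.name});
    }
}
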
diff --git a/src/bundler/entry_points.zig b/src/bundler/entry_points.zig
index 563544d1d..e391be277 100644
--- a/src/bundler/entry_points.zig
+++ b/src/bundler/entry_points.zig
@@ -279,7 +279,7 @@ pub const MacroEntryPoint = struct {
hasher.update(function_name);
const truncated_u32 = @truncate(u32, hasher.final());
- const specifier = std.fmt.bufPrint(buf, js_ast.Macro.namespaceWithColon ++ "//{x}.js", .{truncated_u32}) catch unreachable;
+ const specifier = std.fmt.bufPrint(buf, js_ast.Macro.namespaceWithColon ++ "//{any}.js", .{bun.fmt.x(truncated_u32)}) catch unreachable;
len.* = @truncate(u32, specifier.len);
return generateIDFromSpecifier(specifier);
diff --git a/src/bundler/generate_node_modules_bundle.zig b/src/bundler/generate_node_modules_bundle.zig
index 1b18bc7e4..149c647b8 100644
--- a/src/bundler/generate_node_modules_bundle.zig
+++ b/src/bundler/generate_node_modules_bundle.zig
@@ -684,11 +684,11 @@ pub fn generate(
fixed_buffer_writer.print(
\\if ('window' in globalThis) {{
\\ (function() {{
- \\ BUN_RUNTIME.__injectFastRefresh(${x}());
+ \\ BUN_RUNTIME.__injectFastRefresh(${any}());
\\ }})();
\\}}
,
- .{refresh_runtime_module_id},
+ .{bun.fmt.x(refresh_runtime_module_id)},
) catch unreachable;
try this.tmpfile.writeAll(fixed_buffer.buffer[0..fixed_buffer.pos]);
}
@@ -793,7 +793,7 @@ pub fn generate(
const etag_u64 = hasher.final();
// We store the etag as a ascii hex encoded u64
// This is so we can send the bytes directly in the HTTP server instead of formatting it as hex each time.
- javascript_bundle.etag = try std.fmt.allocPrint(allocator, "{x}", .{etag_u64});
+ javascript_bundle.etag = try std.fmt.allocPrint(allocator, "{any}", .{bun.fmt.x(etag_u64)});
javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp()));
const basename = std.fs.path.basename(std.mem.span(destination));
@@ -803,10 +803,10 @@ pub fn generate(
else
try std.fmt.allocPrint(
this.allocator,
- "/{s}.{x}.bun",
+ "/{s}.{any}.bun",
.{
basename[0 .. basename.len - extname.len],
- etag_u64,
+ bun.fmt.x(etag_u64),
},
);
diff --git a/src/bunfig.zig b/src/bunfig.zig
index 0224f04eb..ea4b93993 100644
--- a/src/bunfig.zig
+++ b/src/bunfig.zig
@@ -64,11 +64,11 @@ pub const Bunfig = struct {
// Token
if (url.username.len == 0 and url.password.len > 0) {
registry.token = url.password;
- registry.url = try std.fmt.allocPrint(this.allocator, "{any}://{any}/{any}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") });
+ registry.url = try std.fmt.allocPrint(this.allocator, "{any}://{any}/{s}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") });
} else if (url.username.len > 0 and url.password.len > 0) {
registry.username = url.username;
registry.password = url.password;
- registry.url = try std.fmt.allocPrint(this.allocator, "{any}://{any}/{any}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") });
+ registry.url = try std.fmt.allocPrint(this.allocator, "{any}://{any}/{s}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") });
} else {
registry.url = url.href;
}
diff --git a/src/cache.zig b/src/cache.zig
index 3ee2138d4..10e5b588f 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -119,14 +119,14 @@ pub const Fs = struct {
const file = if (this.stream)
rfs.readFileWithHandle(path, null, file_handle, true, shared, true) catch |err| {
if (comptime Environment.isDebug) {
- Output.printError("{any}: readFile error -- {any}", .{ path, @errorName(err) });
+ Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
}
return err;
}
else
rfs.readFileWithHandle(path, null, file_handle, true, shared, false) catch |err| {
if (comptime Environment.isDebug) {
- Output.printError("{any}: readFile error -- {any}", .{ path, @errorName(err) });
+ Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
}
return err;
};
diff --git a/src/cli.zig b/src/cli.zig
index b8b31add4..f0e7526b5 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -129,7 +129,7 @@ pub const Arguments = struct {
}
pub fn fileReadError(err: anyerror, stderr: anytype, filename: string, kind: string) noreturn {
- stderr.writer().print("Error reading file \"{any}\" for {any}: {any}", .{ filename, kind, @errorName(err) }) catch {};
+ stderr.writer().print("Error reading file \"{s}\" for {s}: {s}", .{ filename, kind, @errorName(err) }) catch {};
std.process.exit(1);
}
@@ -223,7 +223,7 @@ pub const Arguments = struct {
fn loadConfigPath(allocator: std.mem.Allocator, auto_loaded: bool, config_path: [:0]const u8, ctx: *Command.Context, comptime cmd: Command.Tag) !void {
var config_file = std.fs.openFileAbsoluteZ(config_path, .{ .mode = .read_only }) catch |err| {
if (auto_loaded) return;
- Output.prettyErrorln("<r><red>error<r>: {any} opening config \"{any}\"", .{
+ Output.prettyErrorln("<r><red>error<r>: {s} opening config \"{s}\"", .{
@errorName(err),
std.mem.span(config_path),
});
@@ -232,7 +232,7 @@ pub const Arguments = struct {
defer config_file.close();
var contents = config_file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| {
if (auto_loaded) return;
- Output.prettyErrorln("<r><red>error<r>: {any} reading config \"{any}\"", .{
+ Output.prettyErrorln("<r><red>error<r>: {s} reading config \"{s}\"", .{
@errorName(err),
std.mem.span(config_path),
});
@@ -453,7 +453,7 @@ pub const Arguments = struct {
} else if (enum_value.len == 0) {
ctx.debug.global_cache = options.GlobalCache.force;
} else {
- Output.prettyErrorln("Invalid value for --install: \"{any}\". Must be either \"auto\", \"fallback\", \"force\", or \"disable\"\n", .{enum_value});
+ Output.prettyErrorln("Invalid value for --install: \"{s}\". Must be either \"auto\", \"fallback\", \"force\", or \"disable\"\n", .{enum_value});
Global.exit(1);
}
}
@@ -477,7 +477,7 @@ pub const Arguments = struct {
} else if (strings.eqlComptime(setting, "external")) {
opts.source_map = Api.SourceMapMode.external;
} else {
- Output.prettyErrorln("<r><red>error<r>: Invalid sourcemap setting: \"{any}\"", .{setting});
+ Output.prettyErrorln("<r><red>error<r>: Invalid sourcemap setting: \"{s}\"", .{setting});
Global.crash();
}
}
@@ -1225,12 +1225,12 @@ pub const Command = struct {
}
if (was_js_like) {
- Output.prettyErrorln("<r><red>error<r>: Module not found \"<b>{any}<r>\"", .{
+ Output.prettyErrorln("<r><red>error<r>: Module not found \"<b>{s}<r>\"", .{
ctx.positionals[0],
});
Global.exit(1);
} else if (ctx.positionals.len > 0) {
- Output.prettyErrorln("<r><red>error<r>: File not found \"<b>{any}<r>\"", .{
+ Output.prettyErrorln("<r><red>error<r>: File not found \"<b>{s}<r>\"", .{
ctx.positionals[0],
});
Global.exit(1);
@@ -1299,7 +1299,7 @@ pub const Command = struct {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
}
- Output.prettyErrorln("<r><red>error<r>: Failed to run <b>{any}<r> due to error <b>{any}<r>", .{
+ Output.prettyErrorln("<r><red>error<r>: Failed to run <b>{s}<r> due to error <b>{s}<r>", .{
std.fs.path.basename(file_path),
@errorName(err),
});
diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig
index 43494c893..c2b08bc99 100644
--- a/src/cli/create_command.zig
+++ b/src/cli/create_command.zig
@@ -87,7 +87,7 @@ const UnsupportedPackages = struct {
pub fn print(this: UnsupportedPackages) void {
inline for (comptime std.meta.fieldNames(UnsupportedPackages)) |field_name| {
if (@field(this, field_name)) {
- Output.prettyErrorln("<r><yellow>warn<r><d>:<r> <b>\"{any}\"<r> won't work in bun yet\n", .{field_name});
+ Output.prettyErrorln("<r><yellow>warn<r><d>:<r> <b>\"{s}\"<r> won't work in bun yet\n", .{field_name});
}
}
}
@@ -132,9 +132,9 @@ fn execTask(allocator: std.mem.Allocator, task_: string, cwd: string, _: string,
Output.pretty("\n<r><d>$<b>", .{});
for (argv) |arg, i| {
if (i > argv.len - 1) {
- Output.print(" {any} ", .{arg});
+ Output.print(" {s} ", .{arg});
} else {
- Output.print(" {any}", .{arg});
+ Output.print(" {s}", .{arg});
}
}
Output.pretty("<r>", .{});
@@ -285,7 +285,7 @@ pub const CreateCommand = struct {
var outdir_path_ = home_dir_buf[0..outdir_path.len :0];
std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer;
if (create_options.verbose) {
- Output.prettyErrorln("reading from {any}", .{outdir_path});
+ Output.prettyErrorln("reading from {s}", .{outdir_path});
}
example_tag = Example.Tag.local_folder;
break :brk outdir_path;
@@ -299,7 +299,7 @@ pub const CreateCommand = struct {
var outdir_path_ = home_dir_buf[0..outdir_path.len :0];
std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer;
if (create_options.verbose) {
- Output.prettyErrorln("reading from {any}", .{outdir_path});
+ Output.prettyErrorln("reading from {s}", .{outdir_path});
}
example_tag = Example.Tag.local_folder;
break :brk outdir_path;
@@ -313,7 +313,7 @@ pub const CreateCommand = struct {
var outdir_path_ = home_dir_buf[0..outdir_path.len :0];
std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer;
if (create_options.verbose) {
- Output.prettyErrorln("reading from {any}", .{outdir_path});
+ Output.prettyErrorln("reading from {s}", .{outdir_path});
}
example_tag = Example.Tag.local_folder;
break :brk outdir_path;
@@ -379,7 +379,7 @@ pub const CreateCommand = struct {
const destination = try filesystem.dirname_store.append([]const u8, resolve_path.joinAbs(filesystem.top_level_dir, .auto, dirname));
var progress = std.Progress{};
- var node = progress.start(try ProgressBuf.print("Loading {any}", .{template}), 0);
+ var node = progress.start(try ProgressBuf.print("Loading {s}", .{template}), 0);
progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
// alacritty is fast
@@ -399,7 +399,7 @@ pub const CreateCommand = struct {
var package_json_file: ?std.fs.File = null;
if (create_options.verbose) {
- Output.prettyErrorln("Downloading as {any}\n", .{@tagName(example_tag)});
+ Output.prettyErrorln("Downloading as {s}\n", .{@tagName(example_tag)});
}
switch (example_tag) {
@@ -411,7 +411,7 @@ pub const CreateCommand = struct {
node.end();
progress.refresh();
- Output.prettyError("\n<r><red>error:<r> <b>\"{any}\"<r> was not found. Here are templates you can use:\n\n", .{
+ Output.prettyError("\n<r><red>error:<r> <b>\"{s}\"<r> was not found. Here are templates you can use:\n\n", .{
template,
});
Output.flush();
@@ -444,7 +444,7 @@ pub const CreateCommand = struct {
node.end();
progress.refresh();
- Output.prettyError("\n<r><red>error:<r> <b>\"{any}\"<r> was not found on GitHub. Here are templates you can use:\n\n", .{
+ Output.prettyError("\n<r><red>error:<r> <b>\"{s}\"<r> was not found on GitHub. Here are templates you can use:\n\n", .{
template,
});
Output.flush();
@@ -466,7 +466,7 @@ pub const CreateCommand = struct {
else => unreachable,
};
- node.name = try ProgressBuf.print("Decompressing {any}", .{template});
+ node.name = try ProgressBuf.print("Decompressing {s}", .{template});
node.setCompletedItems(0);
node.setEstimatedTotalItems(0);
@@ -479,7 +479,7 @@ pub const CreateCommand = struct {
try gunzip.readAll();
gunzip.deinit();
- node.name = try ProgressBuf.print("Extracting {any}", .{template});
+ node.name = try ProgressBuf.print("Extracting {s}", .{template});
node.setCompletedItems(0);
node.setEstimatedTotalItems(0);
@@ -516,21 +516,21 @@ pub const CreateCommand = struct {
// Thank you create-react-app for this copy (and idea)
Output.prettyErrorln(
- "<r>\n<red>error<r><d>: <r>The directory <b><blue>{any}<r>/ contains files that could conflict:\n\n",
+ "<r>\n<red>error<r><d>: <r>The directory <b><blue>{s}<r>/ contains files that could conflict:\n\n",
.{
std.fs.path.basename(destination),
},
);
for (archive_context.overwrite_list.keys()) |path| {
if (strings.endsWith(path, std.fs.path.sep_str)) {
- Output.prettyError("<r> <blue>{any}<r>", .{path[0 .. std.math.max(path.len, 1) - 1]});
+ Output.prettyError("<r> <blue>{s}<r>", .{path[0 .. std.math.max(path.len, 1) - 1]});
Output.prettyErrorln(std.fs.path.sep_str, .{});
} else {
- Output.prettyErrorln("<r> {any}", .{path});
+ Output.prettyErrorln("<r> {s}", .{path});
}
}
- Output.prettyErrorln("<r>\n<d>To download {any} anyway, use --force<r>", .{template});
+ Output.prettyErrorln("<r>\n<d>To download {s} anyway, use --force<r>", .{template});
Global.exit(1);
}
}
@@ -568,7 +568,7 @@ pub const CreateCommand = struct {
node.end();
progress.refresh();
- Output.prettyErrorln("<r><red>{any}<r>: opening dir {any}", .{ @errorName(err), template });
+ Output.prettyErrorln("<r><red>{s}<r>: opening dir {s}", .{ @errorName(err), template });
Global.exit(1);
};
@@ -578,7 +578,7 @@ pub const CreateCommand = struct {
progress.refresh();
- Output.prettyErrorln("<r><red>{any}<r>: creating dir {any}", .{ @errorName(err), destination });
+ Output.prettyErrorln("<r><red>{s}<r>: creating dir {s}", .{ @errorName(err), destination });
Global.exit(1);
};
@@ -605,14 +605,14 @@ pub const CreateCommand = struct {
progress_.refresh();
- Output.prettyErrorln("<r><red>{any}<r>: copying file {any}", .{ @errorName(err), entry.path });
+ Output.prettyErrorln("<r><red>{s}<r>: copying file {s}", .{ @errorName(err), entry.path });
Global.exit(1);
};
};
defer outfile.close();
defer node_.completeOne();
- var infile = try entry.dir.openFile(entry.basename, .{ .mode = .read_only });
+ var infile = try entry.dir.dir.openFile(entry.basename, .{ .mode = .read_only });
defer infile.close();
// Assumption: you only really care about making sure something that was executable is still executable
@@ -620,12 +620,12 @@ pub const CreateCommand = struct {
_ = C.fchmod(outfile.handle, stat.mode);
CopyFile.copy(infile.handle, outfile.handle) catch {
- entry.dir.copyFile(entry.basename, destination_dir_, entry.path, .{}) catch |err| {
+ entry.dir.dir.copyFile(entry.basename, destination_dir_, entry.path, .{}) catch |err| {
node_.end();
progress_.refresh();
- Output.prettyErrorln("<r><red>{any}<r>: copying file {any}", .{ @errorName(err), entry.path });
+ Output.prettyErrorln("<r><red>{s}<r>: copying file {s}", .{ @errorName(err), entry.path });
Global.exit(1);
};
};
@@ -633,9 +633,9 @@ pub const CreateCommand = struct {
}
};
- try FileCopier.copy(destination_dir, &walker_, node, &progress);
+ try FileCopier.copy(destination_dir.dir, &walker_, node, &progress);
- package_json_file = destination_dir.openFile("package.json", .{ .mode = .read_write }) catch null;
+ package_json_file = destination_dir.dir.openFile("package.json", .{ .mode = .read_write }) catch null;
read_package_json: {
if (package_json_file) |pkg| {
@@ -645,7 +645,7 @@ pub const CreateCommand = struct {
progress.refresh();
package_json_file = null;
- Output.prettyErrorln("Error reading package.json: <r><red>{any}", .{@errorName(err)});
+ Output.prettyErrorln("Error reading package.json: <r><red>{s}", .{@errorName(err)});
break :read_package_json;
};
@@ -666,7 +666,7 @@ pub const CreateCommand = struct {
progress.refresh();
- Output.prettyErrorln("Error reading package.json: <r><red>{any}", .{@errorName(err)});
+ Output.prettyErrorln("Error reading package.json: <r><red>{s}", .{@errorName(err)});
break :read_package_json;
};
// The printer doesn't truncate, so we must do so manually
@@ -1280,7 +1280,7 @@ pub const CreateCommand = struct {
create_react_app_entry_point_path = std.fmt.allocPrint(
ctx.allocator,
- "./{any}",
+ "./{s}",
.{
std.mem.trimLeft(
@@ -1292,7 +1292,7 @@ pub const CreateCommand = struct {
) catch break :bail;
html_writer.print(
- "<script type=\"module\" async src=\"/{any}\"></script>\n{any}",
+ "<script type=\"module\" async src=\"/{s}\"></script>\n{s}",
.{
create_react_app_entry_point_path[2..],
public_index_file_contents[body_closing_tag..],
@@ -1321,7 +1321,7 @@ pub const CreateCommand = struct {
std.os.ftruncate(public_index_html_file.handle, outfile.len + 1) catch break :bail;
bun_bun_for_react_scripts = true;
is_create_react_app = true;
- Output.prettyln("<r><d>[package.json] Added entry point {any} to public/index.html", .{create_react_app_entry_point_path});
+ Output.prettyln("<r><d>[package.json] Added entry point {s} to public/index.html", .{create_react_app_entry_point_path});
}
}
@@ -1352,7 +1352,7 @@ pub const CreateCommand = struct {
strings.contains(script, "react-scripts eject"))
{
if (create_options.verbose) {
- Output.prettyErrorln("<r><d>[package.json] Pruned unnecessary script: {any}<r>", .{script});
+ Output.prettyErrorln("<r><d>[package.json] Pruned unnecessary script: {s}<r>", .{script});
}
continue;
@@ -1451,7 +1451,7 @@ pub const CreateCommand = struct {
var package_json_writer = JSPrinter.NewFileWriter(package_json_file.?);
const written = JSPrinter.printJSON(@TypeOf(package_json_writer), package_json_writer, package_json_expr, &source) catch |err| {
- Output.prettyErrorln("package.json failed to write due to error {any}", .{@errorName(err)});
+ Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
package_json_file = null;
break :process_package_json;
};
@@ -1462,7 +1462,7 @@ pub const CreateCommand = struct {
if (needs.bun_bun_for_nextjs) {
try postinstall_tasks.append(ctx.allocator, InjectionPrefill.bun_bun_for_nextjs_task);
} else if (bun_bun_for_react_scripts) {
- try postinstall_tasks.append(ctx.allocator, try std.fmt.allocPrint(ctx.allocator, "bun bun {any}", .{create_react_app_entry_point_path}));
+ try postinstall_tasks.append(ctx.allocator, try std.fmt.allocPrint(ctx.allocator, "bun bun {s}", .{create_react_app_entry_point_path}));
}
}
}
@@ -1505,12 +1505,12 @@ pub const CreateCommand = struct {
const start_time = std.time.nanoTimestamp();
const install_args = &[_]string{ npm_client.bin, "install" };
Output.flush();
- Output.pretty("\n<r><d>$ <b><cyan>{any}<r><d> install", .{@tagName(npm_client.tag)});
+ Output.pretty("\n<r><d>$ <b><cyan>{s}<r><d> install", .{@tagName(npm_client.tag)});
if (install_args.len > 2) {
for (install_args[2..]) |arg| {
Output.pretty(" ", .{});
- Output.pretty("{any}", .{arg});
+ Output.pretty("{s}", .{arg});
}
}
@@ -1523,7 +1523,7 @@ pub const CreateCommand = struct {
defer {
Output.printErrorln("\n", .{});
Output.printStartEnd(start_time, std.time.nanoTimestamp());
- Output.prettyError(" <r><d>{any} install<r>\n", .{@tagName(npm_client.tag)});
+ Output.prettyError(" <r><d>{s} install<r>\n", .{@tagName(npm_client.tag)});
Output.flush();
Output.print("\n", .{});
@@ -1547,7 +1547,7 @@ pub const CreateCommand = struct {
Output.printError("\n", .{});
Output.printStartEnd(ctx.start_time, std.time.nanoTimestamp());
- Output.prettyErrorln(" <r><d>bun create {any}<r>", .{template});
+ Output.prettyErrorln(" <r><d>bun create {s}<r>", .{template});
Output.flush();
@@ -1604,13 +1604,13 @@ pub const CreateCommand = struct {
Output.pretty(
\\
- \\<b><green>Success!<r> <b>{any}<r> loaded into <b>{any}<r>
+ \\<b><green>Success!<r> <b>{s}<r> loaded into <b>{s}<r>
\\
, .{ display_name, std.fs.path.basename(destination) });
} else {
Output.pretty(
\\
- \\<b>Created <green>{any}<r> project successfully
+ \\<b>Created <green>{s}<r> project successfully
\\
, .{std.fs.path.basename(template)});
}
@@ -1628,7 +1628,7 @@ pub const CreateCommand = struct {
\\
\\<r><d>#<r> When dependencies change, run this to update node_modules.bun:
\\
- \\ <b><cyan>bun bun {any}<r>
+ \\ <b><cyan>bun bun {s}<r>
\\
, .{create_react_app_entry_point_path});
}
@@ -1637,8 +1637,8 @@ pub const CreateCommand = struct {
\\
\\<d>#<r><b> To get started, run:<r>
\\
- \\ <b><cyan>cd {any}<r>
- \\ <b><cyan>{any}<r>
+ \\ <b><cyan>cd {s}<r>
+ \\ <b><cyan>{s}<r>
\\
\\
, .{
@@ -1698,16 +1698,16 @@ pub const Example = struct {
var app_name_buf: [512]u8 = undefined;
pub fn print(examples: []const Example, default_app_name: ?string) void {
for (examples) |example| {
- var app_name = default_app_name orelse (std.fmt.bufPrint(&app_name_buf, "./{any}-app", .{example.name[0..std.math.min(example.name.len, 492)]}) catch unreachable);
+ var app_name = default_app_name orelse (std.fmt.bufPrint(&app_name_buf, "./{s}-app", .{example.name[0..std.math.min(example.name.len, 492)]}) catch unreachable);
if (example.description.len > 0) {
- Output.pretty(" <r># {any}<r>\n <b>bun create <cyan>{any}<r><b> {any}<r>\n<d> \n\n", .{
+ Output.pretty(" <r># {s}<r>\n <b>bun create <cyan>{s}<r><b> {s}<r>\n<d> \n\n", .{
example.description,
example.name,
app_name,
});
} else {
- Output.pretty(" <r><b>bun create <cyan>{any}<r><b> {any}<r>\n\n", .{
+ Output.pretty(" <r><b>bun create <cyan>{s}<r><b> {s}<r>\n\n", .{
example.name,
app_name,
});
@@ -1721,28 +1721,28 @@ pub const Example = struct {
var examples = std.ArrayList(Example).fromOwnedSlice(ctx.allocator, remote_examples);
{
- var folders = [3]std.fs.IterableDir{ std.fs.Dir{ .fd = 0 }, std.fs.Dir{ .fd = 0 }, std.fs.Dir{ .fd = 0 } };
+ var folders = [3]std.fs.IterableDir{ .{.dir = .{ .fd = 0 }}, .{.dir = .{ .fd = 0 }}, .{.dir = .{ .fd = 0 }} };
if (env_loader.map.get("BUN_CREATE_DIR")) |home_dir| {
var parts = [_]string{home_dir};
var outdir_path = filesystem.absBuf(&parts, &home_dir_buf);
- folders[0] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch std.fs.Dir{ .fd = 0 };
+ folders[0] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch .{.dir = .{ .fd = 0 }};
}
{
var parts = [_]string{ filesystem.top_level_dir, BUN_CREATE_DIR };
var outdir_path = filesystem.absBuf(&parts, &home_dir_buf);
- folders[1] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch std.fs.Dir{ .fd = 0 };
+ folders[1] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch .{.dir = .{ .fd = 0 }};
}
if (env_loader.map.get("HOME")) |home_dir| {
var parts = [_]string{ home_dir, BUN_CREATE_DIR };
var outdir_path = filesystem.absBuf(&parts, &home_dir_buf);
- folders[2] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch std.fs.Dir{ .fd = 0 };
+ folders[2] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch .{.dir = .{ .fd = 0 }};
}
// subfolders with package.json
for (folders) |folder_| {
- if (folder_.fd != 0) {
+ if (folder_.dir.fd != 0) {
const folder: std.fs.IterableDir = folder_;
var iter = folder.iterate();
@@ -1764,7 +1764,7 @@ pub const Example = struct {
var path: [:0]u8 = home_dir_buf[0 .. entry.name.len + 1 + "package.json".len :0];
- folder.accessZ(path, .{ .mode = .read_only }) catch continue :loop;
+ folder.dir.accessZ(path, .{ .mode = .read_only }) catch continue :loop;
try examples.append(
Example{
@@ -1802,7 +1802,7 @@ pub const Example = struct {
repository = repository[0..i];
}
- progress.name = try ProgressBuf.pretty("<d>[github] <b>GET<r> <blue>{any}/{any}<r>", .{ owner, repository });
+ progress.name = try ProgressBuf.pretty("<d>[github] <b>GET<r> <blue>{s}/{s}<r>", .{ owner, repository });
refresher.refresh();
var github_api_domain: string = "api.github.com";
@@ -1815,7 +1815,7 @@ pub const Example = struct {
var api_url = URL.parse(
try std.fmt.bufPrint(
&github_repository_url_buf,
- "https://{any}/repos/{any}/{any}/tarball",
+ "https://{s}/repos/{s}/{s}/tarball",
.{ github_api_domain, owner, repository },
),
);
@@ -1825,7 +1825,7 @@ pub const Example = struct {
if (env_loader.map.get("GITHUB_ACCESS_TOKEN")) |access_token| {
if (access_token.len > 0) {
- headers_buf = try std.fmt.allocPrint(ctx.allocator, "Access-TokenBearer {any}", .{access_token});
+ headers_buf = try std.fmt.allocPrint(ctx.allocator, "Access-TokenBearer {s}", .{access_token});
try header_entries.append(
ctx.allocator,
Headers.Kv{
@@ -1887,7 +1887,7 @@ pub const Example = struct {
refresher.refresh();
if (content_type.len > 0) {
- Output.prettyErrorln("<r><red>error<r>: Unexpected content type from GitHub: {any}", .{content_type});
+ Output.prettyErrorln("<r><red>error<r>: Unexpected content type from GitHub: {s}", .{content_type});
Global.crash();
} else {
Output.prettyErrorln("<r><red>error<r>: Invalid response from GitHub (missing content type)", .{});
@@ -1914,7 +1914,7 @@ pub const Example = struct {
var mutable = try ctx.allocator.create(MutableString);
mutable.* = try MutableString.init(ctx.allocator, 2048);
- url = URL.parse(try std.fmt.bufPrint(&url_buf, "https://registry.npmjs.org/@bun-examples/{any}/latest", .{name}));
+ url = URL.parse(try std.fmt.bufPrint(&url_buf, "https://registry.npmjs.org/@bun-examples/{s}/latest", .{name}));
// ensure very stable memory address
var async_http: *HTTP.AsyncHTTP = ctx.allocator.create(HTTP.AsyncHTTP) catch unreachable;
@@ -1956,7 +1956,7 @@ pub const Example = struct {
}
Global.exit(1);
} else {
- Output.prettyErrorln("Error parsing package: <r><red>{any}<r>", .{@errorName(err)});
+ Output.prettyErrorln("Error parsing package: <r><red>{s}<r>", .{@errorName(err)});
Global.exit(1);
}
};
@@ -2058,14 +2058,14 @@ pub const Example = struct {
Global.exit(1);
},
else => {
- Output.prettyErrorln("<r><red>{any}<r> while trying to fetch examples list. Please try again", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>{s}<r> while trying to fetch examples list. Please try again", .{@errorName(err)});
Global.exit(1);
},
}
};
if (response.status_code != 200) {
- Output.prettyErrorln("<r><red>{d}<r> fetching examples :( {any}", .{ response.status_code, mutable.list.items });
+ Output.prettyErrorln("<r><red>{d}<r> fetching examples :( {s}", .{ response.status_code, mutable.list.items });
Global.exit(1);
}
@@ -2080,7 +2080,7 @@ pub const Example = struct {
}
Global.exit(1);
} else {
- Output.prettyErrorln("Error parsing examples: <r><red>{any}<r>", .{@errorName(err)});
+ Output.prettyErrorln("Error parsing examples: <r><red>{s}<r>", .{@errorName(err)});
Global.exit(1);
}
};
@@ -2114,7 +2114,7 @@ pub const Example = struct {
}
}
- Output.prettyErrorln("Corrupt examples data: expected object but received {any}", .{@tagName(examples_object.data)});
+ Output.prettyErrorln("Corrupt examples data: expected object but received {s}", .{@tagName(examples_object.data)});
Global.exit(1);
}
};
@@ -2146,7 +2146,7 @@ pub const CreateListExamplesCommand = struct {
if (env_loader.map.get("HOME")) |homedir| {
Output.prettyln(
- "<d>This command is completely optional. To add a new local template, create a folder in {any}/.bun-create/. To publish a new template, git clone https://github.com/oven-sh/bun, add a new folder to the \"examples\" folder, and submit a PR.<r>",
+ "<d>This command is completely optional. To add a new local template, create a folder in {s}/.bun-create/. To publish a new template, git clone https://github.com/oven-sh/bun, add a new folder to the \"examples\" folder, and submit a PR.<r>",
.{homedir},
);
} else {
@@ -2171,7 +2171,7 @@ const GitHandler = struct {
success = std.atomic.Atomic(u32).init(0);
thread = std.Thread.spawn(.{}, spawnThread, .{ destination, PATH, verbose }) catch |err| {
- Output.prettyErrorln("<r><red>{any}<r>", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>{s}<r>", .{@errorName(err)});
Global.exit(1);
};
}
@@ -2233,7 +2233,7 @@ const GitHandler = struct {
};
if (comptime verbose) {
- Output.prettyErrorln("git backend: {any}", .{git});
+ Output.prettyErrorln("git backend: {s}", .{git});
}
// same names, just comptime known values
diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig
index b24800ef1..70a4519a6 100644
--- a/src/cli/install_completions_command.zig
+++ b/src/cli/install_completions_command.zig
@@ -70,7 +70,7 @@ pub const InstallCompletionsCommand = struct {
}
var completions_dir: string = "";
- var output_dir: std.fs.Dir = found: {
+ var output_dir: std.fs.IterableDir = found: {
var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
var cwd = std.os.getcwd(&cwd_buf) catch {
Output.prettyErrorln("<r><red>error<r>: Could not get current working directory", .{});
@@ -93,14 +93,14 @@ pub const InstallCompletionsCommand = struct {
}
if (!std.fs.path.isAbsolute(completions_dir)) {
- Output.prettyErrorln("<r><red>error:<r> Please pass an absolute path. {any} is invalid", .{completions_dir});
+ Output.prettyErrorln("<r><red>error:<r> Please pass an absolute path. {s} is invalid", .{completions_dir});
Global.exit(fail_exit_code);
}
break :found std.fs.openIterableDirAbsolute(completions_dir, .{
}) catch |err| {
- Output.prettyErrorln("<r><red>error:<r> accessing {any} errored {any}", .{ completions_dir, @errorName(err) });
+ Output.prettyErrorln("<r><red>error:<r> accessing {s} errored {s}", .{ completions_dir, @errorName(err) });
Global.exit(fail_exit_code);
};
}
@@ -302,10 +302,10 @@ pub const InstallCompletionsCommand = struct {
std.debug.assert(completions_dir.len > 0);
- var output_file = output_dir.createFileZ(filename, .{
+ var output_file = output_dir.dir.createFileZ(filename, .{
.truncate = true,
}) catch |err| {
- Output.prettyErrorln("<r><red>error:<r> Could not open {any} for writing: {any}", .{
+ Output.prettyErrorln("<r><red>error:<r> Could not open {s} for writing: {s}", .{
filename,
@errorName(err),
});
@@ -313,7 +313,7 @@ pub const InstallCompletionsCommand = struct {
};
output_file.writeAll(shell.completions()) catch |err| {
- Output.prettyErrorln("<r><red>error:<r> Could not write to {any}: {any}", .{
+ Output.prettyErrorln("<r><red>error:<r> Could not write to {s}: {s}", .{
filename,
@errorName(err),
});
@@ -321,7 +321,7 @@ pub const InstallCompletionsCommand = struct {
};
defer output_file.close();
- output_dir.close();
+ output_dir.dir.close();
// Check if they need to load the zsh completions file into their .zshrc
if (shell == .zsh) {
@@ -394,7 +394,7 @@ pub const InstallCompletionsCommand = struct {
// We need to add it to the end of the file
var remaining = buf[read..];
- var extra = std.fmt.bufPrint(remaining, "\n# bun completions\n[ -s \"{any}\" ] && source \"{any}\"\n", .{
+ var extra = std.fmt.bufPrint(remaining, "\n# bun completions\n[ -s \"{s}\" ] && source \"{s}\"\n", .{
completions_path,
completions_path,
}) catch unreachable;
@@ -406,14 +406,14 @@ pub const InstallCompletionsCommand = struct {
};
if (needs_to_tell_them_to_add_completions_file) {
- Output.prettyErrorln("<r>To enable completions, add this to your .zshrc:\n <b>[ -s \"{any}\" ] && source \"{any}\"", .{
+ Output.prettyErrorln("<r>To enable completions, add this to your .zshrc:\n <b>[ -s \"{s}\" ] && source \"{s}\"", .{
completions_path,
completions_path,
});
}
}
- Output.prettyErrorln("<r><d>Installed completions to {any}/{any}<r>\n", .{
+ Output.prettyErrorln("<r><d>Installed completions to {s}/{s}<r>\n", .{
completions_dir,
filename,
});
diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig
index d81d8cba7..36e062efd 100644
--- a/src/cli/package_manager_command.zig
+++ b/src/cli/package_manager_command.zig
@@ -27,7 +27,7 @@ pub const PackageManagerCommand = struct {
if (load_lockfile == .err) {
if (pm.options.log_level != .silent)
- Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)});
+ Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
Global.exit(1);
}
@@ -59,7 +59,7 @@ pub const PackageManagerCommand = struct {
if (strings.eqlComptime(first, "bin")) {
var output_path = Path.joinAbs(Fs.FileSystem.instance.top_level_dir, .auto, std.mem.span(pm.options.bin_path));
- Output.prettyln("{any}", .{output_path});
+ Output.prettyln("{s}", .{output_path});
if (Output.stdout_descriptor_type == .terminal) {
Output.prettyln("\n", .{});
}
@@ -93,7 +93,7 @@ pub const PackageManagerCommand = struct {
if (load_lockfile == .err) {
if (pm.options.log_level != .silent)
- Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)});
+ Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
Global.exit(1);
}
@@ -114,7 +114,7 @@ pub const PackageManagerCommand = struct {
if (load_lockfile == .err) {
if (pm.options.log_level != .silent)
- Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)});
+ Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
Global.exit(1);
}
@@ -133,7 +133,7 @@ pub const PackageManagerCommand = struct {
if (load_lockfile == .err) {
if (pm.options.log_level != .silent)
- Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)});
+ Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
Global.exit(1);
}
@@ -143,16 +143,16 @@ pub const PackageManagerCommand = struct {
var dir: [bun.MAX_PATH_BYTES]u8 = undefined;
var fd = pm.getCacheDirectory();
var outpath = std.os.getFdPath(fd.fd, &dir) catch |err| {
- Output.prettyErrorln("{any} getting cache directory", .{@errorName(err)});
+ Output.prettyErrorln("{s} getting cache directory", .{@errorName(err)});
Global.crash();
};
if (pm.options.positionals.len > 0 and strings.eqlComptime(pm.options.positionals[0], "rm")) {
std.fs.deleteTreeAbsolute(outpath) catch |err| {
- Output.prettyErrorln("{any} deleting cache directory", .{@errorName(err)});
+ Output.prettyErrorln("{s} deleting cache directory", .{@errorName(err)});
Global.crash();
};
- Output.prettyln("Cache directory deleted:\n {any}", .{outpath});
+ Output.prettyln("Cache directory deleted:\n {s}", .{outpath});
Global.exit(0);
}
Output.writer().writeAll(outpath) catch {};
@@ -173,7 +173,7 @@ pub const PackageManagerCommand = struct {
, .{});
if (first.len > 0) {
- Output.prettyErrorln("\n<red>error<r>: \"{any}\" unknown command\n", .{first});
+ Output.prettyErrorln("\n<red>error<r>: \"{s}\" unknown command\n", .{first});
Output.flush();
Global.exit(1);
diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig
index 7685f466d..161b1362b 100644
--- a/src/cli/run_command.zig
+++ b/src/cli/run_command.zig
@@ -82,7 +82,7 @@ pub const RunCommand = struct {
}
const BUN_BIN_NAME = if (Environment.isDebug) "bun-debug" else "bun";
- const BUN_RUN = std.fmt.comptimePrint("{any} run", .{BUN_BIN_NAME});
+ const BUN_RUN = std.fmt.comptimePrint("{s} run", .{BUN_BIN_NAME});
// Look for invocations of any:
// - yarn run
@@ -244,7 +244,7 @@ pub const RunCommand = struct {
var child_process = std.ChildProcess.init(&argv, allocator);
if (!silent) {
- Output.prettyErrorln("<r><d><magenta>$<r> <d><b>{any}<r>", .{combined_script});
+ Output.prettyErrorln("<r><d><magenta>$<r> <d><b>{s}<r>", .{combined_script});
Output.flush();
}
@@ -257,13 +257,13 @@ pub const RunCommand = struct {
child_process.stdout_behavior = .Inherit;
const result = child_process.spawnAndWait() catch |err| {
- Output.prettyErrorln("<r><red>error<r>: Failed to run script <b>{any}<r> due to error <b>{any}<r>", .{ name, @errorName(err) });
+ Output.prettyErrorln("<r><red>error<r>: Failed to run script <b>{s}<r> due to error <b>{s}<r>", .{ name, @errorName(err) });
Output.flush();
return true;
};
if (result.Exited > 0) {
- Output.prettyErrorln("<r><red>Script error<r> <b>\"{any}\"<r> exited with {d} status<r>", .{ name, result.Exited });
+ Output.prettyErrorln("<r><red>Script error<r> <b>\"{s}\"<r> exited with {d} status<r>", .{ name, result.Exited });
Output.flush();
Global.exit(result.Exited);
@@ -304,30 +304,30 @@ pub const RunCommand = struct {
const rc = bun.C.stat(std.meta.assumeSentinel(executable, 0), &stat);
if (rc == 0) {
if (std.os.S.ISDIR(stat.mode)) {
- Output.prettyErrorln("<r><red>error<r>: Failed to run directory \"<b>{any}<r>\"\n", .{executable});
+ Output.prettyErrorln("<r><red>error<r>: Failed to run directory \"<b>{s}<r>\"\n", .{executable});
Global.exit(1);
}
}
}
}
- Output.prettyErrorln("<r><red>error<r>: Failed to run \"<b>{any}<r>\" due to error <b>{any}<r>", .{ std.fs.path.basename(executable), @errorName(err) });
+ Output.prettyErrorln("<r><red>error<r>: Failed to run \"<b>{s}<r>\" due to error <b>{s}<r>", .{ std.fs.path.basename(executable), @errorName(err) });
Global.exit(1);
};
switch (result) {
.Exited => |code| {
- Output.prettyErrorln("<r><red>error<r> \"<b>{any}<r>\" exited with {d} status<r>", .{ std.fs.path.basename(executable), code });
+ Output.prettyErrorln("<r><red>error<r> \"<b>{s}<r>\" exited with {d} status<r>", .{ std.fs.path.basename(executable), code });
Global.exit(code);
},
.Signal => |sig| {
- Output.prettyErrorln("<r><red>error<r> \"<b>{any}<r>\" signaled {d}<r>", .{ std.fs.path.basename(executable), sig });
+ Output.prettyErrorln("<r><red>error<r> \"<b>{s}<r>\" signaled {d}<r>", .{ std.fs.path.basename(executable), sig });
Global.exit(1);
},
.Stopped => |sig| {
- Output.prettyErrorln("<r><red>error<r> \"<b>{any}<r>\" stopped: {d}<r>", .{ std.fs.path.basename(executable), sig });
+ Output.prettyErrorln("<r><red>error<r> \"<b>{s}<r>\" stopped: {d}<r>", .{ std.fs.path.basename(executable), sig });
Global.exit(1);
},
.Unknown => |sig| {
- Output.prettyErrorln("<r><red>error<r> \"<b>{any}<r>\" stopped: {d}<r>", .{ std.fs.path.basename(executable), sig });
+ Output.prettyErrorln("<r><red>error<r> \"<b>{s}<r>\" stopped: {d}<r>", .{ std.fs.path.basename(executable), sig });
Global.exit(1);
},
}
@@ -379,7 +379,7 @@ pub const RunCommand = struct {
} else {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
}
- Output.prettyErrorln("Error loading directory: \"{any}\"", .{@errorName(err)});
+ Output.prettyErrorln("Error loading directory: \"{s}\"", .{@errorName(err)});
Output.flush();
return err;
} orelse {
@@ -761,7 +761,7 @@ pub const RunCommand = struct {
// "White space after #! is optional."
var shebang_buf: [64]u8 = undefined;
const shebang_size = file.pread(&shebang_buf, 0) catch |err| {
- Output.prettyErrorln("<r><red>error<r>: Failed to read file <b>{any}<r> due to error <b>{any}<r>", .{ file_path, @errorName(err) });
+ Output.prettyErrorln("<r><red>error<r>: Failed to read file <b>{s}<r> due to error <b>{s}<r>", .{ file_path, @errorName(err) });
Global.exit(1);
};
@@ -786,7 +786,7 @@ pub const RunCommand = struct {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
}
- Output.prettyErrorln("<r><red>error<r>: Failed to run <b>{any}<r> due to error <b>{any}<r>", .{
+ Output.prettyErrorln("<r><red>error<r>: Failed to run <b>{s}<r> due to error <b>{s}<r>", .{
std.fs.path.basename(file_path),
@errorName(err),
});
@@ -824,11 +824,11 @@ pub const RunCommand = struct {
if (scripts.count() > 0) {
did_print = true;
- Output.prettyln("<r><blue><b>{any}<r> scripts:<r>\n", .{display_name});
+ Output.prettyln("<r><blue><b>{s}<r> scripts:<r>\n", .{display_name});
while (iterator.next()) |entry| {
Output.prettyln("\n", .{});
- Output.prettyln(" bun run <blue>{any}<r>\n", .{entry.key_ptr.*});
- Output.prettyln(" <d> {any}<r>\n", .{entry.value_ptr.*});
+ Output.prettyln(" bun run <blue>{s}<r>\n", .{entry.key_ptr.*});
+ Output.prettyln(" <d> {s}<r>\n", .{entry.value_ptr.*});
}
Output.prettyln("\n<d>{d} scripts<r>", .{scripts.count()});
@@ -837,7 +837,7 @@ pub const RunCommand = struct {
return true;
} else {
- Output.prettyln("<r><blue><b>{any}<r> has no \"scripts\" in package.json.", .{display_name});
+ Output.prettyln("<r><blue><b>{s}<r> has no \"scripts\" in package.json.", .{display_name});
Output.flush();
return true;
}
@@ -845,7 +845,7 @@ pub const RunCommand = struct {
else => {
if (scripts.get(script_name_to_search)) |script_content| {
// allocate enough to hold "post${scriptname}"
- var temp_script_buffer = try std.fmt.allocPrint(ctx.allocator, "ppre{any}", .{script_name_to_search});
+ var temp_script_buffer = try std.fmt.allocPrint(ctx.allocator, "ppre{s}", .{script_name_to_search});
if (scripts.get(temp_script_buffer[1..])) |prescript| {
if (!try runPackageScript(
@@ -918,13 +918,13 @@ pub const RunCommand = struct {
// var file = std.fs.openFileAbsoluteZ(destination, .{ .mode = .read_only }) catch |err| {
// if (!log_errors) return false;
- // Output.prettyErrorln("<r>error: <red>{any}<r> opening file: \"{any}\"", .{ err, std.mem.span(destination) });
+ // Output.prettyErrorln("<r>error: <red>{s}<r> opening file: \"{s}\"", .{ err, std.mem.span(destination) });
// Output.flush();
// return err;
// };
// // var outbuf = std.os.getFdPath(file.handle, &path_buf2) catch |err| {
// // if (!log_errors) return false;
- // // Output.prettyErrorln("<r>error: <red>{any}<r> resolving file: \"{any}\"", .{ err, std.mem.span(destination) });
+ // // Output.prettyErrorln("<r>error: <red>{s}<r> resolving file: \"{s}\"", .{ err, std.mem.span(destination) });
// // Output.flush();
// // return err;
// // };
@@ -943,7 +943,7 @@ pub const RunCommand = struct {
}
if (comptime log_errors) {
- Output.prettyError("<r><red>error:<r> Missing script \"<b>{any}<r>\"\n", .{script_name_to_search});
+ Output.prettyError("<r><red>error:<r> Missing script \"<b>{s}<r>\"\n", .{script_name_to_search});
Global.exit(0);
}
diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig
index 5f11ce889..455f51dbe 100644
--- a/src/cli/test_command.zig
+++ b/src/cli/test_command.zig
@@ -180,7 +180,7 @@ const Scanner = struct {
name: strings.StringOrTinyString,
};
- fn readDirWithName(this: *Scanner, name: string, handle: ?std.fs.Dir) !*FileSystem.RealFS.EntriesOption {
+ fn readDirWithName(this: *Scanner, name: string, handle: ?std.fs.IterableDir) !*FileSystem.RealFS.EntriesOption {
return try this.fs.fs.readDirectoryWithIterator(name, handle, *Scanner, this);
}
@@ -215,9 +215,9 @@ const Scanner = struct {
var path2 = this.fs.absBuf(parts2, &this.open_dir_buf);
this.open_dir_buf[path2.len] = 0;
var pathZ = this.open_dir_buf[path2.len - entry.name.slice().len .. path2.len :0];
- var child_dir = dir.openIterableDirZ(pathZ, .{}) catch continue;
+ var child_dir = bun.openIterableDirZFromDir(dir, pathZ) catch continue;
path2 = this.fs.dirname_store.append(string, path2) catch unreachable;
- FileSystem.setMaxFd(child_dir.fd);
+ FileSystem.setMaxFd(child_dir.dir.fd);
_ = this.readDirWithName(path2, child_dir) catch continue;
}
}
diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig
index 90e3fc376..44230a488 100644
--- a/src/cli/upgrade_command.zig
+++ b/src/cli/upgrade_command.zig
@@ -62,7 +62,7 @@ pub const Version = struct {
return std.fmt.allocPrint(
bun.default_allocator,
- "bun-canary-timestamp-{any}",
+ "bun-canary-timestamp-{s}",
.{
std.fmt.fmtSliceHexLower(
std.mem.asBytes(
@@ -92,7 +92,7 @@ pub const Version = struct {
const current_version: string = "bun-v" ++ Global.package_json_version;
- pub export const Bun__githubURL: [*:0]const u8 = std.fmt.comptimePrint("https://github.com/oven-sh/bun/release/bun-v{any}/{any}", .{
+ pub export const Bun__githubURL: [*:0]const u8 = std.fmt.comptimePrint("https://github.com/oven-sh/bun/release/bun-v{s}/{s}", .{
Global.package_json_version,
zip_filename,
});
@@ -133,7 +133,7 @@ pub const UpgradeCheckerThread = struct {
if (!version.isCurrent()) {
if (version.name()) |name| {
- Output.prettyErrorln("\n<r><d>bun v{any} is out. Run <b><cyan>bun upgrade<r> to upgrade.\n", .{name});
+ Output.prettyErrorln("\n<r><d>bun v{s} is out. Run <b><cyan>bun upgrade<r> to upgrade.\n", .{name});
Output.flush();
}
}
@@ -144,7 +144,7 @@ pub const UpgradeCheckerThread = struct {
fn run(env_loader: *DotEnv.Loader) void {
_run(env_loader) catch |err| {
if (Environment.isDebug) {
- std.debug.print("\n[UpgradeChecker] ERROR: {any}\n", .{@errorName(err)});
+ std.debug.print("\n[UpgradeChecker] ERROR: {s}\n", .{@errorName(err)});
}
};
}
@@ -193,7 +193,7 @@ pub const UpgradeCommand = struct {
var api_url = URL.parse(
try std.fmt.bufPrint(
&github_repository_url_buf,
- "https://{any}/repos/Jarred-Sumner/bun-releases-for-updater/releases/latest",
+ "https://{s}/repos/Jarred-Sumner/bun-releases-for-updater/releases/latest",
.{
github_api_domain,
},
@@ -202,7 +202,7 @@ pub const UpgradeCommand = struct {
if (env_loader.map.get("GITHUB_ACCESS_TOKEN")) |access_token| {
if (access_token.len > 0) {
- headers_buf = try std.fmt.allocPrint(allocator, default_github_headers ++ "Access-TokenBearer {any}", .{access_token});
+ headers_buf = try std.fmt.allocPrint(allocator, default_github_headers ++ "Access-TokenBearer {s}", .{access_token});
try header_entries.append(
allocator,
Headers.Kv{
@@ -262,7 +262,7 @@ pub const UpgradeCommand = struct {
}
Global.exit(1);
} else {
- Output.prettyErrorln("Error parsing releases from GitHub: <r><red>{any}<r>", .{@errorName(err)});
+ Output.prettyErrorln("Error parsing releases from GitHub: <r><red>{s}<r>", .{@errorName(err)});
Global.exit(1);
}
}
@@ -294,7 +294,7 @@ pub const UpgradeCommand = struct {
refresher.refresh();
const json_type: js_ast.Expr.Tag = @as(js_ast.Expr.Tag, expr.data);
- Output.prettyErrorln("JSON error - expected an object but received {any}", .{@tagName(json_type)});
+ Output.prettyErrorln("JSON error - expected an object but received {s}", .{@tagName(json_type)});
Global.exit(1);
}
@@ -312,7 +312,7 @@ pub const UpgradeCommand = struct {
progress.end();
refresher.refresh();
- Output.prettyErrorln("JSON Error parsing releases from GitHub: <r><red>tag_name<r> is missing?\n{any}", .{metadata_body.list.items});
+ Output.prettyErrorln("JSON Error parsing releases from GitHub: <r><red>tag_name<r> is missing?\n{s}", .{metadata_body.list.items});
Global.exit(1);
}
@@ -327,7 +327,7 @@ pub const UpgradeCommand = struct {
if (asset.asProperty("content_type")) |content_type| {
const content_type_ = (content_type.expr.asString(allocator)) orelse continue;
if (comptime Environment.isDebug) {
- Output.prettyln("Content-type: {any}", .{content_type_});
+ Output.prettyln("Content-type: {s}", .{content_type_});
Output.flush();
}
@@ -338,7 +338,7 @@ pub const UpgradeCommand = struct {
if (name_.expr.asString(allocator)) |name| {
if (comptime Environment.isDebug) {
const filename = if (!use_profile) Version.zip_filename else Version.profile_zip_filename;
- Output.prettyln("Comparing {any} vs {any}", .{ name, filename });
+ Output.prettyln("Comparing {s} vs {s}", .{ name, filename });
Output.flush();
}
@@ -347,7 +347,7 @@ pub const UpgradeCommand = struct {
version.zip_url = (asset.asProperty("browser_download_url") orelse break :get_asset).expr.asString(allocator) orelse break :get_asset;
if (comptime Environment.isDebug) {
- Output.prettyln("Found Zip {any}", .{version.zip_url});
+ Output.prettyln("Found Zip {s}", .{version.zip_url});
Output.flush();
}
@@ -366,7 +366,7 @@ pub const UpgradeCommand = struct {
progress.end();
refresher.refresh();
if (version.name()) |name| {
- Output.prettyErrorln("bun v{any} is out, but not for this platform ({any}) yet.", .{
+ Output.prettyErrorln("bun v{s} is out, but not for this platform ({s}) yet.", .{
name, Version.triplet,
});
}
@@ -383,7 +383,7 @@ pub const UpgradeCommand = struct {
@setCold(true);
_exec(ctx) catch |err| {
- Output.prettyErrorln("<r>bun upgrade failed with error: <red><b>{any}<r>\n\n<cyan>Please upgrade manually<r>:\n <b>curl -fsSL https://bun.sh/install | bash<r>\n\n", .{@errorName(err)});
+ Output.prettyErrorln("<r>bun upgrade failed with error: <red><b>{s}<r>\n\n<cyan>Please upgrade manually<r>:\n <b>curl -fsSL https://bun.sh/install | bash<r>\n\n", .{@errorName(err)});
Global.exit(1);
};
}
@@ -425,7 +425,7 @@ pub const UpgradeCommand = struct {
if (version.name() != null and version.isCurrent()) {
Output.prettyErrorln(
- "<r><green>Congrats!<r> You're already on the latest version of bun <d>(which is v{any})<r>",
+ "<r><green>Congrats!<r> You're already on the latest version of bun <d>(which is v{s})<r>",
.{
version.name().?,
},
@@ -441,7 +441,7 @@ pub const UpgradeCommand = struct {
Global.exit(1);
}
- Output.prettyErrorln("<r><b>bun <cyan>v{any}<r> is out<r>! You're on <blue>{any}<r>\n", .{ version.name().?, Global.package_json_version });
+ Output.prettyErrorln("<r><b>bun <cyan>v{s}<r> is out<r>! You're on <blue>{s}<r>\n", .{ version.name().?, Global.package_json_version });
Output.flush();
} else {
version = Version{
@@ -481,7 +481,7 @@ pub const UpgradeCommand = struct {
\\<r><red>error:<r> Canary builds are not available for this platform yet
\\
\\ Release: <cyan>https://github.com/oven-sh/bun/releases/tag/canary<r>
- \\ Filename: <b>{any}<r>
+ \\ Filename: <b>{s}<r>
\\
, .{
Version.zip_filename,
@@ -511,10 +511,11 @@ pub const UpgradeCommand = struct {
const version_name = version.name().?;
var save_dir_ = filesystem.tmpdir();
- var save_dir = save_dir_.makeOpenPathIterable(version_name, .{}) catch {
+ var save_dir_iterable = save_dir_.makeOpenPathIterable(version_name, .{}) catch {
Output.prettyErrorln("<r><red>error:<r> Failed to open temporary directory", .{});
Global.exit(1);
};
+ var save_dir = save_dir_iterable.dir;
var tmpdir_path = std.os.getFdPath(save_dir.fd, &tmpdir_path_buf) catch {
Output.prettyErrorln("<r><red>error:<r> Failed to read temporary directory", .{});
Global.exit(1);
@@ -529,14 +530,14 @@ pub const UpgradeCommand = struct {
if (use_profile) profile_exe_subpath else exe_subpath;
var zip_file = save_dir.createFileZ(tmpname, .{ .truncate = true }) catch |err| {
- Output.prettyErrorln("<r><red>error:<r> Failed to open temp file {any}", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to open temp file {s}", .{@errorName(err)});
Global.exit(1);
};
{
_ = zip_file.writeAll(bytes) catch |err| {
save_dir.deleteFileZ(tmpname) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to write to temp file {any}", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to write to temp file {s}", .{@errorName(err)});
Global.exit(1);
};
zip_file.close();
@@ -572,7 +573,7 @@ pub const UpgradeCommand = struct {
const unzip_result = unzip_process.spawnAndWait() catch |err| {
save_dir.deleteFileZ(tmpname) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to spawn unzip due to {any}.", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to spawn unzip due to {s}.", .{@errorName(err)});
Global.exit(1);
};
@@ -595,7 +596,7 @@ pub const UpgradeCommand = struct {
.max_output_bytes = 512,
}) catch |err| {
save_dir_.deleteTree(version_name) catch {};
- Output.prettyErrorln("<r><red>error<r> Failed to verify bun {any}<r>)", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error<r> Failed to verify bun {s}<r>)", .{@errorName(err)});
Global.exit(1);
};
@@ -617,7 +618,7 @@ pub const UpgradeCommand = struct {
save_dir_.deleteTree(version_name) catch {};
Output.prettyErrorln(
- "<r><red>error<r>: The downloaded version of bun (<red>{any}<r>) doesn't match the expected version (<b>{any}<r>)<r>. Cancelled upgrade",
+ "<r><red>error<r>: The downloaded version of bun (<red>{s}<r>) doesn't match the expected version (<b>{s}<r>)<r>. Cancelled upgrade",
.{
version_string[0..@min(version_string.len, 512)],
version_name,
@@ -637,24 +638,25 @@ pub const UpgradeCommand = struct {
// safe because the slash will no longer be in use
current_executable_buf[target_dir_.len] = 0;
var target_dirname = current_executable_buf[0..target_dir_.len :0];
- var target_dir = std.fs.openIterableDirAbsoluteZ(target_dirname, .{}) catch |err| {
+ var target_dir_iteratable = std.fs.openIterableDirAbsoluteZ(target_dirname, .{}) catch |err| {
save_dir_.deleteTree(version_name) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to open bun's install directory {any}", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to open bun's install directory {s}", .{@errorName(err)});
Global.exit(1);
};
+ var target_dir = target_dir_iteratable.dir;
if (use_canary) {
// Check if the versions are the same
const target_stat = target_dir.statFile(target_filename) catch |err| {
save_dir_.deleteTree(version_name) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to stat target bun {any}", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to stat target bun {s}", .{@errorName(err)});
Global.exit(1);
};
const dest_stat = save_dir.statFile(exe) catch |err| {
save_dir_.deleteTree(version_name) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to stat source bun {any}", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to stat source bun {s}", .{@errorName(err)});
Global.exit(1);
};
@@ -663,13 +665,13 @@ pub const UpgradeCommand = struct {
const target_hash = std.hash.Wyhash.hash(0, target_dir.readFile(target_filename, input_buf) catch |err| {
save_dir_.deleteTree(version_name) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to read target bun {any}", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to read target bun {s}", .{@errorName(err)});
Global.exit(1);
});
const source_hash = std.hash.Wyhash.hash(0, save_dir.readFile(exe, input_buf) catch |err| {
save_dir_.deleteTree(version_name) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to read source bun {any}", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to read source bun {s}", .{@errorName(err)});
Global.exit(1);
});
@@ -687,7 +689,7 @@ pub const UpgradeCommand = struct {
if (env_loader.map.get("BUN_DRY_RUN") == null) {
C.moveFileZ(save_dir.fd, exe, target_dir.fd, target_filename) catch |err| {
save_dir_.deleteTree(version_name) catch {};
- Output.prettyErrorln("<r><red>error:<r> Failed to move new version of bun due to {any}. You could try the install script instead:\n curl -fsSL https://bun.sh/install | bash", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>error:<r> Failed to move new version of bun due to {s}. You could try the install script instead:\n curl -fsSL https://bun.sh/install | bash", .{@errorName(err)});
Global.exit(1);
};
}
@@ -724,7 +726,7 @@ pub const UpgradeCommand = struct {
\\
\\Changelog:
\\
- \\ https://github.com/oven-sh/bun/compare/{any}...main
+ \\ https://github.com/oven-sh/bun/compare/{s}...main
\\
,
.{Environment.git_sha},
@@ -735,7 +737,7 @@ pub const UpgradeCommand = struct {
Output.prettyErrorln(
\\<r> Upgraded.
\\
- \\<b><green>Welcome to bun v{any}!<r>
+ \\<b><green>Welcome to bun v{s}!<r>
\\
\\Report any bugs:
\\
@@ -743,11 +745,11 @@ pub const UpgradeCommand = struct {
\\
\\What's new:
\\
- \\ <cyan>https://github.com/oven-sh/bun/releases/tag/{any}<r>
+ \\ <cyan>https://github.com/oven-sh/bun/releases/tag/{s}<r>
\\
\\Changelog:
\\
- \\ https://github.com/oven-sh/bun/compare/{any}...{any}
+ \\ https://github.com/oven-sh/bun/compare/{s}...{s}
\\
,
.{ version_name, version.tag, bun_v, version.tag },
diff --git a/src/comptime_string_map.zig b/src/comptime_string_map.zig
index 1c159d2e8..b19e2f5f8 100644
--- a/src/comptime_string_map.zig
+++ b/src/comptime_string_map.zig
@@ -8,7 +8,7 @@ const strings = @import("./string_immutable.zig");
///
/// `kvs` expects a list literal containing list literals or an array/slice of structs
/// where `.@"0"` is the `[]const u8` key and `.@"1"` is the associated value of type `V`.
-/// TODO: https://github.com/ziglang/zig/issues/4335
+
pub fn ComptimeStringMapWithKeyType(comptime KeyType: type, comptime V: type, comptime kvs_list: anytype) type {
const KV = struct {
key: []const KeyType,
diff --git a/src/defines.zig b/src/defines.zig
index ce3410204..a0c885f34 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -79,9 +79,9 @@ pub const DefineData = struct {
while (splitter.next()) |part| {
if (!js_lexer.isIdentifier(part)) {
if (strings.eql(part, entry.key_ptr)) {
- try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{any}\" must be a valid identifier", .{entry.key_ptr.*});
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{s}\" must be a valid identifier", .{entry.key_ptr.*});
} else {
- try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{any}\" contains invalid identifier \"{any}\"", .{ part, entry.value_ptr.* });
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.value_ptr.* });
}
break;
}
diff --git a/src/deps/picohttp.zig b/src/deps/picohttp.zig
index 115cc976b..d9c4ad2cb 100644
--- a/src/deps/picohttp.zig
+++ b/src/deps/picohttp.zig
@@ -21,15 +21,15 @@ pub const Header = struct {
pub fn format(self: Header, comptime _: []const u8, _: fmt.FormatOptions, writer: anytype) !void {
if (Output.enable_ansi_colors) {
if (self.isMultiline()) {
- try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{any}", true), .{self.value});
+ try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{s}", true), .{self.value});
} else {
- try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{any}<r><d>: <r>{any}", true), .{ self.name, self.value });
+ try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{s}<r><d>: <r>{s}", true), .{ self.name, self.value });
}
} else {
if (self.isMultiline()) {
- try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{any}", false), .{self.value});
+ try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{s}", false), .{self.value});
} else {
- try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{any}<r><d>: <r>{any}", false), .{ self.name, self.value });
+ try fmt.format(writer, comptime Output.prettyFmt("<r><cyan>{s}<r><d>: <r>{s}", false), .{ self.name, self.value });
}
}
}
@@ -74,7 +74,7 @@ pub const Request = struct {
}
pub fn format(self: Request, comptime _: []const u8, _: fmt.FormatOptions, writer: anytype) !void {
- try fmt.format(writer, "{any} {any}\n", .{ self.method, self.path });
+ try fmt.format(writer, "{any} {s}\n", .{ self.method, self.path });
for (self.headers) |header| {
_ = try writer.write("\t");
try fmt.format(writer, "{any}\n", .{header});
diff --git a/src/deps/zig-clap/clap.zig b/src/deps/zig-clap/clap.zig
index 17ce0c7da..a05703d3f 100644
--- a/src/deps/zig-clap/clap.zig
+++ b/src/deps/zig-clap/clap.zig
@@ -221,13 +221,13 @@ pub const Diagnostic = struct {
Arg{ .prefix = "", .name = diag.arg };
switch (err) {
- error.DoesntTakeValue => try stream.print("The argument '{any}{any}' does not take a value\n", .{ a.prefix, a.name }),
- error.MissingValue => try stream.print("The argument '{any}{any}' requires a value but none was supplied\n", .{ a.prefix, a.name }),
+ error.DoesntTakeValue => try stream.print("The argument '{s}{s}' does not take a value\n", .{ a.prefix, a.name }),
+ error.MissingValue => try stream.print("The argument '{s}{s}' requires a value but none was supplied\n", .{ a.prefix, a.name }),
error.InvalidArgument => if (a.prefix.len > 0 and a.name.len > 0)
- try stream.print("Invalid argument '{any}{any}'\n", .{ a.prefix, a.name })
+ try stream.print("Invalid argument '{s}{s}'\n", .{ a.prefix, a.name })
else
try stream.print("Failed to parse argument due to unexpected single dash\n", .{}),
- else => try stream.print("Error while parsing arguments: {any}\n", .{@errorName(err)}),
+ else => try stream.print("Error while parsing arguments: {s}\n", .{@errorName(err)}),
}
}
};
@@ -386,9 +386,9 @@ fn printParam(
switch (param.takes_value) {
.none => {},
- .one => try stream.print(" <{any}>", .{valueText(context, param)}),
- .one_optional => try stream.print(" <{any}>?", .{valueText(context, param)}),
- .many => try stream.print(" <{any}>...", .{valueText(context, param)}),
+ .one => try stream.print(" <{s}>", .{try valueText(context, param)}),
+ .one_optional => try stream.print(" <{s}>?", .{try valueText(context, param)}),
+ .many => try stream.print(" <{s}>...", .{try valueText(context, param)}),
}
}
@@ -489,12 +489,12 @@ pub fn usageFull(
if (cos.bytes_written != 0)
try cs.writeByte(' ');
- try cs.print("[{any}{any}", .{ prefix, name });
+ try cs.print("[{s}{s}", .{ prefix, name });
switch (param.takes_value) {
.none => {},
- .one => try cs.print(" <{any}>", .{try valueText(context, param)}),
- .one_optional => try cs.print(" <{any}>?", .{try valueText(context, param)}),
- .many => try cs.print(" <{any}>...", .{try valueText(context, param)}),
+ .one => try cs.print(" <{s}>", .{try valueText(context, param)}),
+ .one_optional => try cs.print(" <{s}>?", .{try valueText(context, param)}),
+ .many => try cs.print(" <{s}>...", .{try valueText(context, param)}),
}
try cs.writeByte(']');
@@ -503,7 +503,7 @@ pub fn usageFull(
if (positional) |p| {
if (cos.bytes_written != 0)
try cs.writeByte(' ');
- try cs.print("<{any}>", .{try valueText(context, p)});
+ try cs.print("<{s}>", .{try valueText(context, p)});
}
}
diff --git a/src/env_loader.zig b/src/env_loader.zig
index 5363a443e..1d6018088 100644
--- a/src/env_loader.zig
+++ b/src/env_loader.zig
@@ -534,7 +534,7 @@ pub const Loader = struct {
const value: string = entry.value_ptr.*;
if (strings.startsWith(entry.key_ptr.*, prefix)) {
- const key_str = std.fmt.allocPrint(key_allocator, "process.env.{any}", .{entry.key_ptr.*}) catch unreachable;
+ const key_str = std.fmt.allocPrint(key_allocator, "process.env.{s}", .{entry.key_ptr.*}) catch unreachable;
e_strings[0] = js_ast.E.String{
.data = if (value.len > 0)
@@ -720,9 +720,9 @@ pub const Loader = struct {
if (yes) {
loaded_i += 1;
if (count == 1 or (loaded_i >= count and count > 1)) {
- Output.prettyError("\"{any}\"", .{all[i]});
+ Output.prettyError("\"{s}\"", .{all[i]});
} else {
- Output.prettyError("\"{any}\", ", .{all[i]});
+ Output.prettyError("\"{s}\", ", .{all[i]});
}
}
}
@@ -744,7 +744,7 @@ pub const Loader = struct {
},
error.FileBusy, error.DeviceBusy, error.AccessDenied, error.IsDir => {
if (!this.quiet) {
- Output.prettyErrorln("<r><red>{any}<r> error loading {any} file", .{ @errorName(err), base });
+ Output.prettyErrorln("<r><red>{s}<r> error loading {s} file", .{ @errorName(err), base });
}
// prevent retrying
diff --git a/src/feature_flags.zig b/src/feature_flags.zig
index 3d016efb6..602d14fbd 100644
--- a/src/feature_flags.zig
+++ b/src/feature_flags.zig
@@ -105,3 +105,5 @@ pub const disable_lolhtml = false;
/// other ipv4 hosts. This is a workaround for that.
/// "localhost" fails to connect.
pub const hardcode_localhost_to_127_0_0_1 = true;
+
+pub const dump_recv_to_stdout = true;
diff --git a/src/fs.zig b/src/fs.zig
index 97e296074..a421803a6 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -67,7 +67,7 @@ pub const BytecodeCacheFetcher = struct {
this.fd = @truncate(StoredFileDescriptorType, cache_file.handle);
return @truncate(StoredFileDescriptorType, cache_file.handle);
} else |err| {
- Output.prettyWarnln("<r><yellow>Warn<r>: Bytecode caching unavailable due to error: {any}", .{@errorName(err)});
+ Output.prettyWarnln("<r><yellow>Warn<r>: Bytecode caching unavailable due to error: {s}", .{@errorName(err)});
Output.flush();
this.fd = 0;
return null;
@@ -103,7 +103,7 @@ pub const FileSystem = struct {
pub fn tmpdir(fs: *FileSystem) std.fs.Dir {
if (tmpdir_handle == null) {
- tmpdir_handle = fs.fs.openTmpDir() catch unreachable;
+ tmpdir_handle = (fs.fs.openTmpDir() catch unreachable).dir;
}
return tmpdir_handle.?;
@@ -117,7 +117,7 @@ pub const FileSystem = struct {
pub fn tmpname(_: *const FileSystem, extname: string, buf: []u8, hash: u64) ![*:0]u8 {
// PRNG was...not so random
- return try std.fmt.bufPrintZ(buf, ".{x}{any}", .{ @truncate(u64, @intCast(u128, hash) * @intCast(u128, std.time.nanoTimestamp())), extname });
+ return try std.fmt.bufPrintZ(buf, ".{any}{s}", .{ bun.fmt.x(@truncate(u64, @intCast(u128, hash) * @intCast(u128, std.time.nanoTimestamp()))), extname });
}
pub var max_fd: FileDescriptorType = 0;
@@ -256,16 +256,16 @@ pub const FileSystem = struct {
if (comptime FeatureFlags.verbose_fs) {
if (_kind == .dir) {
- Output.prettyln(" + {any}/", .{stored_name});
+ Output.prettyln(" + {s}/", .{stored_name});
} else {
- Output.prettyln(" + {any}", .{stored_name});
+ Output.prettyln(" + {s}", .{stored_name});
}
}
}
pub fn init(dir: string) DirEntry {
if (comptime FeatureFlags.verbose_fs) {
- Output.prettyln("\n {any}", .{dir});
+ Output.prettyln("\n {s}", .{dir});
}
return DirEntry{ .dir = dir, .data = EntryMap{} };
@@ -561,7 +561,7 @@ pub const FileSystem = struct {
pub var tmpdir_path: []const u8 = undefined;
pub var tmpdir_path_set = false;
- pub fn openTmpDir(_: *const RealFS) !std.fs.Dir {
+ pub fn openTmpDir(_: *const RealFS) !std.fs.IterableDir {
if (!tmpdir_path_set) {
tmpdir_path = std.os.getenvZ("BUN_TMPDIR") orelse std.os.getenvZ("TMPDIR") orelse PLATFORM_TMP_DIR;
tmpdir_path_set = true;
@@ -611,8 +611,8 @@ pub const FileSystem = struct {
var tmpdir_ = try rfs.openTmpDir();
const flags = std.os.O.CREAT | std.os.O.RDWR | std.os.O.CLOEXEC;
- this.dir_fd = tmpdir_.fd;
- this.fd = try std.os.openatZ(tmpdir_.fd, name, flags, std.os.S.IRWXO);
+ this.dir_fd = tmpdir_.dir.fd;
+ this.fd = try std.os.openatZ(tmpdir_.dir.fd, name, flags, std.os.S.IRWXO);
}
pub fn promote(this: *Tmpfile, from_name: [*:0]const u8, destination_fd: std.os.fd_t, name: [*:0]const u8) !void {
@@ -729,10 +729,10 @@ pub const FileSystem = struct {
) !string {
return try std.fmt.bufPrint(
&hash_name_buf,
- "{any}-{x}",
+ "{s}-{any}",
.{
basename,
- this.hash(),
+ bun.fmt.x(this.hash()),
},
);
}
@@ -1285,7 +1285,7 @@ pub const Path = struct {
}
pub fn generateKey(p: *Path, allocator: std.mem.Allocator) !string {
- return try std.fmt.allocPrint(allocator, "{any}://{any}", .{ p.namespace, p.text });
+ return try std.fmt.allocPrint(allocator, "{s}://{s}", .{ p.namespace, p.text });
}
pub fn init(text: string) Path {
diff --git a/src/global.zig b/src/global.zig
index fe4f5246a..ff0d7a450 100644
--- a/src/global.zig
+++ b/src/global.zig
@@ -31,6 +31,55 @@ pub const path = @import("./resolver/resolve_path.zig");
pub const fmt = struct {
pub usingnamespace std.fmt;
+fn formatSliceHexImpl(comptime case: fmt.Case) type {
+ const charset = "0123456789" ++ if (case == .upper) "ABCDEF" else "abcdef";
+
+ return struct {
+ pub fn f(
+ bytes: []const u8,
+ comptime _: []const u8,
+ _: std.fmt.FormatOptions,
+ writer: anytype,
+ ) !void {
+ var buf: [2]u8 = undefined;
+
+ for (bytes) |c| {
+ buf[0] = charset[c >> 4];
+ buf[1] = charset[c & 15];
+ try writer.writeAll(&buf);
+ }
+ }
+ };
+}
+
+
+const formatSliceHexLower = formatSliceHexImpl(.lower).f;
+const formatSliceHexUpper = formatSliceHexImpl(.upper).f;
+
+
+
+
+
+ pub fn x(to_lower_hex: anytype) std.fmt.Formatter(formatSliceHexLower) {
+ return std.fmt.Formatter(formatSliceHexLower){.data = asBytesWithType(@TypeOf(to_lower_hex), to_lower_hex)};
+ }
+
+ fn asBytesWithType(comptime Type: type, value: Type) []const u8 {
+ if (comptime Type == i64 or Type == u64 or Type == f64) {
+ return @bitCast([8]u8, value)[0..];
+ } else if (comptime Type == i32 or Type == u32 or Type == f32) {
+ return @bitCast([4]u8, value)[0..];
+ } else if (comptime Type == i16 or Type == u16) {
+ return @bitCast([2]u8, value)[0..];
+ } else if (comptime Type == i8 or Type == u8) {
+ return @bitCast([1]u8, value)[0..];
+ } else {
+ return std.mem.asBytes(value);
+ }
+
+ }
+
+
pub const SizeFormatter = struct {
value: usize = 0,
pub fn format(self: SizeFormatter, comptime _: []const u8, opts: fmt.FormatOptions, writer: anytype) !void {
@@ -378,3 +427,26 @@ pub const Bunfig = @import("./bunfig.zig").Bunfig;
pub const HTTPThead = @import("./http_client_async.zig").HTTPThread;
pub const Analytics = @import("./analytics/analytics_thread.zig");
+
+
+pub fn allocateFrame(comptime Type: type) error{OutOfMemory}!*Type {
+ const Mimalloc = @import("./memory_allocator.zig");
+ if (Mimalloc.alignedAlloc(@sizeOf(Type), @alignOf(Type))) |ptr| {
+ return @ptrCast(*Type, ptr);
+ } else {
+ return error.OutOfMemory;
+ }
+}
+
+pub fn openIterableDirZ(fd: std.os.fd_t, path_: [:0]const u8) !std.fs.IterableDir {
+ const dir_fd = try std.os.openatZ(fd, path_, std.os.O.DIRECTORY, 0);
+ return std.fs.IterableDir{.dir = .{.fd = dir_fd} };
+}
+
+pub fn openIterableDirZFromDir(dir: std.fs.Dir, path_: [:0]const u8) !std.fs.IterableDir {
+ return openIterableDirZ(dir.fd, path_);
+}
+
+pub fn openIterableDirZFromIterableDir(dir: std.fs.IterableDir, path_: [:0]const u8) !std.fs.IterableDir {
+ return openIterableDirZFromDir(dir.dir, path_);
+}
\ No newline at end of file
diff --git a/src/http.zig b/src/http.zig
index e22e94cc0..907801e5b 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -238,14 +238,14 @@ pub const RequestContext = struct {
display_port = "80";
}
}
- this.origin = ZigURL.parse(std.fmt.allocPrint(this.allocator, "{any}://{any}:{any}/", .{ display_protocol, display_host, display_port }) catch unreachable);
+ this.origin = ZigURL.parse(std.fmt.allocPrint(this.allocator, "{s}://{s}:{s}/", .{ display_protocol, display_host, display_port }) catch unreachable);
}
}
pub fn getFullURL(this: *RequestContext) [:0]const u8 {
if (this.full_url.len == 0) {
if (this.origin.isAbsolute()) {
- this.full_url = std.fmt.allocPrintZ(this.allocator, "{any}{any}", .{ this.origin.origin, this.request.path }) catch unreachable;
+ this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}{s}", .{ this.origin.origin, this.request.path }) catch unreachable;
} else {
this.full_url = this.allocator.dupeZ(u8, this.request.path) catch unreachable;
}
@@ -257,9 +257,9 @@ pub const RequestContext = struct {
pub fn getFullURLForSourceMap(this: *RequestContext) [:0]const u8 {
if (this.full_url.len == 0) {
if (this.origin.isAbsolute()) {
- this.full_url = std.fmt.allocPrintZ(this.allocator, "{any}{any}.map", .{ this.origin.origin, this.request.path }) catch unreachable;
+ this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}{s}.map", .{ this.origin.origin, this.request.path }) catch unreachable;
} else {
- this.full_url = std.fmt.allocPrintZ(this.allocator, "{any}.map", .{this.request.path}) catch unreachable;
+ this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}.map", .{this.request.path}) catch unreachable;
}
}
@@ -378,7 +378,7 @@ pub const RequestContext = struct {
if (this.bundler.options.node_modules_bundle_url.len > 0) {
add_preload: {
- const node_modules_preload_header_value = std.fmt.bufPrint(remaining, "<{any}>; rel=modulepreload", .{
+ const node_modules_preload_header_value = std.fmt.bufPrint(remaining, "<{s}>; rel=modulepreload", .{
this.bundler.options.node_modules_bundle_url,
}) catch break :add_preload;
@@ -480,8 +480,8 @@ pub const RequestContext = struct {
// On Windows, we don't keep the directory handle open forever because Windows doesn't like that.
const public_dir: std.fs.Dir = this.bundler.options.routes.static_dir_handle orelse std.fs.openDirAbsolute(this.bundler.options.routes.static_dir, .{}) catch |err| {
- this.bundler.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, "Opening public directory failed: {any}", .{@errorName(err)}) catch unreachable;
- Output.printErrorln("Opening public directory failed: {any}", .{@errorName(err)});
+ this.bundler.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, "Opening public directory failed: {s}", .{@errorName(err)}) catch unreachable;
+ Output.printErrorln("Opening public directory failed: {s}", .{@errorName(err)});
this.bundler.options.routes.static_dir_enabled = false;
return null;
};
@@ -618,7 +618,7 @@ pub const RequestContext = struct {
}
pub fn printStatusLineError(err: anyerror, buf: []u8) []const u8 {
- return std.fmt.bufPrint(buf, "HTTP/1.1 500 {any}\r\n", .{@errorName(err)}) catch unreachable;
+ return std.fmt.bufPrint(buf, "HTTP/1.1 500 {s}\r\n", .{@errorName(err)}) catch unreachable;
}
pub fn prepareToSendBody(
@@ -685,7 +685,7 @@ pub const RequestContext = struct {
return error.SocketClosed;
}
- Output.prettyErrorln("send() error: {any}", .{err.toSystemError().message.slice()});
+ Output.prettyErrorln("send() error: {s}", .{err.toSystemError().message.slice()});
return erro;
},
@@ -785,9 +785,9 @@ pub const RequestContext = struct {
pub fn sendInternalError(ctx: *RequestContext, err: anytype) !void {
defer ctx.done();
try ctx.writeStatusError(err);
- const printed = std.fmt.bufPrint(&error_buf, "error: {any}\nPlease see your terminal for more details", .{@errorName(err)}) catch |err2| brk: {
+ const printed = std.fmt.bufPrint(&error_buf, "error: {s}\nPlease see your terminal for more details", .{@errorName(err)}) catch |err2| brk: {
if (Environment.isDebug or Environment.isTest) {
- Global.panic("error while printing error: {any}", .{@errorName(err2)});
+ Global.panic("error while printing error: {s}", .{@errorName(err2)});
}
break :brk "Internal error";
@@ -913,7 +913,7 @@ pub const RequestContext = struct {
defer ctx.done();
const stats = file.stat() catch |err| {
- Output.prettyErrorln("<r><red>Error {any}<r> reading index.html", .{@errorName(err)});
+ Output.prettyErrorln("<r><red>Error {s}<r> reading index.html", .{@errorName(err)});
ctx.writeStatus(500) catch {};
return;
};
@@ -1223,7 +1223,7 @@ pub const RequestContext = struct {
step,
err,
- "<r>JavaScript VM failed to start due to <red>{any}<r>.",
+ "<r>JavaScript VM failed to start due to <red>{s}<r>.",
.{
@errorName(err),
},
@@ -1390,7 +1390,7 @@ pub const RequestContext = struct {
handler.handleJSErrorFmt(
.load_entry_point,
err,
- "<r>JavaScript VM failed to start.\n<red>{any}:<r> while loading <r><b>\"{any}\"",
+ "<r>JavaScript VM failed to start.\n<red>{s}:<r> while loading <r><b>\"{s}\"",
.{ @errorName(err), entry_point },
) catch {};
vm.flush();
@@ -1406,7 +1406,7 @@ pub const RequestContext = struct {
handler.handleRuntimeJSError(
result,
.eval_entry_point,
- "<r>JavaScript VM failed to start.\nwhile loading <r><b>\"{any}\"",
+ "<r>JavaScript VM failed to start.\nwhile loading <r><b>\"{s}\"",
.{entry_point},
) catch {};
vm.flush();
@@ -1435,7 +1435,7 @@ pub const RequestContext = struct {
if (vm.bundler.options.framework.?.display_name.len > 0) {
Output.prettyError(
- " {any} ready<d>! (powered by bun)\n<r>",
+ " {s} ready<d>! (powered by bun)\n<r>",
.{
vm.bundler.options.framework.?.display_name,
},
@@ -1519,7 +1519,7 @@ pub const RequestContext = struct {
handler.handleJSErrorFmt(
.resolve_entry_point,
error.EntryPointDisabled,
- "<r>JavaScript VM failed to start due to disabled entry point: <r><b>\"{any}\"",
+ "<r>JavaScript VM failed to start due to disabled entry point: <r><b>\"{s}\"",
.{resolved_entry_point.path_pair.primary.text},
) catch {};
javascript_disabled = true;
@@ -1914,7 +1914,7 @@ pub const RequestContext = struct {
defer Output.flush();
handler.conn.client.getError() catch |err| {
- Output.prettyErrorln("<r><red>Websocket ERR:<r> <b>{any}<r>", .{err});
+ Output.prettyErrorln("<r><red>Websocket ERR:<r> <b>{s}<r>", .{@errorName(err)});
handler.tombstone = true;
is_socket_closed = true;
};
@@ -1928,7 +1928,7 @@ pub const RequestContext = struct {
continue;
},
else => {
- Output.prettyErrorln("<r><red>Websocket ERR:<r> <b>{any}<r>", .{err});
+ Output.prettyErrorln("<r><red>Websocket ERR:<r> <b>{s}<r>", .{@errorName(err)});
},
}
return;
@@ -1962,7 +1962,7 @@ pub const RequestContext = struct {
);
if (Watcher.getHash(file_path) != full_build.id) {
- Output.prettyErrorln("<r><red>ERR:<r> <b>File path hash mismatch for {any}.<r>", .{full_build.file_path});
+ Output.prettyErrorln("<r><red>ERR:<r> <b>File path hash mismatch for {s}.<r>", .{full_build.file_path});
continue;
}
// save because WebSocket's buffer is 8096
@@ -1971,7 +1971,7 @@ pub const RequestContext = struct {
path_buf.ptr[path_buf.len] = 0;
var file_path_z: [:0]u8 = path_buf.ptr[0..path_buf.len :0];
const file = std.fs.openFileAbsoluteZ(file_path_z, .{ .mode = .read_only }) catch |err| {
- Output.prettyErrorln("<r><red>ERR:<r>{any} opening file <b>{any}<r> <r>", .{ @errorName(err), full_build.file_path });
+ Output.prettyErrorln("<r><red>ERR:<r>{s} opening file <b>{s}<r> <r>", .{ @errorName(err), full_build.file_path });
continue;
};
Fs.FileSystem.setMaxFd(file.handle);
@@ -2040,7 +2040,7 @@ pub const RequestContext = struct {
switch (build_result.value) {
.fail => {
Output.prettyErrorln(
- "error: <b>{any}<r><b>",
+ "error: <b>{s}<r><b>",
.{
file_path,
},
@@ -2049,7 +2049,7 @@ pub const RequestContext = struct {
.success => {
if (build_result.timestamp > cmd.timestamp) {
Output.prettyErrorln(
- "<r><b><green>{d}ms<r> <d>built<r> <b>{any}<r><b> <r><d>({d}+ LOC)",
+ "<r><b><green>{d}ms<r> <d>built<r> <b>{s}<r><b> <r><d>({d}+ LOC)",
.{
build_result.timestamp - cmd.timestamp,
file_path,
@@ -2123,7 +2123,7 @@ pub const RequestContext = struct {
_ = try handler.websocket.writeDataFrame(pong);
},
else => {
- Output.prettyErrorln("Websocket unknown opcode: {any}", .{@tagName(frame.header.opcode)});
+ Output.prettyErrorln("Websocket unknown opcode: {s}", .{@tagName(frame.header.opcode)});
},
}
}
@@ -2162,7 +2162,7 @@ pub const RequestContext = struct {
};
// this error is noisy
// return std.fmt.parseInt(u8, v, 10) catch {
- // Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Version is invalid {any}", .{v});
+ // Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Version is invalid {s}", .{v});
// return error.BadRequest;
// };
}
@@ -2173,7 +2173,7 @@ pub const RequestContext = struct {
var request: *RequestContext = &self.ctx;
const key = (request.header("Sec-WebSocket-Key") orelse return error.BadRequest);
if (key.len < 8) {
- Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Key is less than 8 characters long: {any}", .{key});
+ Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Key is less than 8 characters long: {s}", .{key});
return error.BadRequest;
}
@@ -2584,7 +2584,7 @@ pub const RequestContext = struct {
)) {
if (ctx.watcher.watchloop_handle == null) {
ctx.watcher.start() catch |err| {
- Output.prettyErrorln("Failed to start watcher: {any}", .{@errorName(err)});
+ Output.prettyErrorln("Failed to start watcher: {s}", .{@errorName(err)});
};
}
} else |_| {}
@@ -2982,7 +2982,7 @@ pub const RequestContext = struct {
if (editor != .none) {
editor.open(http_editor_context.path, path.text, line, column, bun.default_allocator) catch |err| {
if (editor != .other) {
- Output.prettyErrorln("Error {any} opening in {any}", .{ @errorName(err), @tagName(editor) });
+ Output.prettyErrorln("Error {s} opening in {s}", .{ @errorName(err), @tagName(editor) });
}
http_editor_context.editor = Editor.none;
@@ -3006,7 +3006,7 @@ pub const RequestContext = struct {
resolve_result.file_fd
else brk: {
var file = std.fs.openFileAbsoluteZ(path.textZ(), .{ .mode = .read_only }) catch |err| {
- Output.prettyErrorln("Failed to open {any} due to error {any}", .{ path.text, @errorName(err) });
+ Output.prettyErrorln("Failed to open {s} due to error {s}", .{ path.text, @errorName(err) });
return try ctx.sendInternalError(err);
};
needs_close = true;
@@ -3021,7 +3021,7 @@ pub const RequestContext = struct {
const content_length = brk: {
var file = std.fs.File{ .handle = fd };
var stat = file.stat() catch |err| {
- Output.prettyErrorln("Failed to read {any} due to error {any}", .{ path.text, @errorName(err) });
+ Output.prettyErrorln("Failed to read {s} due to error {s}", .{ path.text, @errorName(err) });
return try ctx.sendInternalError(err);
};
break :brk stat.size;
@@ -3314,7 +3314,7 @@ pub const Server = struct {
var hinted_content_fbs = std.io.fixedBufferStream(filechange_buf_hinted[header.len..]);
if (comptime Environment.isDebug) {
- Output.prettyErrorln("[watcher] {any}: -- {}", .{ @tagName(kind), event.op });
+ Output.prettyErrorln("[watcher] {s}: -- {}", .{ @tagName(kind), event.op });
}
switch (kind) {
@@ -3328,7 +3328,7 @@ pub const Server = struct {
);
if (comptime FeatureFlags.verbose_watcher) {
- Output.prettyErrorln("<r><d>File changed: {any}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
+ Output.prettyErrorln("<r><d>File changed: {s}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
}
} else {
const change_message = Api.WebsocketMessageFileChangeNotification{
@@ -3341,12 +3341,12 @@ pub const Server = struct {
const change_buf = content_fbs.getWritten();
const written_buf = filechange_buf[0 .. header.len + change_buf.len];
RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| {
- Output.prettyErrorln("Error writing change notification: {any}<r>", .{@errorName(err)});
+ Output.prettyErrorln("Error writing change notification: {s}<r>", .{@errorName(err)});
};
if (comptime is_emoji_enabled) {
- Output.prettyErrorln("<r>📜 <d>File change: {any}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
+ Output.prettyErrorln("<r>📜 <d>File change: {s}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
} else {
- Output.prettyErrorln("<r> <d>File change: {any}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
+ Output.prettyErrorln("<r> <d>File change: {s}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
}
}
},
@@ -3411,12 +3411,12 @@ pub const Server = struct {
const change_buf = hinted_content_fbs.getWritten();
const written_buf = filechange_buf_hinted[0 .. header.len + change_buf.len];
RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| {
- Output.prettyErrorln("Error writing change notification: {any}<r>", .{@errorName(err)});
+ Output.prettyErrorln("Error writing change notification: {s}<r>", .{@errorName(err)});
};
if (comptime is_emoji_enabled) {
- Output.prettyErrorln("<r>📜 <d>File change: {any}<r>", .{ctx.bundler.fs.relativeTo(abs_path)});
+ Output.prettyErrorln("<r>📜 <d>File change: {s}<r>", .{ctx.bundler.fs.relativeTo(abs_path)});
} else {
- Output.prettyErrorln("<r> <d>File change: {any}<r>", .{ctx.bundler.fs.relativeTo(abs_path)});
+ Output.prettyErrorln("<r> <d>File change: {s}<r>", .{ctx.bundler.fs.relativeTo(abs_path)});
}
}
}
@@ -3425,9 +3425,9 @@ pub const Server = struct {
// if (event.op.delete or event.op.rename)
// ctx.watcher.removeAtIndex(event.index, hashes[event.index], parent_hashes, .directory);
if (comptime is_emoji_enabled) {
- Output.prettyErrorln("<r>📁 <d>Dir change: {any}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
+ Output.prettyErrorln("<r>📁 <d>Dir change: {s}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
} else {
- Output.prettyErrorln("<r> <d>Dir change: {any}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
+ Output.prettyErrorln("<r> <d>Dir change: {s}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
}
},
}
@@ -3480,7 +3480,7 @@ pub const Server = struct {
continue :restart;
},
else => {
- Output.prettyErrorln("<r><red>{any} while trying to start listening on port {d}.\n\n", .{ @errorName(err), port });
+ Output.prettyErrorln("<r><red>{s} while trying to start listening on port {d}.\n\n", .{ @errorName(err), port });
Global.exit(1);
},
}
@@ -3537,7 +3537,7 @@ pub const Server = struct {
if (std.mem.readIntNative(u32, &addr.ipv4.host.octets) == 0 or std.mem.readIntNative(u128, &addr.ipv6.host.octets) == 0) {
if (server.bundler.options.routes.single_page_app_routing) {
Output.prettyError(
- " bun!! <d>v{any}<r>\n\n\n Link:<r> <b><cyan>http://localhost:{d}<r>\n <d>{any}/index.html<r> \n\n\n",
+ " bun!! <d>v{s}<r>\n\n\n Link:<r> <b><cyan>http://localhost:{d}<r>\n <d>{s}/index.html<r> \n\n\n",
.{
Global.package_json_version_with_sha,
addr.ipv4.port,
@@ -3545,20 +3545,20 @@ pub const Server = struct {
},
);
} else {
- Output.prettyError(" bun!! <d>v{any}<r>\n\n\n<d> Link:<r> <b><cyan>http://localhost:{d}<r>\n\n\n", .{
+ Output.prettyError(" bun!! <d>v{s}<r>\n\n\n<d> Link:<r> <b><cyan>http://localhost:{d}<r>\n\n\n", .{
Global.package_json_version_with_sha,
addr.ipv4.port,
});
}
} else {
if (server.bundler.options.routes.single_page_app_routing) {
- Output.prettyError(" bun!! <d>v{any}<r>\n\n\n<d> Link:<r> <b><cyan>http://{any}<r>\n <d>{any}/index.html<r> \n\n\n", .{
+ Output.prettyError(" bun!! <d>v{s}<r>\n\n\n<d> Link:<r> <b><cyan>http://{any}<r>\n <d>{s}/index.html<r> \n\n\n", .{
Global.package_json_version_with_sha,
addr,
display_path,
});
} else {
- Output.prettyError(" bun!! <d>v{any}\n\n\n<d> Link:<r> <b><cyan>http://{any}<r>\n\n\n", .{
+ Output.prettyError(" bun!! <d>v{s}\n\n\n<d> Link:<r> <b><cyan>http://{any}<r>\n\n\n", .{
Global.package_json_version_with_sha,
addr,
});
@@ -3641,7 +3641,7 @@ pub const Server = struct {
var req = picohttp.Request.parse(req_buf_node.data[0..read_size], &req_headers_buf) catch |err| {
_ = conn.client.write(RequestContext.printStatusLine(400) ++ "\r\n\r\n", SOCKET_FLAGS) catch {};
_ = Syscall.close(conn.client.socket.fd);
- Output.printErrorln("ERR: {any}", .{@errorName(err)});
+ Output.printErrorln("ERR: {s}", .{@errorName(err)});
return;
};
@@ -3657,7 +3657,7 @@ pub const Server = struct {
server.watcher,
server.timer,
) catch |err| {
- Output.prettyErrorln("<r>[<red>{any}<r>] - <b>{any}<r>: {any}", .{ @errorName(err), req.method, req.path });
+ Output.prettyErrorln("<r>[<red>{s}<r>] - <b>{s}<r>: {s}", .{ @errorName(err), req.method, req.path });
_ = Syscall.close(conn.client.socket.fd);
request_arena.deinit();
return;
@@ -3682,7 +3682,7 @@ pub const Server = struct {
if (req_ctx.url.needs_redirect) {
req_ctx.handleRedirect(req_ctx.url.path) catch |err| {
- Output.prettyErrorln("<r>[<red>{any}<r>] - <b>{any}<r>: {any}", .{ @errorName(err), req.method, req.path });
+ Output.prettyErrorln("<r>[<red>{s}<r>] - <b>{s}<r>: {s}", .{ @errorName(err), req.method, req.path });
conn.client.deinit();
return;
};
@@ -3726,13 +3726,13 @@ pub const Server = struct {
200, 304, 101 => {},
201...303, 305...399 => {
- Output.prettyErrorln("<r><green>{d}<r><d> {any} <r>{any}<d> as {any}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
+ Output.prettyErrorln("<r><green>{d}<r><d> {s} <r>{s}<d> as {s}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
},
400...499 => {
- Output.prettyErrorln("<r><yellow>{d}<r><d> {any} <r>{any}<d> as {any}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
+ Output.prettyErrorln("<r><yellow>{d}<r><d> {s} <r>{s}<d> as {s}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
},
else => {
- Output.prettyErrorln("<r><red>{d}<r><d> {any} <r>{any}<d> as {any}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
+ Output.prettyErrorln("<r><red>{d}<r><d> {s} <r>{s}<d> as {s}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
},
}
}
@@ -3748,13 +3748,13 @@ pub const Server = struct {
200, 304, 101 => {},
201...303, 305...399 => {
- Output.prettyErrorln("<r><green>{d}<r><d> <r>{any}<d> {any} as {any}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
+ Output.prettyErrorln("<r><green>{d}<r><d> <r>{s}<d> {s} as {s}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
},
400...499 => {
- Output.prettyErrorln("<r><yellow>{d}<r><d> <r>{any}<d> {any} as {any}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
+ Output.prettyErrorln("<r><yellow>{d}<r><d> <r>{s}<d> {s} as {s}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
},
else => {
- Output.prettyErrorln("<r><red>{d}<r><d> <r>{any}<d> {any} as {any}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
+ Output.prettyErrorln("<r><red>{d}<r><d> <r>{s}<d> {s} as {s}<r>", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value });
},
}
}
@@ -3772,7 +3772,7 @@ pub const Server = struct {
}
var finished = req_ctx.handleReservedRoutes(server) catch |err| {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
return;
};
@@ -3783,7 +3783,7 @@ pub const Server = struct {
if (comptime features.single_page_app_routing) {
if (req_ctx.url.isRoot(server.bundler.options.routes.asset_prefix_path)) {
req_ctx.sendSinglePageHTML() catch |err| {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
};
finished = true;
@@ -3798,7 +3798,7 @@ pub const Server = struct {
if (req_ctx.matchPublicFolder(comptime features.public_folder == .last or features.single_page_app_routing)) |result| {
finished = true;
req_ctx.renderServeResult(result) catch |err| {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
return;
};
@@ -3816,7 +3816,7 @@ pub const Server = struct {
switch (err) {
error.ModuleNotFound => {},
else => {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
},
}
@@ -3832,7 +3832,7 @@ pub const Server = struct {
break :request_handler;
},
else => {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
},
}
@@ -3847,7 +3847,7 @@ pub const Server = struct {
if (req_ctx.matchPublicFolder(false)) |result| {
finished = true;
req_ctx.renderServeResult(result) catch |err| {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
};
}
@@ -3860,7 +3860,7 @@ pub const Server = struct {
if (!finished and (req_ctx.bundler.options.routes.single_page_app_routing and req_ctx.url.extname.len == 0)) {
if (!finished) {
req_ctx.sendSinglePageHTML() catch |err| {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
};
}
@@ -3872,7 +3872,7 @@ pub const Server = struct {
// if we're about to 404 and it's the favicon, use our stand-in
if (strings.eqlComptime(req_ctx.url.path, "favicon.ico")) {
req_ctx.sendFavicon() catch |err| {
- Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path });
+ Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path });
did_print = true;
};
return;
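
Most of the hunks above swap `{any}` for `{s}` when the argument is a byte slice (paths, `@errorName(err)`, `@tagName(...)`). A minimal sketch of the difference, assuming a Zig std from roughly this era, where `{any}` falls back to the default formatter instead of rendering `[]const u8` as text:

```zig
const std = @import("std");

pub fn main() void {
    const name: []const u8 = "bun";
    // {s} renders the slice as text.
    std.debug.print("{s}\n", .{name}); // -> bun
    // {any} uses the default formatter, which prints the slice
    // element-by-element (something like { 98, 117, 110 }) rather than as a string.
    std.debug.print("{any}\n", .{name});
}
```
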
diff --git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig
index 2d7ddf47b..24aeaf472 100644
--- a/src/http/websocket_http_client.zig
+++ b/src/http/websocket_http_client.zig
@@ -51,8 +51,8 @@ fn buildRequestBody(vm: *JSC.VirtualMachine, pathname: *const JSC.ZigString, hos
const pico_headers = PicoHTTP.Headers{ .headers = headers_ };
return try std.fmt.allocPrint(
allocator,
- "GET {any} HTTP/1.1\r\n" ++
- "Host: {any}\r\n" ++
+ "GET {s} HTTP/1.1\r\n" ++
+ "Host: {s}\r\n" ++
"Pragma: no-cache\r\n" ++
"Cache-Control: no-cache\r\n" ++
"Connection: Upgrade\r\n" ++
@@ -978,7 +978,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
var header_bytes: [@sizeOf(usize)]u8 = [_]u8{0} ** @sizeOf(usize);
while (true) {
- log("onData ({any})", .{@tagName(receive_state)});
+ log("onData ({s})", .{@tagName(receive_state)});
switch (receive_state) {
// 0 1 2 3
diff --git a/src/http_client_async.zig b/src/http_client_async.zig
index 1e50199d3..818b8cb71 100644
--- a/src/http_client_async.zig
+++ b/src/http_client_async.zig
@@ -128,7 +128,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
pending.hostname_len = @truncate(u8, hostname.len);
pending.port = port;
- log("- Keep-Alive release {any}:{d}", .{ hostname, port });
+ log("- Keep-Alive release {s}:{d}", .{ hostname, port });
return;
}
}
@@ -293,7 +293,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
continue;
}
- log("+ Keep-Alive reuse {any}:{d}", .{ hostname, port });
+ log("+ Keep-Alive reuse {s}:{d}", .{ hostname, port });
return http_socket;
}
}
@@ -480,7 +480,7 @@ pub fn onOpen(
std.debug.assert(is_ssl == client.url.isHTTPS());
}
- log("Connected {any} \n", .{client.url.href});
+ log("Connected {s} \n", .{client.url.href});
if (comptime is_ssl) {
var ssl: *BoringSSL.SSL = @ptrCast(*BoringSSL.SSL, socket.getNativeHandle());
@@ -512,7 +512,7 @@ pub fn onClose(
comptime is_ssl: bool,
socket: NewHTTPContext(is_ssl).HTTPSocket,
) void {
- log("Closed {any}\n", .{client.url.href});
+ log("Closed {s}\n", .{client.url.href});
const in_progress = client.state.stage != .done and client.state.stage != .fail;
@@ -545,7 +545,7 @@ pub fn onTimeout(
socket: NewHTTPContext(is_ssl).HTTPSocket,
) void {
_ = socket;
- log("Timeout {any}\n", .{client.url.href});
+ log("Timeout {s}\n", .{client.url.href});
if (client.state.stage != .done and client.state.stage != .fail)
client.fail(error.Timeout);
@@ -556,7 +556,7 @@ pub fn onConnectError(
socket: NewHTTPContext(is_ssl).HTTPSocket,
) void {
_ = socket;
- log("onConnectError {any}\n", .{client.url.href});
+ log("onConnectError {s}\n", .{client.url.href});
if (client.state.stage != .done and client.state.stage != .fail)
client.fail(error.ConnectionRefused);
@@ -566,7 +566,7 @@ pub fn onEnd(
comptime is_ssl: bool,
_: NewHTTPContext(is_ssl).HTTPSocket,
) void {
- log("onEnd {any}\n", .{client.url.href});
+ log("onEnd {s}\n", .{client.url.href});
if (client.state.stage != .done and client.state.stage != .fail)
client.fail(error.ConnectionClosed);
@@ -1032,7 +1032,7 @@ pub const AsyncHTTP = struct {
this.real.?.* = this.*;
this.real.?.response_buffer = this.response_buffer;
- log("onAsyncHTTPComplete: {any}", .{bun.fmt.fmtDuration(this.elapsed)});
+ log("onAsyncHTTPComplete: {s}", .{bun.fmt.fmtDuration(this.elapsed)});
default_allocator.destroy(this);
@@ -1062,7 +1062,7 @@ pub const AsyncHTTP = struct {
}
this.client.start(this.request_body, this.response_buffer);
- log("onStart: {any}", .{bun.fmt.fmtDuration(this.elapsed)});
+ log("onStart: {s}", .{bun.fmt.fmtDuration(this.elapsed)});
}
};
@@ -1893,7 +1893,7 @@ pub fn handleResponseMetadata(
const original_url = this.url;
this.url = URL.parse(std.fmt.bufPrint(
&url_buf.data,
- "{any}://{any}{any}",
+ "{any}://{any}{s}",
.{ original_url.displayProtocol(), original_url.displayHostname(), location },
) catch return error.RedirectURLTooLong);
diff --git a/src/install/bin.zig b/src/install/bin.zig
index b0d67c01e..51ae962a6 100644
--- a/src/install/bin.zig
+++ b/src/install/bin.zig
@@ -146,7 +146,7 @@ pub const Bin = extern struct {
bin: Bin,
i: usize = 0,
done: bool = false,
- dir_iterator: ?std.fs.Dir.Iterator = null,
+ dir_iterator: ?std.fs.IterableDir.Iterator = null,
package_name: String,
package_installed_node_modules: std.fs.Dir = std.fs.Dir{ .fd = std.math.maxInt(std.os.fd_t) },
buf: [bun.MAX_PATH_BYTES]u8 = undefined,
@@ -167,7 +167,7 @@ pub const Bin = extern struct {
var joined = Path.joinStringBuf(&this.buf, &parts, .auto);
this.buf[joined.len] = 0;
var joined_: [:0]u8 = this.buf[0..joined.len :0];
- var child_dir = try dir.openIterableDirZ(joined_, .{});
+ var child_dir = try bun.openIterableDirZ(dir.fd, joined_);
this.dir_iterator = child_dir.iterate();
}
@@ -419,15 +419,16 @@ pub const Bin = extern struct {
var joined = Path.joinStringBuf(&target_buf, &parts, .auto);
@intToPtr([*]u8, @ptrToInt(joined.ptr))[joined.len] = 0;
var joined_: [:0]const u8 = joined.ptr[0..joined.len :0];
- var child_dir = dir.openIterableDirZ(joined_, .{}) catch |err| {
- this.err = err;
- return;
- };
+ var child_dir = bun.openIterableDirZ(dir.fd, joined_) catch |err| {
+ this.err = err;
+ return;
+ };
+
defer child_dir.close();
var iter = child_dir.iterate();
- var basedir_path = std.os.getFdPath(child_dir.fd, &target_buf) catch |err| {
+ var basedir_path = std.os.getFdPath(child_dir.dir.fd, &target_buf) catch |err| {
this.err = err;
return;
};
@@ -436,9 +437,9 @@ pub const Bin = extern struct {
var prev_target_buf_remain = target_buf_remain;
while (iter.next() catch null) |entry_| {
- const entry: std.fs.Dir.Entry = entry_;
+ const entry: std.fs.IterableDir.Entry = entry_;
switch (entry.kind) {
- std.fs.Dir.Entry.Kind.SymLink, std.fs.Dir.Entry.Kind.File => {
+ std.fs.IterableDir.Entry.Kind.SymLink, std.fs.IterableDir.Entry.Kind.File => {
target_buf_remain = prev_target_buf_remain;
std.mem.copy(u8, target_buf_remain, entry.name);
target_buf_remain = target_buf_remain[entry.name.len..];
@@ -571,7 +572,7 @@ pub const Bin = extern struct {
var joined = Path.joinStringBuf(&target_buf, &parts, .auto);
@intToPtr([*]u8, @ptrToInt(joined.ptr))[joined.len] = 0;
var joined_: [:0]const u8 = joined.ptr[0..joined.len :0];
- var child_dir = dir.openIterableDirZ(joined_, .{}) catch |err| {
+ var child_dir = bun.openIterableDirZFromDir(dir, joined_) catch |err| {
this.err = err;
return;
};
@@ -579,7 +580,7 @@ pub const Bin = extern struct {
var iter = child_dir.iterate();
- var basedir_path = std.os.getFdPath(child_dir.fd, &target_buf) catch |err| {
+ var basedir_path = std.os.getFdPath(child_dir.dir.fd, &target_buf) catch |err| {
this.err = err;
return;
};
@@ -588,9 +589,9 @@ pub const Bin = extern struct {
var prev_target_buf_remain = target_buf_remain;
while (iter.next() catch null) |entry_| {
- const entry: std.fs.Dir.Entry = entry_;
+ const entry: std.fs.IterableDir.Entry = entry_;
switch (entry.kind) {
- std.fs.Dir.Entry.Kind.SymLink, std.fs.Dir.Entry.Kind.File => {
+ std.fs.IterableDir.Entry.Kind.SymLink, std.fs.IterableDir.Entry.Kind.File => {
target_buf_remain = prev_target_buf_remain;
std.mem.copy(u8, target_buf_remain, entry.name);
target_buf_remain = target_buf_remain[entry.name.len..];
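
The bin.zig changes track the std.fs split between `Dir` and `IterableDir`: iteration now goes through `std.fs.IterableDir`, which wraps a plain `Dir` (hence the `.dir.fd` accesses), and entries come from `std.fs.IterableDir.Entry`. The `bun.openIterableDirZ` / `bun.openIterableDirZFromDir` helpers are Bun-internal; a hedged, std-only sketch of the same iteration pattern, assuming a ~0.10-era std API:

```zig
const std = @import("std");

// Illustrative only, not Bun's helper.
pub fn listBinEntries(parent: std.fs.Dir, sub_path: []const u8) !void {
    var iterable = try parent.openIterableDir(sub_path, .{});
    defer iterable.close();

    // The underlying descriptor now lives on the wrapped Dir.
    std.debug.print("fd={d}\n", .{iterable.dir.fd});

    var iter = iterable.iterate();
    while (try iter.next()) |entry| {
        switch (entry.kind) {
            .File, .SymLink => std.debug.print("{s}\n", .{entry.name}),
            else => {},
        }
    }
}
```
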
diff --git a/src/install/install.zig b/src/install/install.zig
index 8d9701611..bdf16faa9 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -889,10 +889,10 @@ const PackageInstall = struct {
fn installWithClonefileEachDir(this: *PackageInstall) !Result {
const Walker = @import("../walker_skippable.zig");
- var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{
+ var cached_package_dir = bun.openIterableDirZFromDir(this.cache_dir, this.cache_dir_subpath) catch |err| return Result{
.fail = .{ .err = err, .step = .opening_cache_dir },
};
- defer cached_package_dir.close();
+ defer cached_package_dir.dir.close();
var walker_ = Walker.walk(
cached_package_dir,
this.allocator,
@@ -921,7 +921,7 @@ const PackageInstall = struct {
var path: [:0]u8 = stackpath[0..entry.path.len :0];
var basename: [:0]u8 = stackpath[entry.path.len - entry.basename.len .. entry.path.len :0];
switch (C.clonefileat(
- entry.dir.fd,
+ entry.dir.dir.fd,
basename,
destination_dir_.fd,
path,
@@ -1003,7 +1003,7 @@ const PackageInstall = struct {
const Walker = @import("../walker_skippable.zig");
const CopyFile = @import("../copy_file.zig");
- var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{
+ var cached_package_dir = bun.openIterableDirZFromDir(this.cache_dir, this.cache_dir_subpath) catch |err| return Result{
.fail = .{ .err = err, .step = .opening_cache_dir },
};
defer cached_package_dir.close();
@@ -1043,14 +1043,14 @@ const PackageInstall = struct {
};
defer outfile.close();
- var infile = try entry.dir.openFile(entry.basename, .{ .mode = .read_only });
+ var infile = try entry.dir.dir.openFile(entry.basename, .{ .mode = .read_only });
defer infile.close();
const stat = infile.stat() catch continue;
_ = C.fchmod(outfile.handle, stat.mode);
CopyFile.copy(infile.handle, outfile.handle) catch {
- entry.dir.copyFile(entry.basename, destination_dir_, entry.path, .{}) catch |err| {
+ entry.dir.dir.copyFile(entry.basename, destination_dir_, entry.path, .{}) catch |err| {
progress_.root.end();
progress_.refresh();
@@ -1083,7 +1083,7 @@ const PackageInstall = struct {
fn installWithHardlink(this: *PackageInstall) !Result {
const Walker = @import("../walker_skippable.zig");
- var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{
+ var cached_package_dir = bun.openIterableDirZFromDir(this.cache_dir, this.cache_dir_subpath) catch |err| return Result{
.fail = .{ .err = err, .step = .opening_cache_dir },
};
defer cached_package_dir.close();
@@ -1109,7 +1109,7 @@ const PackageInstall = struct {
std.os.mkdirat(destination_dir_.fd, entry.path, 0o755) catch {};
},
.File => {
- try std.os.linkat(entry.dir.fd, entry.basename, destination_dir_.fd, entry.path, 0);
+ try std.os.linkat(entry.dir.dir.fd, entry.basename, destination_dir_.fd, entry.path, 0);
real_file_count += 1;
},
else => {},
@@ -1144,7 +1144,7 @@ const PackageInstall = struct {
fn installWithSymlink(this: *PackageInstall) !Result {
const Walker = @import("../walker_skippable.zig");
- var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{
+ var cached_package_dir = bun.openIterableDirZFromDir(this.cache_dir, this.cache_dir_subpath) catch |err| return Result{
.fail = .{ .err = err, .step = .opening_cache_dir },
};
defer cached_package_dir.close();
@@ -1221,7 +1221,7 @@ const PackageInstall = struct {
this.file_count = FileCopier.copy(
subdir.fd,
- cached_package_dir.fd,
+ cached_package_dir.dir.fd,
&walker_,
) catch |err|
return Result{
@@ -1868,7 +1868,7 @@ pub const PackageManager = struct {
var end: []u8 = undefined;
if (scope.url.hostname.len > 32 or available.len < 64) {
const visible_hostname = scope.url.hostname[0..@min(scope.url.hostname.len, 12)];
- end = std.fmt.bufPrint(available, "@@{s}__{x}", .{ visible_hostname, String.Builder.stringHash(scope.url.href) }) catch unreachable;
+ end = std.fmt.bufPrint(available, "@@{s}__{any}", .{ visible_hostname, bun.fmt.x(String.Builder.stringHash(scope.url.href)) }) catch unreachable;
} else {
end = std.fmt.bufPrint(available, "@@{s}", .{scope.url.hostname}) catch unreachable;
}
@@ -1893,14 +1893,14 @@ pub const PackageManager = struct {
} else if (version.tag.hasPre() and version.tag.hasBuild()) {
return std.fmt.bufPrintZ(
buf,
- "{s}@{d}.{d}.{d}-{x}+{X}",
- .{ name, version.major, version.minor, version.patch, version.tag.pre.hash, version.tag.build.hash },
+ "{s}@{d}.{d}.{d}-{any}+{X}",
+ .{ name, version.major, version.minor, version.patch, version.tag.pre.hash, bun.fmt.x(version.tag.build.hash) },
) catch unreachable;
} else if (version.tag.hasPre()) {
return std.fmt.bufPrintZ(
buf,
- "{s}@{d}.{d}.{d}-{x}",
- .{ name, version.major, version.minor, version.patch, version.tag.pre.hash },
+ "{s}@{d}.{d}.{d}-{any}",
+ .{ name, version.major, version.minor, version.patch, bun.fmt.x(version.tag.pre.hash) },
) catch unreachable;
} else if (version.tag.hasBuild()) {
return std.fmt.bufPrintZ(
@@ -5464,10 +5464,10 @@ pub const PackageManager = struct {
if (cwd.openDirZ(manager.options.bin_path, .{}, true)) |node_modules_bin_| {
var node_modules_bin: std.fs.Dir = node_modules_bin_;
const iterable_dir = std.fs.IterableDir{ .dir = .{ .fd = node_modules_bin.fd } };
- var iter: std.fs.Dir.Iterator = iterable_dir.iterate();
+ var iter: std.fs.IterableDir.Iterator = iterable_dir.iterate();
iterator: while (iter.next() catch null) |entry| {
switch (entry.kind) {
- std.fs.Dir.Entry.Kind.SymLink => {
+ std.fs.IterableDir.Entry.Kind.SymLink => {
// any symlinks which we are unable to open are assumed to be dangling
// note that using access won't work here, because access doesn't resolve symlinks
@@ -6684,7 +6684,7 @@ pub const PackageManager = struct {
// bun install may have installed new bins, so we need to update the PATH
// this can happen if node_modules/.bin didn't previously exist
// note: it is harmless to have the same directory in the PATH multiple times
- const current_path = manager.env.map.get("PATH");
+ const current_path = manager.env.map.get("PATH") orelse "/";
// TODO: windows
const cwd_without_trailing_slash = if (Fs.FileSystem.instance.top_level_dir.len > 1 and Fs.FileSystem.instance.top_level_dir[Fs.FileSystem.instance.top_level_dir.len - 1] == '/')
@@ -6696,7 +6696,7 @@ pub const PackageManager = struct {
ctx.allocator,
"{s}:{s}/node_modules/.bin",
.{
- current_path,
+ current_path ,
cwd_without_trailing_slash,
},
));
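
One non-formatting fix sits in the PATH handling above: `manager.env.map.get("PATH")` yields an optional slice, and newer std.fmt no longer silently unwraps optionals for a `{s}` placeholder, so the code now supplies a default with `orelse`. A tiny illustration (the `maybe_path` value is a stand-in, not Bun's env map):

```zig
const std = @import("std");

pub fn main() void {
    // Stand-in for manager.env.map.get("PATH").
    const maybe_path: ?[]const u8 = null;

    // Unwrap with a fallback so the "{s}" placeholder sees a plain slice.
    const current_path = maybe_path orelse "/";
    std.debug.print("{s}:{s}/node_modules/.bin\n", .{ current_path, "/tmp/project" });
}
```
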
diff --git a/src/install/npm.zig b/src/install/npm.zig
index 112580c4c..ef45f956b 100644
--- a/src/install/npm.zig
+++ b/src/install/npm.zig
@@ -31,6 +31,7 @@ const ObjectPool = @import("../pool.zig").ObjectPool;
const Api = @import("../api/schema.zig").Api;
const DotEnv = @import("../env_loader.zig");
const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap;
+const bun = @import("../global.zig");
const Npm = @This();
@@ -556,18 +557,18 @@ pub const PackageManifest = struct {
var out_path_buf: ["-18446744073709551615".len + ".npm".len + 1]u8 = undefined;
var dest_path_stream = std.io.fixedBufferStream(&dest_path_buf);
var dest_path_stream_writer = dest_path_stream.writer();
- try dest_path_stream_writer.print("{x}.npm-{x}", .{ file_id, @max(std.time.milliTimestamp(), 0) });
+ try dest_path_stream_writer.print("{any}.npm-{any}", .{ bun.fmt.x(file_id), bun.fmt.x(@max(std.time.milliTimestamp(), 0)) });
try dest_path_stream_writer.writeByte(0);
var tmp_path: [:0]u8 = dest_path_buf[0 .. dest_path_stream.pos - 1 :0];
try writeFile(this, tmp_path, tmpdir);
- var out_path = std.fmt.bufPrintZ(&out_path_buf, "{x}.npm", .{file_id}) catch unreachable;
+ var out_path = std.fmt.bufPrintZ(&out_path_buf, "{any}.npm", .{bun.fmt.x(file_id)}) catch unreachable;
try std.os.renameatZ(tmpdir.fd, tmp_path, cache_dir.fd, out_path);
}
pub fn load(allocator: std.mem.Allocator, cache_dir: std.fs.Dir, package_name: string) !?PackageManifest {
const file_id = std.hash.Wyhash.hash(0, package_name);
var file_path_buf: [512 + 64]u8 = undefined;
- var file_path = try std.fmt.bufPrintZ(&file_path_buf, "{x}.npm", .{file_id});
+ var file_path = try std.fmt.bufPrintZ(&file_path_buf, "{any}.npm", .{bun.fmt.x(file_id)});
var cache_file = cache_dir.openFileZ(
file_path,
.{ .mode = .read_only },
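
npm.zig (and several files below) replace `{x}` on integer hashes with `bun.fmt.x(...)` printed through `{any}`. `bun.fmt.x` is Bun-internal; the general shape is a `std.fmt.Formatter` wrapper whose `format` method gets invoked regardless of the placeholder. A hedged, std-only approximation:

```zig
const std = @import("std");

// Not Bun's bun.fmt.x; an illustrative wrapper with the same shape.
fn formatHexLower(value: u64, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
    try std.fmt.formatInt(value, 16, .lower, .{}, writer);
}

fn hex(value: u64) std.fmt.Formatter(formatHexLower) {
    return .{ .data = value };
}

pub fn main() !void {
    const file_id = std.hash.Wyhash.hash(0, "left-pad");
    var buf: [64]u8 = undefined;
    // Mirrors the "{any}.npm" cache-file naming seen above.
    const file_path = try std.fmt.bufPrintZ(&buf, "{any}.npm", .{hex(file_id)});
    std.debug.print("{s}\n", .{file_path});
}
```
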
diff --git a/src/io/io_darwin.zig b/src/io/io_darwin.zig
index c2160d5d9..104da0507 100644
--- a/src/io/io_darwin.zig
+++ b/src/io/io_darwin.zig
@@ -746,7 +746,7 @@ fn flush(self: *IO, comptime _: @Type(.EnumLiteral)) !void {
);
if (new_events_ < 0) {
- return std.debug.panic("kevent() failed {any}", .{@tagName(std.c.getErrno(new_events_))});
+ return std.debug.panic("kevent() failed {s}", .{@tagName(std.c.getErrno(new_events_))});
}
const new_events = @intCast(usize, new_events_);
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 7eede039c..1e608b54b 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -1626,6 +1626,14 @@ pub const E = struct {
rope_len: u32 = 0,
is_utf16: bool = false,
+ pub fn format(this: String, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+ if (this.isUTF8()) {
+ try writer.writeAll(this.data);
+ } else {
+ try writer.print("{any}", .{JSC.ZigString.init16(this.slice16())});
+ }
+ }
+
pub var class = E.String{ .data = "class" };
pub fn push(this: *String, other: *String) void {
std.debug.assert(this.isUTF8());
@@ -4872,7 +4880,7 @@ pub const Macro = struct {
source,
import_range,
log.msgs.allocator,
- "Macro \"{any}\" not found",
+ "Macro \"{s}\" not found",
.{import_record_path},
.stmt,
err,
@@ -4884,7 +4892,7 @@ pub const Macro = struct {
source,
import_range,
log.msgs.allocator,
- "{any} resolving macro \"{any}\"",
+ "{s} resolving macro \"{s}\"",
.{ @errorName(err), import_record_path },
) catch unreachable;
return err;
@@ -5547,7 +5555,7 @@ pub const Macro = struct {
},
else => {
if (comptime Environment.isDebug) {
- Output.prettyWarnln("initExpr fail: {any}", .{@tagName(this.data)});
+ Output.prettyWarnln("initExpr fail: {s}", .{@tagName(this.data)});
}
return JSNode{ .loc = this.loc, .data = .{ .e_missing = .{} } };
},
@@ -5957,7 +5965,7 @@ pub const Macro = struct {
if (!@hasField(JSNode.Tag, name)) {
@compileError(
"JSNode.Tag does not have a \"" ++ name ++ "\" field. Valid fields are " ++ std.fmt.comptimePrint(
- "{any}",
+ "{s}",
.{
std.meta.fieldNames(@TypeOf(valid_tags)),
},
@@ -6709,7 +6717,7 @@ pub const Macro = struct {
Tag.e_super, Tag.e_null, Tag.e_undefined, Tag.e_missing, Tag.inline_true, Tag.inline_false, Tag.e_this => {
self.args.append(Expr{ .loc = loc, .data = Tag.ids.get(tag) }) catch unreachable;
},
- else => Global.panic("Tag \"{any}\" is not implemented yet.", .{@tagName(tag)}),
+ else => Global.panic("Tag \"{s}\" is not implemented yet.", .{@tagName(tag)}),
}
return true;
@@ -6745,7 +6753,7 @@ pub const Macro = struct {
tag_expr.loc,
);
},
- else => Global.panic("Not implemented yet top-level jsx element: {any}", .{@tagName(tag_expr.data)}),
+ else => Global.panic("Not implemented yet top-level jsx element: {s}", .{@tagName(tag_expr.data)}),
}
} else {
const loc = logger.Loc.Empty;
@@ -6782,16 +6790,12 @@ pub const Macro = struct {
var p = self.p;
const node_type: JSNode.Tag = JSNode.Tag.names.get(str.data) orelse {
- if (!str.isUTF8()) {
- self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{strings.toUTF8Alloc(self.p.allocator, str.slice16())}) catch unreachable;
- } else {
- self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{str.data}) catch unreachable;
- }
+ self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{str}) catch unreachable;
return false;
};
if (!valid_tags.get(node_type)) {
- self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid here", .{str.data}) catch unreachable;
+ self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid here", .{str}) catch unreachable;
}
return self.writeNodeType(node_type, element.properties.slice(), element.children.slice(), tag_expr.loc);
@@ -6805,9 +6809,9 @@ pub const Macro = struct {
const node_type: JSNode.Tag = JSNode.Tag.names.get(str.data) orelse {
if (!str.isUTF8()) {
- self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{strings.toUTF8Alloc(self.p.allocator, str.slice16())}) catch unreachable;
+ self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid", .{strings.toUTF8Alloc(self.p.allocator, str.slice16()) catch unreachable}) catch unreachable;
} else {
- self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{str.data}) catch unreachable;
+ self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid", .{str.data}) catch unreachable;
}
return false;
};
@@ -7838,7 +7842,7 @@ pub const Macro = struct {
this.source,
this.caller.loc,
this.allocator,
- "cannot coerce {any} to Bun's AST. Please return a valid macro using the JSX syntax",
+ "cannot coerce {s} to Bun's AST. Please return a valid macro using the JSX syntax",
.{@tagName(value.jsType())},
) catch unreachable;
break :brk error.MacroFailed;
@@ -8085,7 +8089,7 @@ pub const Macro = struct {
this.source,
this.caller.loc,
this.allocator,
- "cannot coerce {any} to Bun's AST. Please return a valid macro using the JSX syntax",
+ "cannot coerce {s} to Bun's AST. Please return a valid macro using the JSX syntax",
.{@tagName(value.jsType())},
) catch unreachable;
return error.MacroFailed;
@@ -8106,7 +8110,7 @@ pub const Macro = struct {
visitor: Visitor,
javascript_object: JSC.JSValue,
) MacroError!Expr {
- if (comptime Environment.isDebug) Output.prettyln("<r><d>[macro]<r> call <d><b>{any}<r>", .{function_name});
+ if (comptime Environment.isDebug) Output.prettyln("<r><d>[macro]<r> call <d><b>{s}<r>", .{function_name});
exception_holder = Zig.ZigException.Holder.init();
expr_nodes_buf[0] = JSNode.initExpr(caller);
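
The new `E.String.format` method is what lets later call sites pass the `String` value straight to a placeholder: std.fmt dispatches to a type's `pub fn format(...)` when one exists. A reduced sketch of that mechanism (the `Name` type here is illustrative, not the real E.String):

```zig
const std = @import("std");

const Name = struct {
    data: []const u8,

    // std.fmt calls this whenever a Name is passed to a placeholder.
    pub fn format(this: Name, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
        try writer.writeAll(this.data);
    }
};

pub fn main() void {
    const tag = Name{ .data = "e_jsx_element" };
    std.debug.print("Tag \"{}\" is invalid\n", .{tag});
}
```
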
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 19591c2a6..0b3ce2a2b 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -196,7 +196,7 @@ fn NewLexer_(
pub fn addDefaultError(self: *LexerType, msg: []const u8) !void {
@setCold(true);
- self.addError(self.start, "{any}", .{msg}, true);
+ self.addError(self.start, "{s}", .{msg}, true);
return Error.SyntaxError;
}
@@ -816,7 +816,7 @@ fn NewLexer_(
}
pub fn addUnsupportedSyntaxError(self: *LexerType, msg: []const u8) !void {
- self.addError(self.end, "Unsupported syntax: {any}", .{msg}, true);
+ self.addError(self.end, "Unsupported syntax: {s}", .{msg}, true);
return Error.SyntaxError;
}
@@ -936,7 +936,7 @@ fn NewLexer_(
if (!isIdentifier(identifier)) {
try lexer.addRangeError(
.{ .loc = logger.usize2Loc(lexer.start), .len = @intCast(i32, lexer.end - lexer.start) },
- "Invalid identifier: \"{any}\"",
+ "Invalid identifier: \"{s}\"",
.{result.contents},
true,
);
@@ -965,13 +965,13 @@ fn NewLexer_(
pub fn expectContextualKeyword(self: *LexerType, comptime keyword: string) !void {
if (!self.isContextualKeyword(keyword)) {
if (@import("builtin").mode == std.builtin.Mode.Debug) {
- self.addError(self.start, "Expected \"{any}\" but found \"{any}\" (token: {any})", .{
+ self.addError(self.start, "Expected \"{s}\" but found \"{s}\" (token: {any})", .{
keyword,
self.raw(),
self.token,
}, true);
} else {
- self.addError(self.start, "Expected \"{any}\" but found \"{any}\"", .{ keyword, self.raw() }, true);
+ self.addError(self.start, "Expected \"{s}\" but found \"{s}\"", .{ keyword, self.raw() }, true);
}
return Error.UnexpectedSyntax;
}
@@ -1743,7 +1743,7 @@ fn NewLexer_(
}
};
- try lexer.addRangeError(lexer.range(), "Unexpected {any}", .{found}, true);
+ try lexer.addRangeError(lexer.range(), "Unexpected {s}", .{found}, true);
}
pub fn raw(self: *LexerType) []const u8 {
@@ -1763,7 +1763,7 @@ fn NewLexer_(
}
};
- try self.addRangeError(self.range(), "Expected {any} but found \"{any}\"", .{ text, found }, true);
+ try self.addRangeError(self.range(), "Expected {s} but found \"{s}\"", .{ text, found }, true);
}
fn scanCommentText(lexer: *LexerType) void {
@@ -2090,7 +2090,7 @@ fn NewLexer_(
lexer.step();
}
} else {
- try lexer.addSyntaxError(lexer.range().endI(), "Expected identifier after \"{any}\" in namespaced JSX name", .{lexer.raw()});
+ try lexer.addSyntaxError(lexer.range().endI(), "Expected identifier after \"{s}\" in namespaced JSX name", .{lexer.raw()});
}
}
@@ -2329,10 +2329,10 @@ fn NewLexer_(
cursor.c = std.fmt.parseInt(i32, number, base) catch |err| brk: {
switch (err) {
error.InvalidCharacter => {
- lexer.addError(lexer.start, "Invalid JSX entity escape: {any}", .{entity}, false);
+ lexer.addError(lexer.start, "Invalid JSX entity escape: {s}", .{entity}, false);
},
error.Overflow => {
- lexer.addError(lexer.start, "JSX entity escape is too big: {any}", .{entity}, false);
+ lexer.addError(lexer.start, "JSX entity escape is too big: {s}", .{entity}, false);
},
}
@@ -2630,7 +2630,7 @@ fn NewLexer_(
if (std.fmt.parseFloat(f64, text)) |num| {
lexer.number = num;
} else |_| {
- try lexer.addSyntaxError(lexer.start, "Invalid number {any}", .{text});
+ try lexer.addSyntaxError(lexer.start, "Invalid number {s}", .{text});
}
}
}
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 8e87a9753..970bb63dc 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -977,7 +977,7 @@ const StaticSymbolName = struct {
pub const List = struct {
fn NewStaticSymbol(comptime basename: string) StaticSymbolName {
return comptime StaticSymbolName{
- .internal = basename ++ "_" ++ std.fmt.comptimePrint("{x}", .{std.hash.Wyhash.hash(0, basename)}),
+ .internal = basename ++ "_" ++ std.fmt.comptimePrint("{any}", .{bun.fmt.x(std.hash.Wyhash.hash(0, basename))}),
.primary = basename,
.backup = "_" ++ basename ++ "$",
};
@@ -985,7 +985,7 @@ const StaticSymbolName = struct {
fn NewStaticSymbolWithBackup(comptime basename: string, comptime backup: string) StaticSymbolName {
return comptime StaticSymbolName{
- .internal = basename ++ "_" ++ std.fmt.comptimePrint("{x}", .{std.hash.Wyhash.hash(0, basename)}),
+ .internal = basename ++ "_" ++ std.fmt.comptimePrint("{any}", .{bun.fmt.x(std.hash.Wyhash.hash(0, basename))}),
.primary = basename,
.backup = backup,
};
@@ -4263,19 +4263,20 @@ fn NewParser_(
p.import_records.items[import_record_index].was_originally_require = true;
p.import_records.items[import_record_index].contains_import_star = true;
- const symbol_name = p.import_records.items[import_record_index].path.name.nonUniqueNameString(p.allocator);
+ const symbol_name = p.import_records.items[import_record_index].path.name.nonUniqueNameString(p.allocator) catch unreachable;
const cjs_import_name = std.fmt.allocPrint(
p.allocator,
- "{any}_{x}_{d}",
+ "{s}_{s}_{d}",
.{
+ // THIS WASN'T ERRORING BEFORE
symbol_name,
- @truncate(
+ bun.fmt.x(@truncate(
u16,
std.hash.Wyhash.hash(
0,
p.import_records.items[import_record_index].path.text,
),
- ),
+ )),
p.cjs_import_stmts.items.len,
},
) catch unreachable;
@@ -4714,7 +4715,7 @@ fn NewParser_(
// Duplicate exports are an error
var notes = try p.allocator.alloc(logger.Data, 1);
notes[0] = logger.Data{
- .text = try std.fmt.allocPrint(p.allocator, "\"{any}\" was originally exported here", .{alias}),
+ .text = try std.fmt.allocPrint(p.allocator, "\"{s}\" was originally exported here", .{alias}),
.location = logger.Location.init_or_nil(p.source, js_lexer.rangeOfIdentifier(p.source, name.alias_loc)),
};
try p.log.addRangeErrorFmtWithNotes(
@@ -4722,7 +4723,7 @@ fn NewParser_(
js_lexer.rangeOfIdentifier(p.source, loc),
p.allocator,
notes,
- "Multiple exports with the same name \"{any}\"",
+ "Multiple exports with the same name \"{s}\"",
.{std.mem.trim(u8, alias, "\"'")},
);
} else {
diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig
index 8493f3fab..55212f6f2 100644
--- a/src/libarchive/libarchive.zig
+++ b/src/libarchive/libarchive.zig
@@ -406,9 +406,9 @@ pub const Archive = struct {
// if the destination doesn't exist, we skip the whole thing since nothing can overwrite it.
if (std.fs.path.isAbsolute(root)) {
- break :brk std.fs.openIterableDirAbsolute(root, .{}) catch return;
+ break :brk (std.fs.openIterableDirAbsolute(root, .{}) catch return).dir;
} else {
- break :brk cwd.openIterableDir(root, .{}) catch return;
+ break :brk (cwd.openIterableDir(root, .{}) catch return).dir;
}
};
@@ -648,7 +648,7 @@ pub const Archive = struct {
comptime close_handles: bool,
comptime log: bool,
) !u32 {
- var dir: std.fs.Dir = brk: {
+ var dir: std.fs.IterableDir = brk: {
const cwd = std.fs.cwd();
cwd.makePath(
root,
@@ -661,7 +661,7 @@ pub const Archive = struct {
}
};
- defer if (comptime close_handles) dir.close();
+ defer if (comptime close_handles) dir.dir.close();
return try extractToDir(file_buffer, dir, ctx, FilePathAppender, appender, depth_to_skip, close_handles, log);
}
};
diff --git a/src/linker.zig b/src/linker.zig
index 61b5488fb..7427ef3b5 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -924,7 +924,7 @@ pub const Linker = struct {
return Fs.Path.init(try origin.joinAlloc(
linker.allocator,
- std.fmt.bufPrint(&hash_buf, "hash:{x}/", .{modkey.hash()}) catch unreachable,
+ std.fmt.bufPrint(&hash_buf, "hash:{any}/", .{bun.fmt.x(modkey.hash())}) catch unreachable,
dirname,
basename,
absolute_pathname.ext,
diff --git a/src/main.zig b/src/main.zig
index 22ba7e97d..dfcea84b5 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -26,9 +26,7 @@ const JavaScript = @import("bun.js/javascript.zig");
pub const io_mode = .blocking;
pub const bindgen = if (@import("builtin").is_test) undefined else @import("build_options").bindgen;
const Report = @import("./report.zig");
-pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace) noreturn {
- MainPanicHandler.handle_panic(msg, error_return_trace);
-}
+pub const panic = MainPanicHandler.handle_panic;
const CrashReporter = @import("./crash_reporter.zig");
diff --git a/src/napi/napi.zig b/src/napi/napi.zig
index a1c424247..dc685ffe1 100644
--- a/src/napi/napi.zig
+++ b/src/napi/napi.zig
@@ -1030,10 +1030,10 @@ pub export fn napi_fatal_error(location_ptr: ?[*:0]const u8, location_len: usize
const location = napiSpan(location_ptr, location_len);
if (location.len > 0) {
- bun.Global.panic("napi: {any}\n {any}", .{ message, location });
+ bun.Global.panic("napi: {s}\n {s}", .{ message, location });
}
- bun.Global.panic("napi: {any}", .{message});
+ bun.Global.panic("napi: {s}", .{message});
}
pub export fn napi_create_buffer(env: napi_env, length: usize, data: [*]*anyopaque, result: *napi_value) napi_status {
var buf = JSC.ExternalBuffer.create(null, @ptrCast([*]u8, data)[0..length], env, null, env.bunVM().allocator) catch {
diff --git a/src/node_module_bundle.zig b/src/node_module_bundle.zig
index 37f9f9984..22ef3010a 100644
--- a/src/node_module_bundle.zig
+++ b/src/node_module_bundle.zig
@@ -174,9 +174,9 @@ pub const NodeModuleBundle = struct {
) !string {
return try std.fmt.allocPrint(
allocator,
- "{x}/{any}",
+ "{any}/{s}",
.{
- this.bundle.packages[to.package_id].hash,
+ bun.fmt.x(this.bundle.packages[to.package_id].hash),
this.str(to.path),
123,
},
diff --git a/src/panic_handler.zig b/src/panic_handler.zig
index f07d2f6bb..cb7877b87 100644
--- a/src/panic_handler.zig
+++ b/src/panic_handler.zig
@@ -16,7 +16,7 @@ const Features = @import("./analytics/analytics_thread.zig").Features;
const HTTP = @import("http").AsyncHTTP;
const Report = @import("./report.zig");
-pub fn NewPanicHandler(comptime panic_func: fn (msg: []const u8, error_return_type: ?*std.builtin.StackTrace) noreturn) type {
+pub fn NewPanicHandler(comptime panic_func: @TypeOf(std.builtin.default_panic)) type {
return struct {
panic_count: usize = 0,
skip_next_panic: bool = false,
@@ -30,7 +30,7 @@ pub fn NewPanicHandler(comptime panic_func: fn (msg: []const u8, error_return_ty
.log = log,
};
}
- pub inline fn handle_panic(msg: []const u8, error_return_type: ?*std.builtin.StackTrace) noreturn {
+ pub fn handle_panic(msg: []const u8, error_return_type: ?*std.builtin.StackTrace, addr: ?usize) noreturn {
// This exists to ensure we flush all buffered output before panicking.
Output.flush();
@@ -39,7 +39,7 @@ pub fn NewPanicHandler(comptime panic_func: fn (msg: []const u8, error_return_ty
Output.disableBuffering();
// // We want to always inline the panic handler so it doesn't show up in the stacktrace.
- @call(.{ .modifier = .always_inline }, panic_func, .{ msg, error_return_type });
+ @call(.{ .modifier = .always_inline }, panic_func, .{ msg, error_return_type, addr });
}
};
}
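
The panic handler now mirrors `@TypeOf(std.builtin.default_panic)`, whose signature in the targeted Zig carries a third `?usize` return-address argument, and main.zig exports the handler directly as `pub const panic`. A minimal sketch of a root-level override with that shape, assuming the three-argument signature:

```zig
const std = @import("std");

// Hedged sketch of a root override matching std.builtin.default_panic.
pub fn panic(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace, ret_addr: ?usize) noreturn {
    _ = error_return_trace;
    _ = ret_addr;
    std.debug.print("panic: {s}\n", .{msg});
    std.os.abort();
}

pub fn main() void {
    @panic("boom"); // routed through the override above
}
```
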
diff --git a/src/report.zig b/src/report.zig
index 0a47a9e98..1e4259f6d 100644
--- a/src/report.zig
+++ b/src/report.zig
@@ -93,9 +93,9 @@ pub const CrashReportWriter = struct {
}
if (tilda) {
- Output.prettyError("\nCrash report saved to:\n <b>~{any}<r>\n", .{display_path});
+ Output.prettyError("\nCrash report saved to:\n <b>~{s}<r>\n", .{display_path});
} else {
- Output.prettyError("\nCrash report saved to:\n <b>{any}<r>\n", .{display_path});
+ Output.prettyError("\nCrash report saved to:\n <b>{s}<r>\n", .{display_path});
}
}
}
@@ -118,8 +118,8 @@ pub fn printMetadata() void {
crash_report_writer.print(
\\
\\<r>----- bun meta -----
- ++ "\nBun v" ++ Global.package_json_version_with_sha ++ " " ++ platform ++ " " ++ arch ++ " {any}\n" ++
- \\{any}: {}
+ ++ "\nBun v" ++ Global.package_json_version_with_sha ++ " " ++ platform ++ " " ++ arch ++ " {s}\n" ++
+ \\{s}: {}
\\
, .{
analytics_platform.version,
diff --git a/src/sync.zig b/src/sync.zig
index 262f5fa0d..9b73be127 100644
--- a/src/sync.zig
+++ b/src/sync.zig
@@ -610,7 +610,7 @@ pub const RwLock = if (@import("builtin").os.tag != .windows and @import("builti
.netbsd => extern struct {
ptr_magic: c_uint = 0x99990009,
ptr_interlock: switch (@import("builtin").target.cpu.arch) {
- .aarch64, .sparc, .x86_64, .i386 => u8,
+ .aarch64, .sparc, .x86_64, => u8,
.arm, .powerpc => c_int,
else => unreachable,
} = 0,
diff --git a/src/toml/toml_lexer.zig b/src/toml/toml_lexer.zig
index 585d1c913..2ec11c72b 100644
--- a/src/toml/toml_lexer.zig
+++ b/src/toml/toml_lexer.zig
@@ -95,7 +95,7 @@ pub const Lexer = struct {
pub fn addDefaultError(self: *Lexer, msg: []const u8) !void {
@setCold(true);
- self.addError(self.start, "{any}", .{msg}, true);
+ self.addError(self.start, "{s}", .{msg}, true);
return Error.SyntaxError;
}
@@ -325,7 +325,7 @@ pub const Lexer = struct {
if (std.fmt.parseFloat(f64, text)) |num| {
lexer.number = num;
} else |_| {
- try lexer.addSyntaxError(lexer.start, "Invalid number {any}", .{text});
+ try lexer.addSyntaxError(lexer.start, "Invalid number {s}", .{text});
}
}
}
@@ -1124,7 +1124,7 @@ pub const Lexer = struct {
}
};
- try lexer.addRangeError(lexer.range(), "Unexpected {any}", .{found}, true);
+ try lexer.addRangeError(lexer.range(), "Unexpected {s}", .{found}, true);
}
pub fn expectedString(self: *Lexer, text: string) !void {
@@ -1136,7 +1136,7 @@ pub const Lexer = struct {
}
};
- try self.addRangeError(self.range(), "Expected {any} but found {any}", .{ text, found }, true);
+ try self.addRangeError(self.range(), "Expected {s} but found {s}", .{ text, found }, true);
}
pub fn range(self: *Lexer) logger.Range {
diff --git a/src/walker_skippable.zig b/src/walker_skippable.zig
index 14cbcb9ce..c166d2324 100644
--- a/src/walker_skippable.zig
+++ b/src/walker_skippable.zig
@@ -92,7 +92,7 @@ pub fn next(self: *Walker) !?WalkerEntry {
}
}
return WalkerEntry{
- .dir = top.iter.dir,
+ .dir = .{.dir = top.iter.dir},
.basename = self.name_buffer.items[dirname_len..],
.path = self.name_buffer.items,
.kind = base.kind,