aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-03-04 00:20:22 -0800
committerGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-03-04 00:20:22 -0800
commit51fbbea1d3dc2b610fe5fe525229e56b581e5c8a (patch)
treec75e063c4eb7c99737fbf7a5a2014f9546e2e218
parent696710fd7aedee6054947aab002aaa853e9ee731 (diff)
downloadbun-jarred/upgrade-zig-2.tar.gz
bun-jarred/upgrade-zig-2.tar.zst
bun-jarred/upgrade-zig-2.zip
-rw-r--r--src/ast/base.zig2
-rw-r--r--src/blob.zig2
-rw-r--r--src/bundler.zig4
-rw-r--r--src/cache.zig8
-rw-r--r--src/cli.zig12
-rw-r--r--src/cli/create_command.zig17
-rw-r--r--src/cli/install_completions_command.zig6
-rw-r--r--src/cli/run_command.zig6
-rw-r--r--src/cli/upgrade_command.zig4
-rw-r--r--src/comptime_string_map.zig386
-rw-r--r--src/css_scanner.zig2
-rw-r--r--src/deps/zig-clap/clap/args.zig12
-rw-r--r--src/deps/zig-clap/clap/streaming.zig10
-rw-r--r--src/env_loader.zig2
-rw-r--r--src/fallback.version2
-rw-r--r--src/feature_flags.zig2
-rw-r--r--src/fs.zig8
-rw-r--r--src/global.zig7
-rw-r--r--src/http.zig11
-rw-r--r--src/http/mime_type.zig111
-rw-r--r--src/identity_context.zig2
-rw-r--r--src/install/install.zig22
-rw-r--r--src/install/lockfile.zig2
-rw-r--r--src/install/npm.zig60
-rw-r--r--src/install/resolvers/folder_resolver.zig2
-rw-r--r--src/javascript/jsc/bindings/exports.zig54
-rw-r--r--src/javascript/jsc/bindings/process.d.ts8
-rw-r--r--src/javascript/jsc/bindings/shimmer.zig4
-rw-r--r--src/javascript/jsc/javascript.zig91
-rw-r--r--src/javascript/jsc/node/types.zig6
-rw-r--r--src/js_ast.zig133
-rw-r--r--src/js_lexer.zig8
-rw-r--r--src/js_lexer/identifier_data.zig8
-rw-r--r--src/js_lexer_tables.zig15
-rw-r--r--src/js_parser/js_parser.zig167
-rw-r--r--src/js_printer.zig94
-rw-r--r--src/json_parser.zig47
-rw-r--r--src/libarchive/libarchive.zig4
-rw-r--r--src/linker.zig2
-rw-r--r--src/node_fallbacks.zig3
-rw-r--r--src/open.zig3
-rw-r--r--src/options.zig15
-rw-r--r--src/resolver/data_url.zig3
-rw-r--r--src/resolver/resolver.zig18
-rw-r--r--src/resolver/tsconfig_json.zig3
-rw-r--r--src/router.zig2
-rw-r--r--src/string_immutable.zig2
-rw-r--r--src/string_mutable.zig70
-rw-r--r--src/watcher.zig5
49 files changed, 1013 insertions, 454 deletions
diff --git a/src/ast/base.zig b/src/ast/base.zig
index 36800cd7b..827951027 100644
--- a/src/ast/base.zig
+++ b/src/ast/base.zig
@@ -28,7 +28,7 @@ pub const RefHashCtx = struct {
return key.hash();
}
- pub fn eql(_: @This(), ref: Ref, b: Ref) bool {
+ pub fn eql(_: @This(), ref: Ref, b: Ref, _: usize) bool {
return ref.asU64() == b.asU64();
}
};
diff --git a/src/blob.zig b/src/blob.zig
index 65679978d..b914c45a9 100644
--- a/src/blob.zig
+++ b/src/blob.zig
@@ -21,7 +21,7 @@ pub const Map = struct {
pub fn hash(_: @This(), s: u64) u32 {
return @truncate(u32, s);
}
- pub fn eql(_: @This(), a: u64, b: u64) bool {
+ pub fn eql(_: @This(), a: u64, b: u64, _: usize) bool {
return a == b;
}
};
diff --git a/src/bundler.zig b/src/bundler.zig
index 5791201d3..437dd3c9d 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -2976,7 +2976,7 @@ pub const Bundler = struct {
},
else => {
var abs_path = path.text;
- const file = try std.fs.openFileAbsolute(abs_path, .{ .read = true });
+ const file = try std.fs.openFileAbsolute(abs_path, .{ .mode = .read_only });
var stat = try file.stat();
return ServeResult{
.file = options.OutputFile.initFile(file, abs_path, stat.size),
@@ -3360,7 +3360,7 @@ pub const Transformer = struct {
var _log = logger.Log.init(allocator);
const absolutePath = resolve_path.joinAbs(transformer.cwd, .auto, entry_point);
- const file = try std.fs.openFileAbsolute(absolutePath, std.fs.File.OpenFlags{ .read = true });
+ const file = try std.fs.openFileAbsolute(absolutePath, std.fs.File.OpenFlags{ .mode = .read_only });
defer {
if (care_about_closing_files) {
file.close();
diff --git a/src/cache.zig b/src/cache.zig
index 25acce57a..fee17e30b 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -92,7 +92,7 @@ pub const Fs = struct {
const file_handle: std.fs.File = if (_file_handle) |__file|
std.fs.File{ .handle = __file }
else
- try std.fs.openFileAbsoluteZ(path, .{ .read = true });
+ try std.fs.openFileAbsoluteZ(path, .{ .mode = .read_only });
defer {
if (rfs.needToCloseFiles() and _file_handle == null) {
@@ -127,10 +127,10 @@ pub const Fs = struct {
if (_file_handle == null) {
if (FeatureFlags.store_file_descriptors and dirname_fd > 0) {
- file_handle = std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .read = true }) catch |err| brk: {
+ file_handle = std.fs.Dir.openFile(std.fs.Dir{ .fd = dirname_fd }, std.fs.path.basename(path), .{ .mode = .read_only }) catch |err| brk: {
switch (err) {
error.FileNotFound => {
- const handle = try std.fs.openFileAbsolute(path, .{ .read = true });
+ const handle = try std.fs.openFileAbsolute(path, .{ .mode = .read_only });
Output.prettyErrorln(
"<r><d>Internal error: directory mismatch for directory \"{s}\", fd {d}<r>. You don't need to do anything, but this indicates a bug.",
.{ path, dirname_fd },
@@ -141,7 +141,7 @@ pub const Fs = struct {
}
};
} else {
- file_handle = try std.fs.openFileAbsolute(path, .{ .read = true });
+ file_handle = try std.fs.openFileAbsolute(path, .{ .mode = .read_only });
}
}
diff --git a/src/cli.zig b/src/cli.zig
index d6c83d3ce..dcef720b0 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -140,7 +140,7 @@ pub const Arguments = struct {
var paths = [_]string{ cwd, filename };
const outpath = try std.fs.path.resolve(allocator, &paths);
defer allocator.free(outpath);
- var file = try std.fs.openFileAbsolute(outpath, std.fs.File.OpenFlags{ .read = true, .write = false });
+ var file = try std.fs.openFileAbsolute(outpath, std.fs.File.OpenFlags{ .mode = .read_only });
defer file.close();
const stats = try file.stat();
return try file.readToEndAlloc(allocator, stats.size);
@@ -206,7 +206,7 @@ pub const Arguments = struct {
}
fn loadConfigPath(allocator: std.mem.Allocator, auto_loaded: bool, config_path: [:0]const u8, ctx: *Command.Context, comptime cmd: Command.Tag) !void {
- var config_file = std.fs.openFileAbsoluteZ(config_path, .{ .read = true }) catch |err| {
+ var config_file = std.fs.openFileAbsoluteZ(config_path, .{ .mode = .read_only }) catch |err| {
if (auto_loaded) return;
Output.prettyErrorln("<r><red>error<r>: {s} opening config \"{s}\"", .{
@errorName(err),
@@ -694,7 +694,7 @@ pub const PrintBundleCommand = struct {
var out_buffer: [_global.MAX_PATH_BYTES]u8 = undefined;
var stdout = std.io.getStdOut();
- var input = try std.fs.openFileAbsolute(try std.os.realpath(entry_point, &out_buffer), .{ .read = true });
+ var input = try std.fs.openFileAbsolute(try std.os.realpath(entry_point, &out_buffer), .{ .mode = .read_only });
const params = comptime [_]Arguments.ParamType{
clap.parseParam("--summary Peek inside the .bun") catch unreachable,
};
@@ -1083,7 +1083,7 @@ pub const Command = struct {
var file_path = script_name_to_search;
const file_: std.fs.File.OpenError!std.fs.File = brk: {
if (script_name_to_search[0] == std.fs.path.sep) {
- break :brk std.fs.openFileAbsolute(script_name_to_search, .{ .read = true });
+ break :brk std.fs.openFileAbsolute(script_name_to_search, .{ .mode = .read_only });
} else if (!strings.hasPrefix(script_name_to_search, "..") and script_name_to_search[0] != '~') {
const file_pathZ = brk2: {
if (!strings.hasPrefix(file_path, "./")) {
@@ -1098,7 +1098,7 @@ pub const Command = struct {
}
};
- break :brk std.fs.cwd().openFileZ(file_pathZ, .{ .read = true });
+ break :brk std.fs.cwd().openFileZ(file_pathZ, .{ .mode = .read_only });
} else {
var path_buf: [_global.MAX_PATH_BYTES]u8 = undefined;
const cwd = std.os.getcwd(&path_buf) catch break :possibly_open_with_bun_js;
@@ -1113,7 +1113,7 @@ pub const Command = struct {
if (file_path.len == 0) break :possibly_open_with_bun_js;
script_name_buf[file_path.len] = 0;
var file_pathZ = script_name_buf[0..file_path.len :0];
- break :brk std.fs.openFileAbsoluteZ(file_pathZ, .{ .read = true });
+ break :brk std.fs.openFileAbsoluteZ(file_pathZ, .{ .mode = .read_only });
}
};
diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig
index 3e39ae1b3..f81832b0c 100644
--- a/src/cli/create_command.zig
+++ b/src/cli/create_command.zig
@@ -44,6 +44,7 @@ const Headers = @import("http").Headers;
const CopyFile = @import("../copy_file.zig");
var bun_path_buf: [_global.MAX_PATH_BYTES]u8 = undefined;
const Futex = @import("../futex.zig");
+const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap;
const target_nextjs_version = "12.1.0";
pub var initialized_store = false;
@@ -378,7 +379,7 @@ pub const CreateCommand = struct {
const destination = try filesystem.dirname_store.append([]const u8, resolve_path.joinAbs(filesystem.top_level_dir, .auto, dirname));
var progress = std.Progress{};
- var node = try progress.start(try ProgressBuf.print("Loading {s}", .{template}), 0);
+ var node = progress.start(try ProgressBuf.print("Loading {s}", .{template}), 0);
progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
// alacritty is fast
@@ -616,7 +617,7 @@ pub const CreateCommand = struct {
defer outfile.close();
defer node_.completeOne();
- var infile = try entry.dir.openFile(entry.basename, .{ .read = true });
+ var infile = try entry.dir.openFile(entry.basename, .{ .mode = .read_only });
defer infile.close();
// Assumption: you only really care about making sure something that was executable is still executable
@@ -640,7 +641,7 @@ pub const CreateCommand = struct {
try FileCopier.copy(destination_dir, &walker_, node, &progress);
- package_json_file = destination_dir.openFile("package.json", .{ .read = true, .write = true }) catch null;
+ package_json_file = destination_dir.openFile("package.json", .{ .mode = .read_write }) catch null;
read_package_json: {
if (package_json_file) |pkg| {
@@ -765,7 +766,7 @@ pub const CreateCommand = struct {
var has_react_scripts = false;
const Prune = struct {
- pub const packages = std.ComptimeStringMap(void, .{
+ pub const packages = ComptimeStringMap(void, .{
.{ "@parcel/babel-preset", void{} },
.{ "@parcel/core", void{} },
.{ "@swc/cli", void{} },
@@ -1249,7 +1250,7 @@ pub const CreateCommand = struct {
var public_index_html_parts = [_]string{ destination, "public/index.html" };
var public_index_html_path = filesystem.absBuf(&public_index_html_parts, &bun_path_buf);
- const public_index_html_file = std.fs.openFileAbsolute(public_index_html_path, .{ .read = true, .write = true }) catch break :bail;
+ const public_index_html_file = std.fs.openFileAbsolute(public_index_html_path, .{ .mode = .read_write }) catch break :bail;
defer public_index_html_file.close();
const file_extensions_to_try = [_]string{ ".tsx", ".ts", ".jsx", ".js", ".mts", ".mcjs" };
@@ -1759,9 +1760,7 @@ pub const Example = struct {
var path: [:0]u8 = home_dir_buf[0 .. entry.name.len + 1 + "package.json".len :0];
- folder.accessZ(path, .{
- .read = true,
- }) catch continue :loop;
+ folder.accessZ(path, .{ .mode = .read_only }) catch continue :loop;
try examples.append(
Example{
@@ -2113,7 +2112,7 @@ pub const CreateListExamplesCommand = struct {
const time = std.time.nanoTimestamp();
var progress = std.Progress{};
- var node = try progress.start("Fetching manifest", 0);
+ var node = progress.start("Fetching manifest", 0);
progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
progress.refresh();
diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig
index 23b9a9a34..385d5c40e 100644
--- a/src/cli/install_completions_command.zig
+++ b/src/cli/install_completions_command.zig
@@ -305,7 +305,7 @@ pub const InstallCompletionsCommand = struct {
std.mem.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshrc");
zshrc_filepath[zdot_dir.len + "/.zshrc".len] = 0;
var filepath = zshrc_filepath[0 .. zdot_dir.len + "/.zshrc".len :0];
- break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .read = true, .write = true }) catch break :first;
+ break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .mode = .read_write }) catch break :first;
}
}
@@ -315,7 +315,7 @@ pub const InstallCompletionsCommand = struct {
std.mem.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshrc");
zshrc_filepath[zdot_dir.len + "/.zshrc".len] = 0;
var filepath = zshrc_filepath[0 .. zdot_dir.len + "/.zshrc".len :0];
- break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .read = true, .write = true }) catch break :second;
+ break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .mode = .read_write }) catch break :second;
}
}
@@ -325,7 +325,7 @@ pub const InstallCompletionsCommand = struct {
std.mem.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshenv");
zshrc_filepath[zdot_dir.len + "/.zshenv".len] = 0;
var filepath = zshrc_filepath[0 .. zdot_dir.len + "/.zshenv".len :0];
- break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .read = true, .write = true }) catch break :third;
+ break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .mode = .read_write }) catch break :third;
}
}
diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig
index 8a8451cea..e1e4bf22c 100644
--- a/src/cli/run_command.zig
+++ b/src/cli/run_command.zig
@@ -591,7 +591,7 @@ pub const RunCommand = struct {
var file_path = script_name_to_search;
const file_: std.fs.File.OpenError!std.fs.File = brk: {
if (script_name_to_search[0] == std.fs.path.sep) {
- break :brk std.fs.openFileAbsolute(script_name_to_search, .{ .read = true });
+ break :brk std.fs.openFileAbsolute(script_name_to_search, .{ .mode = .read_only });
} else {
const cwd = std.os.getcwd(&path_buf) catch break :possibly_open_with_bun_js;
path_buf[cwd.len] = std.fs.path.sep;
@@ -605,7 +605,7 @@ pub const RunCommand = struct {
if (file_path.len == 0) break :possibly_open_with_bun_js;
path_buf2[file_path.len] = 0;
var file_pathZ = path_buf2[0..file_path.len :0];
- break :brk std.fs.openFileAbsoluteZ(file_pathZ, .{ .read = true });
+ break :brk std.fs.openFileAbsoluteZ(file_pathZ, .{ .mode = .read_only });
}
};
@@ -925,7 +925,7 @@ pub const RunCommand = struct {
if (path_for_which.len > 0) {
if (which(&path_buf, path_for_which, this_bundler.fs.top_level_dir, script_name_to_search)) |destination| {
- // var file = std.fs.openFileAbsoluteZ(destination, .{ .read = true }) catch |err| {
+ // var file = std.fs.openFileAbsoluteZ(destination, .{ .mode = .read_only }) catch |err| {
// if (!log_errors) return false;
// Output.prettyErrorln("<r>error: <red>{s}<r> opening file: \"{s}\"", .{ err, std.mem.span(destination) });
diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig
index d6e2527fe..ce05a1dfc 100644
--- a/src/cli/upgrade_command.zig
+++ b/src/cli/upgrade_command.zig
@@ -366,7 +366,7 @@ pub const UpgradeCommand = struct {
{
var refresher = std.Progress{};
- var progress = try refresher.start("Fetching version tags", 0);
+ var progress = refresher.start("Fetching version tags", 0);
version = (try getLatestVersion(ctx.allocator, &env_loader, &refresher, progress, false)) orelse return;
@@ -399,7 +399,7 @@ pub const UpgradeCommand = struct {
Output.flush();
var refresher = std.Progress{};
- var progress = try refresher.start("Downloading", version.size);
+ var progress = refresher.start("Downloading", version.size);
refresher.refresh();
var async_http = ctx.allocator.create(HTTP.AsyncHTTP) catch unreachable;
var zip_file_buffer = try ctx.allocator.create(MutableString);
diff --git a/src/comptime_string_map.zig b/src/comptime_string_map.zig
new file mode 100644
index 000000000..f6ce6db8c
--- /dev/null
+++ b/src/comptime_string_map.zig
@@ -0,0 +1,386 @@
+const std = @import("std");
+const mem = std.mem;
+const strings = @import("./string_immutable.zig");
+
+/// Comptime string map optimized for small sets of disparate string keys.
+/// Works by separating the keys by length at comptime and only checking strings of
+/// equal length at runtime.
+///
+/// `kvs` expects a list literal containing list literals or an array/slice of structs
+/// where `.@"0"` is the `[]const u8` key and `.@"1"` is the associated value of type `V`.
+/// TODO: https://github.com/ziglang/zig/issues/4335
+pub fn ComptimeStringMapWithKeyType(comptime KeyType: type, comptime V: type, comptime kvs_list: anytype) type {
+ const KV = struct {
+ key: []const KeyType,
+ value: V,
+ };
+
+ const precomputed = comptime blk: {
+ @setEvalBranchQuota(2000);
+
+ var sorted_kvs: [kvs_list.len]KV = undefined;
+ const lenAsc = (struct {
+ fn lenAsc(context: void, a: KV, b: KV) bool {
+ _ = context;
+ if (a.key.len != b.key.len) {
+ return a.key.len < b.key.len;
+ }
+ // https://stackoverflow.com/questions/11227809/why-is-processing-a-sorted-array-faster-than-processing-an-unsorted-array
+ @setEvalBranchQuota(4000);
+ return std.mem.order(KeyType, a.key, b.key) == .lt;
+ }
+ }).lenAsc;
+ if (KeyType == u8) {
+ for (kvs_list) |kv, i| {
+ if (V != void) {
+ sorted_kvs[i] = .{ .key = kv.@"0", .value = kv.@"1" };
+ } else {
+ sorted_kvs[i] = .{ .key = kv.@"0", .value = {} };
+ }
+ }
+ } else {
+ @compileError("Not implemented for this key type");
+ }
+ std.sort.sort(KV, &sorted_kvs, {}, lenAsc);
+ const min_len = sorted_kvs[0].key.len;
+ const max_len = sorted_kvs[sorted_kvs.len - 1].key.len;
+ var len_indexes: [max_len + 1]usize = undefined;
+ var len: usize = 0;
+ var i: usize = 0;
+ while (len <= max_len) : (len += 1) {
+ // find the first keyword len == len
+ while (len > sorted_kvs[i].key.len) {
+ i += 1;
+ }
+ len_indexes[len] = i;
+ }
+ break :blk .{
+ .min_len = min_len,
+ .max_len = max_len,
+ .sorted_kvs = sorted_kvs,
+ .len_indexes = len_indexes,
+ };
+ };
+
+ return struct {
+ const len_indexes = precomputed.len_indexes;
+ pub const kvs = precomputed.sorted_kvs;
+
+ pub fn has(str: []const KeyType) bool {
+ return get(str) != null;
+ }
+
+ pub fn getWithLength(str: []const KeyType, comptime len: usize) ?V {
+ const end = comptime brk: {
+ var i = len_indexes[len];
+ while (i < kvs.len and kvs[i].key.len == len) : (i += 1) {}
+ break :brk i;
+ };
+
+ comptime var i = len_indexes[len];
+
+ inline while (i < end) : (i += 1) {
+ if (strings.eqlComptimeCheckLenWithType(KeyType, str, kvs[i].key, false)) {
+ return kvs[i].value;
+ }
+ }
+
+ return null;
+ }
+
+ pub fn get(str: []const KeyType) ?V {
+ if (str.len < precomputed.min_len or str.len > precomputed.max_len)
+ return null;
+
+ comptime var i: usize = precomputed.min_len;
+ inline while (i <= precomputed.max_len) : (i += 1) {
+ if (str.len == i) {
+ return getWithLength(str, i);
+ }
+ }
+
+ return null;
+ }
+ };
+}
+
+pub fn ComptimeStringMap(comptime V: type, comptime kvs_list: anytype) type {
+ return ComptimeStringMapWithKeyType(u8, V, kvs_list);
+}
+
+pub fn ComptimeStringMap16(comptime V: type, comptime kvs_list: anytype) type {
+ return ComptimeStringMapWithKeyType(u16, V, kvs_list);
+}
+
+const TestEnum = enum {
+ A,
+ B,
+ C,
+ D,
+ E,
+};
+
+test "ComptimeStringMap list literal of list literals" {
+ const map = ComptimeStringMap(TestEnum, .{
+ .{ "these", .D },
+ .{ "have", .A },
+ .{ "nothing", .B },
+ .{ "incommon", .C },
+ .{ "samelen", .E },
+ });
+
+ try testMap(map);
+}
+
+test "ComptimeStringMap array of structs" {
+ const KV = struct {
+ @"0": []const u8,
+ @"1": TestEnum,
+ };
+ const map = ComptimeStringMap(TestEnum, [_]KV{
+ .{ .@"0" = "these", .@"1" = .D },
+ .{ .@"0" = "have", .@"1" = .A },
+ .{ .@"0" = "nothing", .@"1" = .B },
+ .{ .@"0" = "incommon", .@"1" = .C },
+ .{ .@"0" = "samelen", .@"1" = .E },
+ });
+
+ try testMap(map);
+}
+
+test "ComptimeStringMap slice of structs" {
+ const KV = struct {
+ @"0": []const u8,
+ @"1": TestEnum,
+ };
+ const slice: []const KV = &[_]KV{
+ .{ .@"0" = "these", .@"1" = .D },
+ .{ .@"0" = "have", .@"1" = .A },
+ .{ .@"0" = "nothing", .@"1" = .B },
+ .{ .@"0" = "incommon", .@"1" = .C },
+ .{ .@"0" = "samelen", .@"1" = .E },
+ };
+ const map = ComptimeStringMap(TestEnum, slice);
+
+ try testMap(map);
+}
+
+fn testMap(comptime map: anytype) !void {
+ try std.testing.expectEqual(TestEnum.A, map.get("have").?);
+ try std.testing.expectEqual(TestEnum.B, map.get("nothing").?);
+ try std.testing.expect(null == map.get("missing"));
+ try std.testing.expectEqual(TestEnum.D, map.get("these").?);
+ try std.testing.expectEqual(TestEnum.E, map.get("samelen").?);
+
+ try std.testing.expect(!map.has("missing"));
+ try std.testing.expect(map.has("these"));
+}
+
+test "ComptimeStringMap void value type, slice of structs" {
+ const KV = struct {
+ @"0": []const u8,
+ };
+ const slice: []const KV = &[_]KV{
+ .{ .@"0" = "these" },
+ .{ .@"0" = "have" },
+ .{ .@"0" = "nothing" },
+ .{ .@"0" = "incommon" },
+ .{ .@"0" = "samelen" },
+ };
+ const map = ComptimeStringMap(void, slice);
+
+ try testSet(map);
+}
+
+test "ComptimeStringMap void value type, list literal of list literals" {
+ const map = ComptimeStringMap(void, .{
+ .{"these"},
+ .{"have"},
+ .{"nothing"},
+ .{"incommon"},
+ .{"samelen"},
+ });
+
+ try testSet(map);
+}
+
+fn testSet(comptime map: anytype) !void {
+ try std.testing.expectEqual({}, map.get("have").?);
+ try std.testing.expectEqual({}, map.get("nothing").?);
+ try std.testing.expect(null == map.get("missing"));
+ try std.testing.expectEqual({}, map.get("these").?);
+ try std.testing.expectEqual({}, map.get("samelen").?);
+
+ try std.testing.expect(!map.has("missing"));
+ try std.testing.expect(map.has("these"));
+}
+
+const TestEnum2 = enum {
+ A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T,
+ U,
+ V,
+ W,
+ X,
+ Y,
+ FZ,
+ FA,
+ FB,
+ FC,
+ FD,
+ FE,
+ FF,
+ FG,
+ FH,
+ FI,
+ FJ,
+ FK,
+ FL,
+
+ pub const map = ComptimeStringMap(TestEnum2, .{
+ .{ "these", .A },
+ .{ "have", .B },
+ .{ "nothing", .C },
+ .{ "nothinz", .D },
+ .{ "nothinc", .E },
+ .{ "nothina", .F },
+ .{ "nothinb", .G },
+ .{ "nothiaa", .H },
+ .{ "nothaaa", .I },
+ .{ "notaaaa", .J },
+ .{ "noaaaaa", .K },
+ .{ "naaaaaa", .L },
+ .{ "incommon", .M },
+ .{ "ancommon", .N },
+ .{ "ab1ommon", .O },
+ .{ "ab2ommon", .P },
+ .{ "ab3ommon", .Q },
+ .{ "ab4ommon", .R },
+ .{ "ab5ommon", .S },
+ .{ "ab6ommon", .T },
+ .{ "ab7ommon", .U },
+ .{ "ab8ommon", .V },
+ .{ "ab9ommon", .W },
+ .{ "abAommon", .X },
+ .{ "abBommon", .Y },
+ .{ "abCommon", .FZ },
+ .{ "abZommon", .FA },
+ .{ "abEommon", .FB },
+ .{ "abFommon", .FC },
+ .{ "ab10omon", .FD },
+ .{ "ab11omon", .FE },
+ .{ "ab12omon", .FF },
+ .{ "ab13omon", .FG },
+ .{ "ab14omon", .FH },
+ .{ "ab15omon", .FI },
+ .{ "ab16omon", .FJ },
+ .{ "ab16omon1", .FH },
+ .{ "samelen", .FK },
+ .{ "0", .FL },
+ .{ "00", .FL },
+ });
+
+ pub const official = std.ComptimeStringMap(TestEnum2, .{
+ .{ "these", .A },
+ .{ "have", .B },
+ .{ "naaaaaa", .L },
+ .{ "noaaaaa", .K },
+ .{ "notaaaa", .J },
+ .{ "nothaaa", .I },
+ .{ "nothiaa", .H },
+ .{ "nothina", .F },
+ .{ "nothinb", .G },
+ .{ "nothinc", .E },
+ .{ "nothing", .C },
+ .{ "nothinz", .D },
+ .{ "incommon", .M },
+ .{ "ancommon", .N },
+ .{ "ab1ommon", .O },
+ .{ "ab2ommon", .P },
+ .{ "ab3ommon", .Q },
+ .{ "ab4ommon", .R },
+ .{ "ab5ommon", .S },
+ .{ "ab6ommon", .T },
+ .{ "ab7ommon", .U },
+ .{ "ab8ommon", .V },
+ .{ "ab9ommon", .W },
+ .{ "abAommon", .X },
+ .{ "abBommon", .Y },
+ .{ "abCommon", .FZ },
+ .{ "abZommon", .FA },
+ .{ "abEommon", .FB },
+ .{ "abFommon", .FC },
+ .{ "ab10omon", .FD },
+ .{ "ab11omon", .FE },
+ .{ "ab12omon", .FF },
+ .{ "ab13omon", .FG },
+ .{ "ab14omon", .FH },
+ .{ "ab15omon", .FI },
+ .{ "ab16omon", .FJ },
+ .{ "samelen", .FK },
+ .{ "ab16omon1", .FH },
+ .{ "0", .FL },
+ .{ "00", .FL },
+ });
+};
+
+pub fn compareString(input: []const u8) !void {
+ var str = try std.heap.page_allocator.dupe(u8, input);
+ if (TestEnum2.map.has(str) != TestEnum2.official.has(str)) {
+ std.debug.panic("{s} - TestEnum2.map.has(str) ({d}) != TestEnum2.official.has(str) ({d})", .{
+ str,
+ @boolToInt(TestEnum2.map.has(str)),
+ @boolToInt(TestEnum2.official.has(str)),
+ });
+ }
+
+ std.debug.print("For string: \"{s}\" (has a match? {d})\n", .{ str, @boolToInt(TestEnum2.map.has(str)) });
+
+ var i: usize = 0;
+ var is_eql = false;
+ var timer = try std.time.Timer.start();
+
+ while (i < 99999999) : (i += 1) {
+ is_eql = @call(.{ .modifier = .never_inline }, TestEnum2.map.has, .{str});
+ }
+ const new = timer.lap();
+
+ std.debug.print("- new {}\n", .{std.fmt.fmtDuration(new)});
+
+ i = 0;
+ while (i < 99999999) : (i += 1) {
+ is_eql = @call(.{ .modifier = .never_inline }, TestEnum2.official.has, .{str});
+ }
+
+ const _std = timer.lap();
+
+ std.debug.print("- std {}\n\n", .{std.fmt.fmtDuration(_std)});
+}
+
+pub fn main() anyerror!void {
+ try compareString("naaaaaa");
+ try compareString("nothinz");
+ try compareString("these");
+ try compareString("incommon");
+ try compareString("noMatch");
+ try compareString("0");
+ try compareString("00");
+}
diff --git a/src/css_scanner.zig b/src/css_scanner.zig
index 059f7f23a..187f1310b 100644
--- a/src/css_scanner.zig
+++ b/src/css_scanner.zig
@@ -1267,7 +1267,7 @@ pub fn NewBundler(
const watcher_index = this.watcher.indexOf(hash);
if (watcher_index == null) {
- var file = try std.fs.openFileAbsolute(absolute_path, .{ .read = true });
+ var file = try std.fs.openFileAbsolute(absolute_path, .{ .mode = .read_only });
try this.watcher.appendFile(file.handle, absolute_path, hash, .css, 0, null, true);
if (this.watcher.watchloop_handle == null) {
diff --git a/src/deps/zig-clap/clap/args.zig b/src/deps/zig-clap/clap/args.zig
index c4b6c154e..6f98e33a6 100644
--- a/src/deps/zig-clap/clap/args.zig
+++ b/src/deps/zig-clap/clap/args.zig
@@ -46,7 +46,7 @@ test "SliceIterator" {
/// An argument iterator which wraps the ArgIterator in ::std.
/// On windows, this iterator allocates.
pub const OsIterator = struct {
- const Error = process.ArgIterator.NextError;
+ const Error = process.ArgIterator.InitError;
arena: heap.ArenaAllocator,
args: process.ArgIterator,
@@ -62,7 +62,7 @@ pub const OsIterator = struct {
.args = process.args(),
.exe_arg = undefined,
};
- res.exe_arg = try res.next();
+ res.exe_arg = res.next();
return res;
}
@@ -70,12 +70,8 @@ pub const OsIterator = struct {
iter.arena.deinit();
}
- pub fn next(iter: *OsIterator) Error!?[:0]const u8 {
- if (builtin.os.tag == .windows) {
- return try iter.args.next(&iter.arena.allocator) orelse return null;
- } else {
- return iter.args.nextPosix();
- }
+ pub fn next(iter: *OsIterator) ?[:0]const u8 {
+ return iter.args.next();
}
};
diff --git a/src/deps/zig-clap/clap/streaming.zig b/src/deps/zig-clap/clap/streaming.zig
index 702e9e6d5..e3948e33b 100644
--- a/src/deps/zig-clap/clap/streaming.zig
+++ b/src/deps/zig-clap/clap/streaming.zig
@@ -49,7 +49,7 @@ pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
.chaining => |state| return try parser.chainging(state),
.rest_are_positional => {
const param = parser.positionalParam() orelse unreachable;
- const value = (try parser.iter.next()) orelse return null;
+ const value = parser.iter.next() orelse return null;
return Arg(Id){ .param = param, .value = value };
},
}
@@ -82,7 +82,7 @@ pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
if (maybe_value) |v|
break :blk v;
- break :blk (try parser.iter.next()) orelse brk2: {
+ break :blk parser.iter.next() orelse brk2: {
if (param.takes_value != .one_optional)
return parser.err(arg, .{ .long = name }, error.MissingValue);
@@ -105,7 +105,7 @@ pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
// arguments.
if (mem.eql(u8, arg, "--")) {
parser.state = .rest_are_positional;
- const value = (try parser.iter.next()) orelse return null;
+ const value = parser.iter.next() orelse return null;
return Arg(Id){ .param = param, .value = value };
}
@@ -148,7 +148,7 @@ pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
}
if (arg.len <= next_index) {
- const value = (try parser.iter.next()) orelse
+ const value = parser.iter.next() orelse
return parser.err(arg, .{ .short = short }, error.MissingValue);
return Arg(Id){ .param = param, .value = value };
@@ -190,7 +190,7 @@ pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
};
fn parseNextArg(parser: *@This()) !?ArgInfo {
- const full_arg = (try parser.iter.next()) orelse return null;
+ const full_arg = parser.iter.next() orelse return null;
if (mem.eql(u8, full_arg, "--") or mem.eql(u8, full_arg, "-"))
return ArgInfo{ .arg = full_arg, .kind = .positional };
if (mem.startsWith(u8, full_arg, "--"))
diff --git a/src/env_loader.zig b/src/env_loader.zig
index b75d47cec..8befe4ec1 100644
--- a/src/env_loader.zig
+++ b/src/env_loader.zig
@@ -870,7 +870,7 @@ pub const Loader = struct {
return;
}
- var file = dir.openFile(base, .{ .read = true }) catch |err| {
+ var file = dir.openFile(base, .{ .mode = .read_only }) catch |err| {
switch (err) {
error.FileNotFound => {
// prevent retrying
diff --git a/src/fallback.version b/src/fallback.version
index 82270a367..cc80fd43a 100644
--- a/src/fallback.version
+++ b/src/fallback.version
@@ -1 +1 @@
-d570fd92dbb9dbbb \ No newline at end of file
+3d00849be45e12c2 \ No newline at end of file
diff --git a/src/feature_flags.zig b/src/feature_flags.zig
index d61decd06..53456928e 100644
--- a/src/feature_flags.zig
+++ b/src/feature_flags.zig
@@ -1,6 +1,6 @@
const env = @import("env.zig");
pub const strong_etags_for_built_files = true;
-pub const keep_alive = true;
+pub const keep_alive = false;
// it just doesn't work well.
pub const use_std_path_relative = false;
diff --git a/src/fs.zig b/src/fs.zig
index 19af52aeb..d5a100448 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -592,12 +592,12 @@ pub const FileSystem = struct {
inline fn _fetchCacheFile(fs: *RealFS, basename: string) !std.fs.File {
var parts = [_]string{ "node_modules", ".cache", basename };
var path = fs.parent_fs.join(&parts);
- return std.fs.cwd().openFile(path, .{ .write = true, .read = true, .lock = .Shared }) catch {
+ return std.fs.cwd().openFile(path, .{ .mode = .read_write, .lock = .Shared }) catch {
path = fs.parent_fs.join(parts[0..2]);
try std.fs.cwd().makePath(path);
path = fs.parent_fs.join(&parts);
- return try std.fs.cwd().createFile(path, .{ .read = true, .lock = .Shared });
+ return try std.fs.cwd().createFile(path, .{ .mode = .read_write, .lock = .Shared });
};
}
@@ -733,7 +733,7 @@ pub const FileSystem = struct {
}
pub fn modKey(fs: *RealFS, path: string) anyerror!ModKey {
- var file = try std.fs.openFileAbsolute(path, std.fs.File.OpenFlags{ .read = true });
+ var file = try std.fs.openFileAbsolute(path, std.fs.File.OpenFlags{ .mode = .read_only });
defer {
if (fs.needToCloseFiles()) {
file.close();
@@ -948,7 +948,7 @@ pub const FileSystem = struct {
var symlink: []const u8 = "";
if (is_symlink) {
- var file = if (existing_fd != 0) std.fs.File{ .handle = existing_fd } else try std.fs.openFileAbsoluteZ(absolute_path_c, .{ .read = true });
+ var file = if (existing_fd != 0) std.fs.File{ .handle = existing_fd } else try std.fs.openFileAbsoluteZ(absolute_path_c, .{ .mode = .read_only });
setMaxFd(file.handle);
defer {
diff --git a/src/global.zig b/src/global.zig
index bd287abfb..ad4e4afd3 100644
--- a/src/global.zig
+++ b/src/global.zig
@@ -13,6 +13,7 @@ pub const C = @import("c.zig");
pub const FeatureFlags = @import("feature_flags.zig");
const root = @import("root");
pub const meta = @import("./meta.zig");
+pub const ComptimeStringMap = @import("./comptime_string_map.zig").ComptimeStringMap;
pub const Output = struct {
// These are threadlocal so we don't have stdout/stderr writing on top of each other
@@ -46,7 +47,7 @@ pub const Output = struct {
if (comptime Environment.isWasm)
return StreamType;
- return std.io.BufferedWriter(4096, @typeInfo(std.meta.declarationInfo(StreamType, "writer").data.Fn.fn_type).Fn.return_type.?);
+ return std.io.BufferedWriter(4096, @TypeOf(StreamType.writer(undefined)));
}
}.getBufferedStream();
@@ -204,7 +205,7 @@ pub const Output = struct {
}
}
- pub const WriterType: type = @typeInfo(std.meta.declarationInfo(Source.StreamType, "writer").data.Fn.fn_type).Fn.return_type.?;
+ pub const WriterType: type = @TypeOf(Source.StreamType.writer(undefined));
pub fn errorWriter() WriterType {
std.debug.assert(source_set);
@@ -355,7 +356,7 @@ pub const Output = struct {
// </r> - reset
// <r> - reset
pub const ED = "\x1b[";
- pub const color_map = std.ComptimeStringMap(string, .{
+ pub const color_map = ComptimeStringMap(string, .{
&.{ "black", ED ++ "30m" },
&.{ "blue", ED ++ "34m" },
&.{ "b", ED ++ "1m" },
diff --git a/src/http.zig b/src/http.zig
index 168982077..45d56a70a 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -522,7 +522,7 @@ pub const RequestContext = struct {
var absolute_path = resolve_path.joinAbs(this.bundler.options.routes.static_dir, .auto, relative_unrooted_path);
if (stat.kind == .SymLink) {
- file.* = std.fs.openFileAbsolute(absolute_path, .{ .read = true }) catch return null;
+ file.* = std.fs.openFileAbsolute(absolute_path, .{ .mode = .read_only }) catch return null;
absolute_path = std.os.getFdPath(
file.handle,
@@ -1800,8 +1800,11 @@ pub const RequestContext = struct {
reloader = Api.Reloader.fast_refresh;
}
}
+
const welcome_message = Api.WebsocketMessageWelcome{
- .epoch = WebsocketHandler.toTimestamp(handler.ctx.timer.start_time),
+ .epoch = WebsocketHandler.toTimestamp(
+ @intCast(u64, (handler.ctx.timer.started.timestamp.tv_sec * std.time.ns_per_s)) + @intCast(u64, handler.ctx.timer.started.timestamp.tv_nsec),
+ ),
.javascript_reloader = reloader,
.cwd = handler.ctx.bundler.fs.top_level_dir,
};
@@ -1873,7 +1876,7 @@ pub const RequestContext = struct {
var path_buf = _global.constStrToU8(file_path);
path_buf.ptr[path_buf.len] = 0;
var file_path_z: [:0]u8 = path_buf.ptr[0..path_buf.len :0];
- const file = std.fs.openFileAbsoluteZ(file_path_z, .{ .read = true }) catch |err| {
+ const file = std.fs.openFileAbsoluteZ(file_path_z, .{ .mode = .read_only }) catch |err| {
Output.prettyErrorln("<r><red>ERR:<r>{s} opening file <b>{s}<r> <r>", .{ @errorName(err), full_build.file_path });
continue;
};
@@ -2794,7 +2797,7 @@ pub const RequestContext = struct {
const fd = if (resolve_result.file_fd != 0)
resolve_result.file_fd
else brk: {
- var file = std.fs.openFileAbsoluteZ(path.textZ(), .{ .read = true }) catch |err| {
+ var file = std.fs.openFileAbsoluteZ(path.textZ(), .{ .mode = .read_only }) catch |err| {
Output.prettyErrorln("Failed to open {s} due to error {s}", .{ path.text, @errorName(err) });
return try ctx.sendInternalError(err);
};
diff --git a/src/http/mime_type.zig b/src/http/mime_type.zig
index 5e9c36b7b..14df12089 100644
--- a/src/http/mime_type.zig
+++ b/src/http/mime_type.zig
@@ -11,9 +11,7 @@ const default_allocator = _global.default_allocator;
const C = _global.C;
const Loader = @import("../options.zig").Loader;
-const Two = strings.ExactSizeMatcher(2);
-const Four = strings.ExactSizeMatcher(4);
-const Eight = strings.ExactSizeMatcher(8);
+const ComptimeStringMap = _global.ComptimeStringMap;
const MimeType = @This();
@@ -155,69 +153,50 @@ pub fn byLoader(loader: Loader, ext: string) MimeType {
}
}
+const extensions = ComptimeStringMap(MimeType, .{
+ .{ "bun", javascript },
+ .{ "jsx", javascript },
+    .{ "js", javascript },
+    .{ "mjs", javascript },
+ .{ "css", css },
+ .{ "jpg", MimeType.initComptime("image/jpeg", .image) },
+ .{ "gif", MimeType.initComptime("image/gif", .image) },
+ .{ "png", MimeType.initComptime("image/png", .image) },
+ .{ "bmp", MimeType.initComptime("image/bmp", .image) },
+ .{ "wav", MimeType.initComptime("audio/wave", .audio) },
+    .{ "aac", MimeType.initComptime("audio/aac", .audio) },
+ .{ "mp4", MimeType.initComptime("video/mp4", .video) },
+ .{ "htm", MimeType.initComptime("text/html;charset=utf-8", .html) },
+ .{ "xml", MimeType.initComptime("text/xml", .other) },
+ .{ "zip", MimeType.initComptime("application/zip", .other) },
+ .{ "txt", MimeType.initComptime("text/plain", .other) },
+ .{ "ttf", MimeType.initComptime("font/ttf", .font) },
+ .{ "otf", MimeType.initComptime("font/otf", .font) },
+ .{ "ico", ico },
+    .{ "mp3", MimeType.initComptime("audio/mpeg", .audio) },
+ .{ "svg", MimeType.initComptime("image/svg+xml", .image) },
+ .{ "csv", MimeType.initComptime("text/csv", .other) },
+    .{ "mid", MimeType.initComptime("audio/mid", .audio) },
+ .{ "json", MimeType.json },
+ .{ "jpeg", MimeType.initComptime("image/jpeg", .image) },
+    .{ "aiff", MimeType.initComptime("audio/aiff", .audio) },
+ .{ "tiff", MimeType.initComptime("image/tiff", .image) },
+ .{ "html", MimeType.html },
+ .{
+ "wasm", MimeType.initComptime(
+ "application/wasm",
+ .wasm,
+ ),
+ },
+ .{ "woff", MimeType.initComptime("font/woff", .font) },
+ .{ "webm", MimeType.initComptime("video/webm", .video) },
+ .{ "webp", MimeType.initComptime("image/webp", .image) },
+ .{ "midi", MimeType.initComptime("audio/midi", .audio) },
+ .{ "woff2", MimeType.initComptime("font/woff2", .font) },
+ .{ "xhtml", MimeType.initComptime("application/xhtml+xml;charset=utf-8", .html) },
+});
+
// TODO: improve this
pub fn byExtension(ext: string) MimeType {
- return switch (ext.len) {
- 2 => {
- return switch (std.mem.readIntNative(u16, ext[0..2])) {
- Two.case("js") => javascript,
- else => MimeType.other,
- };
- },
- 3 => {
- const four = [4]u8{ ext[0], ext[1], ext[2], 0 };
- return switch (std.mem.readIntNative(u32, &four)) {
- Four.case("bun") => javascript,
-
- Four.case("css") => css,
- Four.case("jpg") => MimeType.initComptime("image/jpeg", .image),
- Four.case("gif") => MimeType.initComptime("image/gif", .image),
- Four.case("png") => MimeType.initComptime("image/png", .image),
- Four.case("bmp") => MimeType.initComptime("image/bmp", .image),
- Four.case("jsx"), Four.case("mjs") => MimeType.javascript,
- Four.case("wav") => MimeType.initComptime("audio/wave", .audio),
- Four.case("aac") => MimeType.initComptime("audio/aic", .audio),
- Four.case("mp4") => MimeType.initComptime("video/mp4", .video),
- Four.case("htm") => MimeType.initComptime("text/html;charset=utf-8", .html),
- Four.case("xml") => MimeType.initComptime("text/xml", .other),
- Four.case("zip") => MimeType.initComptime("application/zip", .other),
- Four.case("txt") => MimeType.initComptime("text/plain", .other),
- Four.case("ttf") => MimeType.initComptime("font/ttf", .font),
- Four.case("otf") => MimeType.initComptime("font/otf", .font),
- Four.case("ico") => ico,
- Four.case("mp3") => MimeType.initComptime("audio/mpeg", .video),
- Four.case("svg") => MimeType.initComptime("image/svg+xml", .image),
- Four.case("csv") => MimeType.initComptime("text/csv", .other),
- Four.case("mid") => MimeType.initComptime("audio/mid", .audio),
- else => MimeType.other,
- };
- },
- 4 => {
- return switch (Four.match(ext)) {
- Four.case("json") => MimeType.json,
- Four.case("jpeg") => MimeType.initComptime("image/jpeg", .image),
- Four.case("aiff") => MimeType.initComptime("image/png", .image),
- Four.case("tiff") => MimeType.initComptime("image/tiff", .image),
- Four.case("html") => MimeType.html,
- Four.case("wasm") => MimeType.initComptime(
- "application/wasm",
- .wasm,
- ),
- Four.case("woff") => MimeType.initComptime("font/woff", .font),
- Four.case("webm") => MimeType.initComptime("video/webm", .video),
- Four.case("webp") => MimeType.initComptime("image/webp", .image),
- Four.case("midi") => MimeType.initComptime("audio/midi", .audio),
- else => MimeType.other,
- };
- },
- 5 => {
- const eight = [8]u8{ ext[0], ext[1], ext[2], ext[3], ext[4], 0, 0, 0 };
- return switch (std.mem.readIntNative(u64, &eight)) {
- Eight.case("woff2") => MimeType.initComptime("font/woff2", .font),
- Eight.case("xhtml") => MimeType.initComptime("application/xhtml+xml;charset=utf-8", .html),
- else => MimeType.other,
- };
- },
- else => MimeType.other,
- };
+ return extensions.get(ext) orelse MimeType.other;
}
diff --git a/src/identity_context.zig b/src/identity_context.zig
index 299907b34..7bcfa3dbc 100644
--- a/src/identity_context.zig
+++ b/src/identity_context.zig
@@ -17,7 +17,7 @@ pub const ArrayIdentityContext = struct {
return key;
}
- pub fn eql(_: @This(), a: u32, b: u32) bool {
+ pub fn eql(_: @This(), a: u32, b: u32, _: usize) bool {
return a == b;
}
};
diff --git a/src/install/install.zig b/src/install/install.zig
index ea555eb5d..1e04a8932 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -694,7 +694,7 @@ const PackageInstall = struct {
var package_json_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len :0];
defer this.destination_dir_subpath_buf[this.destination_dir_subpath.len] = 0;
- var package_json_file = this.destination_dir.openFileZ(package_json_path, .{ .read = true }) catch return false;
+ var package_json_file = this.destination_dir.openFileZ(package_json_path, .{ .mode = .read_only }) catch return false;
defer package_json_file.close();
var body_pool = Npm.Registry.BodyPool.get(allocator);
@@ -944,7 +944,7 @@ const PackageInstall = struct {
};
defer outfile.close();
- var infile = try entry.dir.openFile(entry.basename, .{ .read = true });
+ var infile = try entry.dir.openFile(entry.basename, .{ .mode = .read_only });
defer infile.close();
const stat = infile.stat() catch continue;
@@ -3176,7 +3176,7 @@ pub const PackageManager = struct {
} else {
// can't use orelse due to a stage1 bug
- package_json_file = std.fs.cwd().openFileZ("package.json", .{ .read = true, .write = true }) catch brk: {
+ package_json_file = std.fs.cwd().openFileZ("package.json", .{ .mode = .read_write }) catch brk: {
var this_cwd = original_cwd;
outer: while (std.fs.path.dirname(this_cwd)) |parent| {
cwd_buf[parent.len] = 0;
@@ -3188,7 +3188,7 @@ pub const PackageManager = struct {
return err;
};
- break :brk std.fs.cwd().openFileZ("package.json", .{ .read = true, .write = true }) catch {
+ break :brk std.fs.cwd().openFileZ("package.json", .{ .mode = .read_write }) catch {
this_cwd = parent;
continue :outer;
};
@@ -3612,9 +3612,7 @@ pub const PackageManager = struct {
switch (err) {
error.MissingPackageJSON => {
if (op == .add or op == .update) {
- var package_json_file = std.fs.cwd().createFileZ("package.json", .{
- .read = true,
- }) catch |err2| {
+ var package_json_file = std.fs.cwd().createFileZ("package.json", .{ .read = true }) catch |err2| {
Output.prettyErrorln("<r><red>error:<r> {s} create package.json", .{@errorName(err2)});
Global.crash();
};
@@ -3989,7 +3987,7 @@ pub const PackageManager = struct {
node_modules_buf[entry.name.len] = 0;
var buf: [:0]u8 = node_modules_buf[0..entry.name.len :0];
- var file = node_modules_bin.openFileZ(buf, .{ .read = true }) catch {
+ var file = node_modules_bin.openFileZ(buf, .{ .mode = .read_only }) catch {
node_modules_bin.deleteFileZ(buf) catch {};
continue :iterator;
};
@@ -4285,7 +4283,7 @@ pub const PackageManager = struct {
var progress = &this.progress;
if (comptime log_level.showProgress()) {
- root_node = try progress.start("", 0);
+ root_node = progress.start("", 0);
progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
download_node = root_node.start(ProgressStrings.download(), 0);
@@ -4781,7 +4779,7 @@ pub const PackageManager = struct {
}
if (comptime log_level.showProgress()) {
- manager.downloads_node = try manager.progress.start(ProgressStrings.download(), 0);
+ manager.downloads_node = manager.progress.start(ProgressStrings.download(), 0);
manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
manager.setNodeName(manager.downloads_node.?, ProgressStrings.download_no_emoji_, ProgressStrings.download_emoji, true);
manager.downloads_node.?.setEstimatedTotalItems(manager.total_tasks + manager.extracted_count);
@@ -4879,7 +4877,7 @@ pub const PackageManager = struct {
var node: *Progress.Node = undefined;
if (comptime log_level.showProgress()) {
- node = try manager.progress.start(ProgressStrings.save(), 0);
+ node = manager.progress.start(ProgressStrings.save(), 0);
manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
node.activate();
@@ -4910,7 +4908,7 @@ pub const PackageManager = struct {
if (manager.options.do.save_yarn_lock) {
var node: *Progress.Node = undefined;
if (comptime log_level.showProgress()) {
- node = try manager.progress.start("Saving yarn.lock", 0);
+ node = manager.progress.start("Saving yarn.lock", 0);
manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
manager.progress.refresh();
}
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index e4f9bda78..382f759da 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -136,7 +136,7 @@ pub fn loadFromDisk(this: *Lockfile, allocator: std.mem.Allocator, log: *logger.
var file = std.io.getStdIn();
if (filename.len > 0)
- file = std.fs.cwd().openFileZ(filename, .{ .read = true }) catch |err| {
+ file = std.fs.cwd().openFileZ(filename, .{ .mode = .read_only }) catch |err| {
return switch (err) {
error.FileNotFound, error.AccessDenied, error.BadPathName => LoadFromDiskResult{ .not_found = .{} },
else => LoadFromDiskResult{ .err = .{ .step = .open_file, .value = err } },
diff --git a/src/install/npm.zig b/src/install/npm.zig
index 44cf0d4ed..2dcc94686 100644
--- a/src/install/npm.zig
+++ b/src/install/npm.zig
@@ -30,6 +30,7 @@ const VersionSlice = @import("./install.zig").VersionSlice;
const ObjectPool = @import("../pool.zig").ObjectPool;
const Api = @import("../api/schema.zig").Api;
const DotEnv = @import("../env_loader.zig");
+const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap;
const Npm = @This();
@@ -267,7 +268,16 @@ pub const OperatingSystem = enum(u16) {
}
}
- const Matcher = strings.ExactSizeMatcher(8);
+ const NameMap = ComptimeStringMap(u16, .{
+ .{ "aix", aix },
+ .{ "darwin", darwin },
+ .{ "freebsd", freebsd },
+ .{ "linux", linux },
+ .{ "openbsd", openbsd },
+ .{ "sunos", sunos },
+ .{ "win32", win32 },
+ .{ "android", android },
+ });
pub fn apply(this_: OperatingSystem, str: []const u8) OperatingSystem {
if (str.len == 0) {
@@ -277,19 +287,8 @@ pub const OperatingSystem = enum(u16) {
const is_not = str[0] == '!';
const offset: usize = if (str[0] == '!') 1 else 0;
- const input = str[offset..];
- const field: u16 = switch (Matcher.match(input)) {
- Matcher.case("aix") => aix,
- Matcher.case("darwin") => darwin,
- Matcher.case("freebsd") => freebsd,
- Matcher.case("linux") => linux,
- Matcher.case("openbsd") => openbsd,
- Matcher.case("sunos") => sunos,
- Matcher.case("win32") => win32,
- Matcher.case("android") => android,
- else => return this_,
- };
+ const field: u16 = NameMap.get(str[offset..]) orelse return this_;
if (is_not) {
return @intToEnum(OperatingSystem, this & ~field);
@@ -320,6 +319,20 @@ pub const Architecture = enum(u16) {
pub const all_value: u16 = arm | arm64 | ia32 | mips | mipsel | ppc | ppc64 | s390 | s390x | x32 | x64;
+ const NameMap = ComptimeStringMap(u16, .{
+ .{ "arm", arm },
+ .{ "arm64", arm64 },
+ .{ "ia32", ia32 },
+ .{ "mips", mips },
+ .{ "mipsel", mipsel },
+ .{ "ppc", ppc },
+ .{ "ppc64", ppc64 },
+ .{ "s390", s390 },
+ .{ "s390x", s390x },
+ .{ "x32", x32 },
+ .{ "x64", x64 },
+ });
+
pub fn isMatch(this: Architecture) bool {
if (comptime Environment.isAarch64) {
return (@enumToInt(this) & arm64) != 0;
@@ -330,8 +343,6 @@ pub const Architecture = enum(u16) {
}
}
- const Matcher = strings.ExactSizeMatcher(8);
-
pub fn apply(this_: Architecture, str: []const u8) Architecture {
if (str.len == 0) {
return this_;
@@ -342,20 +353,7 @@ pub const Architecture = enum(u16) {
const offset: usize = if (str[0] == '!') 1 else 0;
const input = str[offset..];
- const field: u16 = switch (Matcher.match(input)) {
- Matcher.case("arm") => arm,
- Matcher.case("arm64") => arm64,
- Matcher.case("ia32") => ia32,
- Matcher.case("mips") => mips,
- Matcher.case("mipsel") => mipsel,
- Matcher.case("ppc") => ppc,
- Matcher.case("ppc64") => ppc64,
- Matcher.case("s390") => s390,
- Matcher.case("s390x") => s390x,
- Matcher.case("x32") => x32,
- Matcher.case("x64") => x64,
- else => return this_,
- };
+ const field: u16 = NameMap.get(input) orelse return this_;
if (is_not) {
return @intToEnum(Architecture, this & ~field);
@@ -570,9 +568,7 @@ pub const PackageManifest = struct {
var file_path = try std.fmt.bufPrintZ(&file_path_buf, "{x}.npm", .{file_id});
var cache_file = cache_dir.openFileZ(
file_path,
- .{
- .read = true,
- },
+ .{ .mode = .read_only },
) catch return null;
var timer: std.time.Timer = undefined;
if (PackageManager.verbose_install) {
diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig
index d988dc05a..59cddf206 100644
--- a/src/install/resolvers/folder_resolver.zig
+++ b/src/install/resolvers/folder_resolver.zig
@@ -78,7 +78,7 @@ pub const FolderResolution = union(Tag) {
joined[abs.len] = 0;
var joinedZ: [:0]u8 = joined[0..abs.len :0];
- var package_json: std.fs.File = std.fs.cwd().openFileZ(joinedZ, .{ .read = true }) catch |err| {
+ var package_json: std.fs.File = std.fs.cwd().openFileZ(joinedZ, .{ .mode = .read_only }) catch |err| {
entry.value_ptr.* = .{ .err = err };
return entry.value_ptr.*;
};
diff --git a/src/javascript/jsc/bindings/exports.zig b/src/javascript/jsc/bindings/exports.zig
index 52d1a224a..ba05930d0 100644
--- a/src/javascript/jsc/bindings/exports.zig
+++ b/src/javascript/jsc/bindings/exports.zig
@@ -828,14 +828,26 @@ pub const ZigConsoleClient = struct {
Output.enable_ansi_colors_stderr
else
Output.enable_ansi_colors_stdout;
+
var buffered_writer = if (level == .Warning or level == .Error)
console.error_writer
else
console.writer;
var writer = buffered_writer.writer();
- const BufferedWriterType = @TypeOf(writer);
- format(level, global, vals, len, BufferedWriterType, writer, enable_colors, true);
+ const Writer = @TypeOf(writer);
+ format(
+ level,
+ global,
+ vals,
+ len,
+ @TypeOf(buffered_writer.unbuffered_writer.context),
+ Writer,
+ writer,
+ enable_colors,
+ true,
+ true,
+ );
}
pub fn format(
@@ -843,10 +855,12 @@ pub const ZigConsoleClient = struct {
global: *JSGlobalObject,
vals: [*]const JSValue,
len: usize,
- comptime BufferedWriterType: type,
- writer: BufferedWriterType,
+ comptime RawWriter: type,
+ comptime Writer: type,
+ writer: Writer,
enable_colors: bool,
add_newline: bool,
+ flush: bool,
) void {
var fmt: Formatter = undefined;
defer {
@@ -861,8 +875,10 @@ pub const ZigConsoleClient = struct {
fmt = Formatter{ .remaining_values = &[_]JSValue{} };
const tag = Formatter.Tag.get(vals[0], global);
- var unbuffered_writer = writer.context.unbuffered_writer.context.writer();
- const UnbufferedWriterType = @TypeOf(unbuffered_writer);
+ var unbuffered_writer = if (comptime Writer != RawWriter)
+ writer.context.unbuffered_writer.context.writer()
+ else
+ writer;
if (tag.tag == .String) {
if (enable_colors) {
@@ -871,7 +887,7 @@ pub const ZigConsoleClient = struct {
}
fmt.format(
tag,
- UnbufferedWriterType,
+ @TypeOf(unbuffered_writer),
unbuffered_writer,
vals[0],
global,
@@ -883,7 +899,7 @@ pub const ZigConsoleClient = struct {
} else {
fmt.format(
tag,
- UnbufferedWriterType,
+ @TypeOf(unbuffered_writer),
unbuffered_writer,
vals[0],
global,
@@ -892,11 +908,15 @@ pub const ZigConsoleClient = struct {
}
if (add_newline) _ = unbuffered_writer.write("\n") catch 0;
} else {
- defer writer.context.flush() catch {};
+ defer {
+ if (comptime Writer != RawWriter) {
+ if (flush) writer.context.flush() catch {};
+ }
+ }
if (enable_colors) {
fmt.format(
tag,
- BufferedWriterType,
+ Writer,
writer,
vals[0],
global,
@@ -905,7 +925,7 @@ pub const ZigConsoleClient = struct {
} else {
fmt.format(
tag,
- BufferedWriterType,
+ Writer,
writer,
vals[0],
global,
@@ -918,7 +938,11 @@ pub const ZigConsoleClient = struct {
return;
}
- defer writer.context.flush() catch {};
+ defer {
+ if (comptime Writer != RawWriter) {
+ if (flush) writer.context.flush() catch {};
+ }
+ }
var this_value: JSValue = vals[0];
fmt = Formatter{ .remaining_values = vals[0..len][1..] };
@@ -940,7 +964,7 @@ pub const ZigConsoleClient = struct {
tag.tag = .StringPossiblyFormatted;
}
- fmt.format(tag, BufferedWriterType, writer, this_value, global, true);
+ fmt.format(tag, Writer, writer, this_value, global, true);
if (fmt.remaining_values.len == 0) {
break;
}
@@ -962,7 +986,7 @@ pub const ZigConsoleClient = struct {
tag.tag = .StringPossiblyFormatted;
}
- fmt.format(tag, BufferedWriterType, writer, this_value, global, false);
+ fmt.format(tag, Writer, writer, this_value, global, false);
if (fmt.remaining_values.len == 0)
break;
@@ -1636,7 +1660,7 @@ pub const ZigConsoleClient = struct {
const id = std.hash.Wyhash.hash(0, chars[0..len]);
var result = (pending_time_logs.fetchPut(id, null) catch null) orelse return;
- const value: std.time.Timer = result.value orelse return;
+ var value: std.time.Timer = result.value orelse return;
// get the duration in microseconds
// then display it in milliseconds
Output.printElapsed(@intToFloat(f64, value.read() / std.time.ns_per_us) / std.time.us_per_ms);
diff --git a/src/javascript/jsc/bindings/process.d.ts b/src/javascript/jsc/bindings/process.d.ts
new file mode 100644
index 000000000..194ea2b6a
--- /dev/null
+++ b/src/javascript/jsc/bindings/process.d.ts
@@ -0,0 +1,8 @@
+/**
+ * The process object provides information about, and control over, the
+ * current Bun.js process. While it is available as a global, it is
+ * recommended to explicitly access it via require or import
+ */
+export interface Process {
+ //
+}
diff --git a/src/javascript/jsc/bindings/shimmer.zig b/src/javascript/jsc/bindings/shimmer.zig
index b4d5fe16b..116327207 100644
--- a/src/javascript/jsc/bindings/shimmer.zig
+++ b/src/javascript/jsc/bindings/shimmer.zig
@@ -127,7 +127,7 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp
if (!@hasDecl(Parent, typeName)) {
@compileError(@typeName(Parent) ++ " is missing cppFn: " ++ typeName);
}
- break :ret std.meta.declarationInfo(Parent, typeName).data.Fn.return_type;
+ break :ret @typeInfo(@TypeOf(@field(Parent, typeName))).Fn.return_type.?;
}) {
@setEvalBranchQuota(99999);
if (comptime is_bindgen) {
@@ -135,7 +135,7 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp
} else {
const Fn = comptime @field(headers, symbolName(typeName));
return matchNullable(
- comptime std.meta.declarationInfo(Parent, typeName).data.Fn.return_type,
+ comptime @typeInfo(@TypeOf(@field(Parent, typeName))).Fn.return_type.?,
comptime @typeInfo(@TypeOf(Fn)).Fn.return_type.?,
@call(.{}, Fn, args),
);
diff --git a/src/javascript/jsc/javascript.zig b/src/javascript/jsc/javascript.zig
index afdf01587..828bad39f 100644
--- a/src/javascript/jsc/javascript.zig
+++ b/src/javascript/jsc/javascript.zig
@@ -250,29 +250,60 @@ pub const Bun = struct {
if (arguments.len == 0)
return ZigString.Empty.toValue(ctx.ptr()).asObjectRef();
- var array = std.ArrayList(u8).init(getAllocator(ctx));
- var writer = array.writer();
+ for (arguments) |arg| {
+ JSC.C.JSValueProtect(ctx, arg);
+ }
+ defer {
+ for (arguments) |arg| {
+ JSC.C.JSValueUnprotect(ctx, arg);
+ }
+ }
+
+ // very stable memory address
+ var array = MutableString.init(getAllocator(ctx), 0) catch unreachable;
+ var buffered_writer_ = MutableString.BufferedWriter{ .context = &array };
+ var buffered_writer = &buffered_writer_;
+
+ var writer = buffered_writer.writer();
+ const Writer = @TypeOf(writer);
// we buffer this because it'll almost always be < 4096
- const BufferedWriter = std.io.BufferedWriter(4096, std.ArrayList(u8).Writer);
- var buffered_writer = BufferedWriter{ .unbuffered_writer = writer };
+ // when it's under 4096, we want to avoid the dynamic allocation
ZigConsoleClient.format(
.Debug,
ctx.ptr(),
@ptrCast([*]const JSValue, arguments.ptr),
arguments.len,
- @TypeOf(buffered_writer.writer()),
- buffered_writer.writer(),
+ Writer,
+ Writer,
+ writer,
+ false,
false,
false,
);
- buffered_writer.flush() catch unreachable;
- var zig_str = ZigString.init(array.toOwnedSlice()).withEncoding();
- if (zig_str.len == 0) return ZigString.Empty.toValue(ctx.ptr()).asObjectRef();
- if (!zig_str.isUTF8()) {
- return zig_str.toExternalValue(ctx.ptr()).asObjectRef();
- } else {
+
+ // when it's a small thing, rely on GC to manage the memory
+ if (writer.context.pos < 2048 and array.list.items.len == 0) {
+ var slice = writer.context.buffer[0..writer.context.pos];
+ if (slice.len == 0) {
+ return ZigString.Empty.toValue(ctx.ptr()).asObjectRef();
+ }
+
+ var zig_str = ZigString.init(slice).withEncoding();
return zig_str.toValueGC(ctx.ptr()).asObjectRef();
}
+
+ // when it's a big thing, we will manage it
+ {
+ writer.context.flush() catch {};
+ var slice = writer.context.context.toOwnedSlice();
+
+ var zig_str = ZigString.init(slice).withEncoding();
+ if (!zig_str.isUTF8()) {
+ return zig_str.toExternalValue(ctx.ptr()).asObjectRef();
+ } else {
+ return zig_str.toValueGC(ctx.ptr()).asObjectRef();
+ }
+ }
}
pub fn registerMacro(
@@ -501,7 +532,7 @@ pub const Bun = struct {
exception: js.ExceptionRef,
) js.JSValueRef {
const path = buf_z.ptr[0..buf_z.len];
- var file = std.fs.cwd().openFileZ(buf_z, .{ .read = true, .write = false }) catch |err| {
+ var file = std.fs.cwd().openFileZ(buf_z, .{ .mode = .read_only }) catch |err| {
JSError(getAllocator(ctx), "Opening file {s} for path: \"{s}\"", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
@@ -541,7 +572,7 @@ pub const Bun = struct {
) js.JSValueRef {
const path = buf_z.ptr[0..buf_z.len];
- var file = std.fs.cwd().openFileZ(buf_z, .{ .read = true, .write = false }) catch |err| {
+ var file = std.fs.cwd().openFileZ(buf_z, .{ .mode = .read_only }) catch |err| {
JSError(getAllocator(ctx), "Opening file {s} for path: \"{s}\"", .{ @errorName(err), path }, ctx, exception);
return js.JSValueMakeUndefined(ctx);
};
@@ -2728,14 +2759,11 @@ pub const VirtualMachine = struct {
}
}
- const ChildWriterType = comptime if (@typeInfo(Writer) == .Pointer)
- Writer
- else
- *Writer;
-
if (value.isAggregateError(this.global)) {
const AggregateErrorIterator = struct {
- pub var current_exception_list: ?*ExceptionList = null;
+ writer: Writer,
+ current_exception_list: ?*ExceptionList = null,
+
pub fn iteratorWithColor(_vm: [*c]VM, globalObject: [*c]JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void {
iterator(_vm, globalObject, nextValue, ctx.?, true);
}
@@ -2743,26 +2771,15 @@ pub const VirtualMachine = struct {
iterator(_vm, globalObject, nextValue, ctx.?, false);
}
inline fn iterator(_: [*c]VM, _: [*c]JSGlobalObject, nextValue: JSValue, ctx: ?*anyopaque, comptime color: bool) void {
- var casted = @intToPtr(ChildWriterType, @ptrToInt(ctx));
- if (comptime ChildWriterType == Writer) {
- VirtualMachine.vm.printErrorlikeObject(nextValue, null, current_exception_list, ChildWriterType, casted, color);
- } else {
- VirtualMachine.vm.printErrorlikeObject(nextValue, null, current_exception_list, Writer, casted.*, color);
- }
+ var this_ = @intToPtr(*@This(), @ptrToInt(ctx));
+ VirtualMachine.vm.printErrorlikeObject(nextValue, null, this_.current_exception_list, Writer, this_.writer, color);
}
};
- AggregateErrorIterator.current_exception_list = exception_list;
- defer AggregateErrorIterator.current_exception_list = null;
- var writer_ctx: ?*anyopaque = null;
- if (comptime @typeInfo(Writer) == .Pointer) {
- writer_ctx = @intToPtr(?*anyopaque, @ptrToInt(writer));
- } else {
- writer_ctx = @intToPtr(?*anyopaque, @ptrToInt(&writer));
- }
+ var iter = AggregateErrorIterator{ .writer = writer, .current_exception_list = exception_list };
if (comptime allow_ansi_color) {
- value.getErrorsProperty(this.global).forEach(this.global, writer_ctx, AggregateErrorIterator.iteratorWithColor);
+ value.getErrorsProperty(this.global).forEach(this.global, &iter, AggregateErrorIterator.iteratorWithColor);
} else {
- value.getErrorsProperty(this.global).forEach(this.global, writer_ctx, AggregateErrorIterator.iteratorWithOutColor);
+ value.getErrorsProperty(this.global).forEach(this.global, &iter, AggregateErrorIterator.iteratorWithOutColor);
}
return;
}
@@ -2897,7 +2914,7 @@ pub const VirtualMachine = struct {
var line_numbers = exception.stack.source_lines_numbers[0..exception.stack.source_lines_len];
var max_line: i32 = -1;
- for (line_numbers) |line| max_line = std.math.max(max_line, line);
+ for (line_numbers) |line| max_line = @maximum(max_line, line);
const max_line_number_pad = std.fmt.count("{d}", .{max_line});
var source_lines = exception.stack.sourceLineIterator();
diff --git a/src/javascript/jsc/node/types.zig b/src/javascript/jsc/node/types.zig
index a02645853..0eac3b504 100644
--- a/src/javascript/jsc/node/types.zig
+++ b/src/javascript/jsc/node/types.zig
@@ -804,12 +804,12 @@ fn StatsLike(comptime name: string, comptime T: type) type {
.birthtime_ms = if (Environment.isLinux)
0
else
- @truncate(T, @intCast(i64, if (stat_.birthtimensec > 0) (@intCast(usize, stat_.birthtimensec) / std.time.ns_per_ms) else 0)),
+ @truncate(T, @intCast(i64, if (stat_.birthtime().tv_nsec > 0) (@intCast(usize, stat_.birthtime().tv_nsec) / std.time.ns_per_ms) else 0)),
.birthtime = if (Environment.isLinux)
@intToEnum(Date, 0)
else
- @intToEnum(Date, @intCast(u64, @maximum(stat_.birthtimesec, 0))),
+ @intToEnum(Date, @intCast(u64, @maximum(stat_.birthtime().tv_sec, 0))),
};
}
@@ -2427,7 +2427,7 @@ pub const Process = struct {
{
var args_iterator = std.process.args();
- if (args_iterator.nextPosix()) |arg0| {
+ if (args_iterator.next()) |arg0| {
var argv0 = JSC.ZigString.init(std.mem.span(arg0));
argv0.setOutputEncoding();
// https://github.com/yargs/yargs/blob/adb0d11e02c613af3d9427b3028cc192703a3869/lib/utils/process-argv.ts#L1
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 630335dfd..6f73f426a 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -26,6 +26,7 @@ const StringHashMap = _hash_map.StringHashMap;
const AutoHashMap = _hash_map.AutoHashMap;
const StringHashMapUnmanaged = _hash_map.StringHashMapUnmanaged;
const is_bindgen = std.meta.globalOption("bindgen", bool) orelse false;
+const ComptimeStringMap = _global.ComptimeStringMap;
pub fn NewBaseStore(comptime Union: anytype, comptime count: usize) type {
var max_size = 0;
var max_align = 1;
@@ -343,35 +344,47 @@ pub const Flags = struct {
is_key_before_rest: bool = false,
};
- pub const Property = packed struct {
- is_computed: bool = false,
- is_method: bool = false,
- is_static: bool = false,
- was_shorthand: bool = false,
- is_spread: bool = false,
+ pub const Property = enum {
+ is_computed,
+ is_method,
+ is_static,
+ was_shorthand,
+ is_spread,
- const None = Flags.Property{};
+ pub inline fn init(fields: Fields) Set {
+ return Set.init(fields);
+ }
+
+ pub const None = Set{};
+ pub const Fields = std.enums.EnumFieldStruct(Flags.Property, bool, false);
+ pub const Set = std.enums.EnumSet(Flags.Property);
};
- pub const Function = packed struct {
- is_async: bool = false,
- is_generator: bool = false,
- has_rest_arg: bool = false,
- has_if_scope: bool = false,
+ pub const Function = enum {
+ is_async,
+ is_generator,
+ has_rest_arg,
+ has_if_scope,
- is_forward_declaration: bool = false,
+ is_forward_declaration,
- // This is true if the function is a method
- is_unique_formal_parameters: bool = false,
+ /// This is true if the function is a method
+ is_unique_formal_parameters,
- // Only applicable to function statements.
- is_export: bool = false,
+ /// Only applicable to function statements.
+ is_export,
+
+ /// Used for Hot Module Reloading's wrapper function
+ /// "iife" stands for "immediately invoked function expression"
+ print_as_iife,
- // Used for Hot Module Reloading's wrapper function
- // "iife" stands for "immediately invoked function expression"
- print_as_iife: bool = false,
+ pub inline fn init(fields: Fields) Set {
+ return Set.init(fields);
+ }
- const None = Flags.Function{};
+ pub const None = Set{};
+ pub const Fields = std.enums.EnumFieldStruct(Function, bool, false);
+ pub const Set = std.enums.EnumSet(Function);
};
};
@@ -447,7 +460,7 @@ pub const Binding = struct {
properties[i] = G.Property{
.flags = item.flags,
.key = item.key,
- .kind = if (item.flags.is_spread)
+ .kind = if (item.flags.contains(.is_spread))
G.Property.Kind.spread
else
G.Property.Kind.normal,
@@ -549,7 +562,7 @@ pub const B = union(Binding.Tag) {
};
pub const Property = struct {
- flags: Flags.Property = Flags.Property.None,
+ flags: Flags.Property.Set = Flags.Property.None,
key: ExprNodeIndex,
value: BindingNodeIndex,
default_value: ?ExprNodeIndex = null,
@@ -645,7 +658,7 @@ pub const G = struct {
//
initializer: ?ExprNodeIndex = null,
kind: Kind = Kind.normal,
- flags: Flags.Property = Flags.Property.None,
+ flags: Flags.Property.Set = Flags.Property.None,
pub const List = BabyList(Property);
@@ -676,7 +689,7 @@ pub const G = struct {
body: FnBody = FnBody{ .loc = logger.Loc.Empty, .stmts = &([_]StmtNodeIndex{}) },
arguments_ref: ?Ref = null,
- flags: Flags.Function = Flags.Function.None,
+ flags: Flags.Function.Set = Flags.Function.None,
};
pub const Arg = struct {
ts_decorators: ExprNodeList = ExprNodeList{},
@@ -1281,7 +1294,7 @@ pub const E = struct {
key,
any,
- pub const Map = std.ComptimeStringMap(SpecialProp, .{
+ pub const Map = ComptimeStringMap(SpecialProp, .{
.{ "__self", .__self },
.{ "__source", .__source },
.{ "key", .key },
@@ -1591,39 +1604,47 @@ pub const E = struct {
}
const PackageJSONSort = struct {
- const Fields = struct {
- const name: u8 = 0;
- const version: u8 = 1;
- const main: u8 = 2;
- const module: u8 = 3;
- const dependencies: u8 = 3;
- const devDependencies: u8 = 4;
- const optionalDependencies: u8 = 5;
- const peerDependencies: u8 = 6;
- const exports: u8 = 7;
-
- pub const Map = std.ComptimeStringMap(u8, .{
- .{ "name", name },
- .{ "version", version },
- .{ "main", main },
- .{ "module", module },
- .{ "dependencies", dependencies },
- .{ "devDependencies", devDependencies },
- .{ "optionalDependencies", optionalDependencies },
- .{ "peerDependencies", peerDependencies },
- .{ "exports", exports },
+ const Fields = enum(u8) {
+ name,
+ version,
+ author,
+ repository,
+ config,
+ main,
+ module,
+ dependencies,
+ devDependencies,
+ optionalDependencies,
+ peerDependencies,
+ exports,
+ __fake,
+
+ pub const Map = ComptimeStringMap(Fields, .{
+ .{ "name", Fields.name },
+ .{ "version", Fields.version },
+ .{ "author", Fields.author },
+ .{ "repository", Fields.repository },
+ .{ "config", Fields.config },
+ .{ "main", Fields.main },
+ .{ "module", Fields.module },
+ .{ "dependencies", Fields.dependencies },
+ .{ "devDependencies", Fields.devDependencies },
+ .{ "optionalDependencies", Fields.optionalDependencies },
+ .{ "peerDependencies", Fields.peerDependencies },
+ .{ "exports", Fields.exports },
});
+ const max_key_size = 12;
pub fn isLessThan(ctx: void, lhs: G.Property, rhs: G.Property) bool {
- var lhs_key_size: u8 = 8;
- var rhs_key_size: u8 = 8;
+ var lhs_key_size: u8 = @enumToInt(Fields.__fake);
+ var rhs_key_size: u8 = @enumToInt(Fields.__fake);
if (lhs.key != null and lhs.key.?.data == .e_string) {
- lhs_key_size = Map.get(lhs.key.?.data.e_string.utf8) orelse 8;
+ lhs_key_size = @enumToInt(Map.get(lhs.key.?.data.e_string.utf8) orelse Fields.__fake);
}
if (rhs.key != null and rhs.key.?.data == .e_string) {
- rhs_key_size = Map.get(rhs.key.?.data.e_string.utf8) orelse 8;
+ rhs_key_size = @enumToInt(Map.get(rhs.key.?.data.e_string.utf8) orelse Fields.__fake);
}
return switch (std.math.order(lhs_key_size, rhs_key_size)) {
@@ -5628,7 +5649,7 @@ pub const Macro = struct {
break :brk list;
};
- pub const names = std.ComptimeStringMap(Tag, .{
+ pub const names = ComptimeStringMap(Tag, .{
.{ "array", Tag.e_array },
.{ "unary", Tag.e_unary },
.{ "binary", Tag.e_binary },
@@ -6745,8 +6766,8 @@ pub const Macro = struct {
.e_missing, .e_undefined => null,
else => expr,
};
- property.flags.is_spread = expr.data == .e_spread;
- expect_key = property.value == null or !property.flags.is_spread;
+ property.flags.setPresent(.is_spread, expr.data == .e_spread);
+ expect_key = property.value == null or !property.flags.contains(.is_spread);
},
TagOrJSNode.node => |node| {
const expr = node.toExpr();
@@ -6754,8 +6775,8 @@ pub const Macro = struct {
.e_missing, .e_undefined => null,
else => expr,
};
- property.flags.is_spread = expr.data == .e_spread;
- expect_key = property.value == null or !property.flags.is_spread;
+ property.flags.setPresent(.is_spread, expr.data == .e_spread);
+ expect_key = property.value == null or !property.flags.contains(.is_spread);
},
TagOrJSNode.invalid => {
return false;
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 3bbb6d165..8beb9ccd1 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -1884,6 +1884,14 @@ fn NewLexer_(
}
}
+ pub fn toUTF8EString(lexer: *LexerType) js_ast.E.String {
+ if (lexer.string_literal_is_ascii) {
+ return js_ast.E.String{ .utf8 = lexer.string_literal_slice };
+ } else {
+ return js_ast.E.String{ .utf8 = lexer.utf16ToString(lexer.string_literal) };
+ }
+ }
+
inline fn assertNotJSON(_: *const LexerType) void {
if (comptime is_json) @compileError("JSON should not reach this point");
if (comptime is_json) unreachable;
diff --git a/src/js_lexer/identifier_data.zig b/src/js_lexer/identifier_data.zig
index 8be15dbab..c9c83d113 100644
--- a/src/js_lexer/identifier_data.zig
+++ b/src/js_lexer/identifier_data.zig
@@ -121,19 +121,19 @@ test "Check" {
const id_start_cached_correct = Cache.CachedBitset{ .range = id_start_range, .len = id_start_count + 1 };
const id_continue_cached_correct = Cache.CachedBitset{ .range = id_end_range, .len = id_end_count + 1 };
try std.os.chdir(std.fs.path.dirname(@src().file).?);
- var start_cached = try std.fs.cwd().openFileZ("id_start_bitset.meta.blob", .{ .read = true });
+ var start_cached = try std.fs.cwd().openFileZ("id_start_bitset.meta.blob", .{ .mode = .read_only });
var start_cached_data = try start_cached.readToEndAlloc(std.heap.c_allocator, 4096);
try std.testing.expectEqualSlices(u8, start_cached_data, std.mem.asBytes(&id_start_cached_correct));
- var continue_cached = try std.fs.cwd().openFileZ("id_continue_bitset.meta.blob", .{ .read = true });
+ var continue_cached = try std.fs.cwd().openFileZ("id_continue_bitset.meta.blob", .{ .mode = .read_only });
var continue_cached_data = try continue_cached.readToEndAlloc(std.heap.c_allocator, 4096);
try std.testing.expectEqualSlices(u8, continue_cached_data, std.mem.asBytes(&id_continue_cached_correct));
- var start_blob_file = try std.fs.cwd().openFileZ("id_start_bitset.blob", .{ .read = true });
+ var start_blob_file = try std.fs.cwd().openFileZ("id_start_bitset.blob", .{ .mode = .read_only });
var start_blob_data = try start_blob_file.readToEndAlloc(std.heap.c_allocator, try start_blob_file.getEndPos());
- var continue_blob_file = try std.fs.cwd().openFileZ("id_continue_bitset.blob", .{ .read = true });
+ var continue_blob_file = try std.fs.cwd().openFileZ("id_continue_bitset.blob", .{ .mode = .read_only });
var continue_blob_data = try continue_blob_file.readToEndAlloc(std.heap.c_allocator, try continue_blob_file.getEndPos());
try std.testing.expectEqualSlices(u8, start_blob_data, std.mem.asBytes(&id_start.masks));
diff --git a/src/js_lexer_tables.zig b/src/js_lexer_tables.zig
index 875cf5784..9b3898aa8 100644
--- a/src/js_lexer_tables.zig
+++ b/src/js_lexer_tables.zig
@@ -6,6 +6,7 @@ const unicode = std.unicode;
const default_allocator = @import("./global.zig").default_allocator;
const string = @import("string_types.zig").string;
const CodePoint = @import("string_types.zig").CodePoint;
+const ComptimeStringMap = @import("./comptime_string_map.zig").ComptimeStringMap;
pub const T = enum(u8) {
t_end_of_file,
@@ -152,7 +153,7 @@ pub const T = enum(u8) {
}
};
-pub const Keywords = std.ComptimeStringMap(T, .{
+pub const Keywords = ComptimeStringMap(T, .{
.{ "break", .t_break },
.{ "case", .t_case },
.{ "catch", .t_catch },
@@ -191,7 +192,7 @@ pub const Keywords = std.ComptimeStringMap(T, .{
.{ "with", .t_with },
});
-pub const StrictModeReservedWords = std.ComptimeStringMap(bool, .{
+pub const StrictModeReservedWords = ComptimeStringMap(bool, .{
.{ "implements", true },
.{ "interface", true },
.{ "let", true },
@@ -203,7 +204,7 @@ pub const StrictModeReservedWords = std.ComptimeStringMap(bool, .{
.{ "yield", true },
});
-pub const StrictModeReservedWordsRemap = std.ComptimeStringMap(string, .{
+pub const StrictModeReservedWordsRemap = ComptimeStringMap(string, .{
.{ "implements", "_implements" },
.{ "interface", "_interface" },
.{ "let", "_let" },
@@ -228,7 +229,7 @@ pub const PropertyModifierKeyword = enum {
p_set,
p_static,
- pub const List = std.ComptimeStringMap(PropertyModifierKeyword, .{
+ pub const List = ComptimeStringMap(PropertyModifierKeyword, .{
.{ "abstract", .p_abstract },
.{ "async", .p_async },
.{ "declare", .p_declare },
@@ -243,7 +244,7 @@ pub const PropertyModifierKeyword = enum {
});
};
-pub const TypeScriptAccessibilityModifier = std.ComptimeStringMap(u1, .{
+pub const TypeScriptAccessibilityModifier = ComptimeStringMap(u1, .{
.{ "public", 1 },
.{ "private", 1 },
.{ "protected", 1 },
@@ -511,7 +512,7 @@ pub const TypescriptStmtKeyword = enum {
ts_stmt_global,
ts_stmt_declare,
- pub const List = std.ComptimeStringMap(TypescriptStmtKeyword, .{
+ pub const List = ComptimeStringMap(TypescriptStmtKeyword, .{
.{
"type",
TypescriptStmtKeyword.ts_stmt_type,
@@ -546,7 +547,7 @@ pub const TypescriptStmtKeyword = enum {
pub const JSXEntityMap = std.StringHashMap(CodePoint);
// Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`.
-pub const ChildlessJSXTags = std.ComptimeStringMap(void, .{
+pub const ChildlessJSXTags = ComptimeStringMap(void, .{
.{ "area", void },
.{ "base", void },
.{ "br", void },
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 8b28c8eee..0ee6c5edf 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -38,6 +38,7 @@ pub const ExprNodeIndex = js_ast.ExprNodeIndex;
pub const ExprNodeList = js_ast.ExprNodeList;
pub const StmtNodeList = js_ast.StmtNodeList;
pub const BindingNodeList = js_ast.BindingNodeList;
+const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap;
fn _disabledAssert(_: bool) void {
if (!Environment.allow_assert) @compileLog("assert is missing an if (Environment.allow_assert)");
@@ -279,7 +280,7 @@ pub const TypeScript = struct {
else => return null,
}
}
- pub const IMap = std.ComptimeStringMap(Kind, .{
+ pub const IMap = ComptimeStringMap(Kind, .{
.{ "unique", .unique },
.{ "abstract", .abstract },
.{ "asserts", .asserts },
@@ -581,13 +582,13 @@ pub const ImportScanner = struct {
},
.s_function => |st| {
- if (st.func.flags.is_export) {
+ if (st.func.flags.contains(.is_export)) {
if (st.func.name) |name| {
const original_name = p.symbols.items[name.ref.?.innerIndex()].original_name;
try p.recordExport(name.loc, original_name, name.ref.?);
if (p.options.features.hot_module_reloading) {
- st.func.flags.is_export = false;
+ st.func.flags.remove(.is_export);
}
} else {
try p.log.addRangeError(p.source, logger.Range{ .loc = st.func.open_parens_loc, .len = 2 }, "Exported functions must have a name");
@@ -1099,7 +1100,7 @@ pub const SideEffects = enum(u1) {
var any_computed = false;
for (properties_slice) |spread| {
end = 0;
- any_computed = any_computed or spread.flags.is_computed;
+ any_computed = any_computed or spread.flags.contains(.is_computed);
if (spread.kind == .spread) {
// Spread properties must always be evaluated
for (properties_slice) |prop_| {
@@ -1108,7 +1109,7 @@ pub const SideEffects = enum(u1) {
if (prop.value != null) {
if (simpifyUnusedExpr(p, prop.value.?)) |value| {
prop.value = value;
- } else if (!prop.flags.is_computed) {
+ } else if (!prop.flags.contains(.is_computed)) {
continue;
} else {
prop.value = p.e(E.Number{ .value = 0.0 }, prop.value.?.loc);
@@ -1585,34 +1586,20 @@ const ExprOrLetStmt = struct {
const FunctionKind = enum { stmt, expr };
-const EightLetterMatcher = strings.ExactSizeMatcher(8);
-
const AsyncPrefixExpression = enum(u2) {
none,
is_yield,
is_async,
is_await,
- pub fn find(ident: string) AsyncPrefixExpression {
- if (ident.len != 5) {
- return .none;
- }
-
- switch (EightLetterMatcher.match(ident)) {
- EightLetterMatcher.case("yield") => {
- return .is_yield;
- },
- EightLetterMatcher.case("await") => {
- return .is_await;
- },
- EightLetterMatcher.case("async") => {
- return .is_async;
- },
+ const map = ComptimeStringMap(AsyncPrefixExpression, .{
+ .{ "yield", .is_yield },
+ .{ "await", .is_await },
+ .{ "async", .is_async },
+ });
- else => {
- return .none;
- },
- }
+ pub fn find(ident: string) AsyncPrefixExpression {
+ return map.get(ident) orelse .none;
}
};
@@ -2185,6 +2172,16 @@ pub const Parser = struct {
}
pub fn parse(self: *Parser) !js_ast.Result {
+ if (comptime Environment.isWasm) {
+ self.options.ts = true;
+ self.options.jsx.parse = true;
+ // if (self.options.features.is_macro_runtime) {
+ // return try self._parse(TSParserMacro);
+ // }
+
+ return try self._parse(TSXParser);
+ }
+
if (self.options.ts and self.options.features.is_macro_runtime) return try self._parse(TSParserMacro);
if (!self.options.ts and self.options.features.is_macro_runtime) return try self._parse(JSParserMacro);
@@ -3575,7 +3572,7 @@ fn NewParser_(
}
},
.s_function => |func| {
- if (func.func.flags.is_export) break :can_remove_part false;
+ if (func.func.flags.contains(.is_export)) break :can_remove_part false;
if (func.func.name) |name| {
const symbol: *const Symbol = &p.symbols.items[name.ref.?.innerIndex()];
@@ -4473,10 +4470,10 @@ fn NewParser_(
var properties = List(B.Property).initCapacity(p.allocator, ex.properties.len) catch unreachable;
for (ex.properties.slice()) |*item| {
- if (item.flags.is_method or item.kind == .get or item.kind == .set) {
+ if (item.flags.contains(.is_method) or item.kind == .get or item.kind == .set) {
invalid_loc.append(.{
.loc = item.key.?.loc,
- .kind = if (item.flags.is_method)
+ .kind = if (item.flags.contains(.is_method))
InvalidLoc.Tag.method
else if (item.kind == .get)
InvalidLoc.Tag.getter
@@ -4488,12 +4485,12 @@ fn NewParser_(
var value = &item.value.?;
const tup = p.convertExprToBindingAndInitializer(value, invalid_loc, false);
const initializer = tup.expr orelse item.initializer;
- const is_spread = item.kind == .spread or item.flags.is_spread;
+ const is_spread = item.kind == .spread or item.flags.contains(.is_spread);
properties.appendAssumeCapacity(B.Property{
- .flags = Flags.Property{
+ .flags = Flags.Property.init(.{
.is_spread = is_spread,
- .is_computed = item.flags.is_computed,
- },
+ .is_computed = item.flags.contains(.is_computed),
+ }),
.key = item.key orelse p.e(E.Missing{}, expr.loc),
.value = tup.binding orelse p.b(B.Missing{}, expr.loc),
.default_value = initializer,
@@ -4644,7 +4641,7 @@ fn NewParser_(
if (comptime is_typescript_enabled) {
// Don't output anything if it's just a forward declaration of a function
- if (opts.is_typescript_declare or func.flags.is_forward_declaration) {
+ if (opts.is_typescript_declare or func.flags.contains(.is_forward_declaration)) {
p.popAndDiscardScope(scopeIndex);
// Balance the fake block scope introduced above
@@ -4674,8 +4671,8 @@ fn NewParser_(
func.name = name_.*;
}
- func.flags.has_if_scope = hasIfScope;
- func.flags.is_export = opts.is_export;
+ func.flags.setPresent(.has_if_scope, hasIfScope);
+ func.flags.setPresent(.is_export, opts.is_export);
// Balance the fake block scope introduced above
if (hasIfScope) {
@@ -4715,11 +4712,11 @@ fn NewParser_(
var func = G.Fn{
.name = name,
- .flags = Flags.Function{
+ .flags = Flags.Function.init(.{
.has_rest_arg = false,
.is_async = opts.allow_await == .allow_expr,
.is_generator = opts.allow_yield == .allow_expr,
- },
+ }),
.arguments_ref = null,
.open_parens_loc = p.lexer.loc(),
@@ -4765,10 +4762,10 @@ fn NewParser_(
ts_decorators = try p.parseTypeScriptDecorators();
}
- if (!func.flags.has_rest_arg and p.lexer.token == T.t_dot_dot_dot) {
+ if (!func.flags.contains(.has_rest_arg) and p.lexer.token == T.t_dot_dot_dot) {
// p.markSyntaxFeature
try p.lexer.next();
- func.flags.has_rest_arg = true;
+ func.flags.insert(.has_rest_arg);
}
var is_typescript_ctor_field = false;
@@ -4822,7 +4819,7 @@ fn NewParser_(
p.declareBinding(.hoisted, &arg, &parseStmtOpts) catch unreachable;
var default_value: ?ExprNodeIndex = null;
- if (!func.flags.has_rest_arg and p.lexer.token == .t_equals) {
+ if (!func.flags.contains(.has_rest_arg) and p.lexer.token == .t_equals) {
// p.markSyntaxFeature
try p.lexer.next();
default_value = try p.parseExpr(.comma);
@@ -4841,7 +4838,7 @@ fn NewParser_(
break;
}
- if (func.flags.has_rest_arg) {
+ if (func.flags.contains(.has_rest_arg)) {
// JavaScript does not allow a comma after a rest argument
if (opts.is_typescript_declare) {
// TypeScript does allow a comma after a rest argument in a "declare" context
@@ -4880,7 +4877,7 @@ fn NewParser_(
// "function foo(): any;"
if (opts.allow_missing_body_for_type_script and p.lexer.token != .t_open_brace) {
try p.lexer.expectOrInsertSemicolon();
- func.flags.is_forward_declaration = true;
+ func.flags.insert(.is_forward_declaration);
return func;
}
var tempOpts = opts;
@@ -7663,7 +7660,7 @@ fn NewParser_(
properties.append(property) catch unreachable;
// Commas after spread elements are not allowed
- if (property.flags.is_spread and p.lexer.token == .t_comma) {
+ if (property.flags.contains(.is_spread) and p.lexer.token == .t_comma) {
p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable;
return error.SyntaxError;
}
@@ -7717,7 +7714,7 @@ fn NewParser_(
return B.Property{
.key = p.e(E.Missing{}, p.lexer.loc()),
- .flags = Flags.Property{ .is_spread = true },
+ .flags = Flags.Property.init(.{ .is_spread = true }),
.value = value,
};
},
@@ -7784,9 +7781,9 @@ fn NewParser_(
}
return B.Property{
- .flags = Flags.Property{
+ .flags = Flags.Property.init(.{
.is_computed = is_computed,
- },
+ }),
.key = key,
.value = value,
.default_value = default_value,
@@ -8465,13 +8462,13 @@ fn NewParser_(
if (func.name) |name| {
const original_name = p.symbols.items[name.ref.?.innerIndex()].original_name;
- if (func.flags.is_async and strings.eqlComptime(original_name, "await")) {
+ if (func.flags.contains(.is_async) and strings.eqlComptime(original_name, "await")) {
p.log.addRangeError(
p.source,
js_lexer.rangeOfIdentifier(p.source, name.loc),
"An async function cannot be named \"await\"",
) catch unreachable;
- } else if (kind == .expr and func.flags.is_generator and strings.eqlComptime(original_name, "yield")) {
+ } else if (kind == .expr and func.flags.contains(.is_generator) and strings.eqlComptime(original_name, "yield")) {
p.log.addRangeError(
p.source,
js_lexer.rangeOfIdentifier(p.source, name.loc),
@@ -9188,9 +9185,9 @@ fn NewParser_(
.key = key,
.value = value,
.initializer = initializer,
- .flags = Flags.Property{
+ .flags = Flags.Property.init(.{
.was_shorthand = true,
- },
+ }),
};
}
},
@@ -9278,10 +9275,10 @@ fn NewParser_(
return G.Property{
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
.kind = kind,
- .flags = Flags.Property{
+ .flags = Flags.Property.init(.{
.is_computed = is_computed,
.is_static = opts.is_static,
- },
+ }),
.key = key,
.initializer = initializer,
};
@@ -9336,14 +9333,14 @@ fn NewParser_(
});
// "class Foo { foo(): void; foo(): void {} }"
- if (func.flags.is_forward_declaration) {
+ if (func.flags.contains(.is_forward_declaration)) {
// Skip this property entirely
p.popAndDiscardScope(scope_index);
return null;
}
p.popScope();
- func.flags.is_unique_formal_parameters = true;
+ func.flags.insert(.is_unique_formal_parameters);
const value = p.e(E.Function{ .func = func }, loc);
// Enforce argument rules for accessors
@@ -9410,11 +9407,11 @@ fn NewParser_(
return G.Property{
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
.kind = kind,
- .flags = Flags.Property{
+ .flags = Flags.Property.init(.{
.is_computed = is_computed,
.is_method = true,
.is_static = opts.is_static,
- },
+ }),
.key = key,
.value = value,
};
@@ -9426,9 +9423,9 @@ fn NewParser_(
return G.Property{
.kind = kind,
- .flags = Flags.Property{
+ .flags = Flags.Property.init(.{
.is_computed = is_computed,
- },
+ }),
.key = key,
.value = value,
};
@@ -11572,7 +11569,7 @@ fn NewParser_(
},
.b_object => |bi| {
for (bi.properties) |*property| {
- if (!property.flags.is_spread and !p.exprCanBeRemovedIfUnused(&property.key)) {
+ if (!property.flags.contains(.is_spread) and !p.exprCanBeRemovedIfUnused(&property.key)) {
return false;
}
@@ -11729,7 +11726,7 @@ fn NewParser_(
var func = _func;
const old_fn_or_arrow_data = p.fn_or_arrow_data_visit;
const old_fn_only_data = p.fn_only_data_visit;
- p.fn_or_arrow_data_visit = FnOrArrowDataVisit{ .is_async = func.flags.is_async };
+ p.fn_or_arrow_data_visit = FnOrArrowDataVisit{ .is_async = func.flags.contains(.is_async) };
p.fn_only_data_visit = FnOnlyDataVisit{ .is_this_nested = true, .arguments_ref = func.arguments_ref };
if (func.name) |name| {
@@ -11748,7 +11745,7 @@ fn NewParser_(
p.visitArgs(
func.args,
VisitArgsOpts{
- .has_rest_arg = func.flags.has_rest_arg,
+ .has_rest_arg = func.flags.contains(.has_rest_arg),
.body = body.stmts,
.is_unique_formal_parameters = true,
},
@@ -12992,9 +12989,9 @@ fn NewParser_(
property.key = p.visitExpr(property.key orelse Global.panic("Expected property key", .{}));
const key = property.key.?;
// Forbid duplicate "__proto__" properties according to the specification
- if (!property.flags.is_computed and
- !property.flags.was_shorthand and
- !property.flags.is_method and
+ if (!property.flags.contains(.is_computed) and
+ !property.flags.contains(.was_shorthand) and
+ !property.flags.contains(.is_method) and
in.assign_target == .none and
key.data.isStringValue() and
strings.eqlComptime(
@@ -13116,12 +13113,24 @@ fn NewParser_(
}
if (e_.optional_chain == null and @as(Expr.Tag, e_.target.data) == .e_identifier and e_.target.data.e_identifier.ref.eql(p.require_ref)) {
+ e_.can_be_unwrapped_if_unused = false;
+
// Heuristic: omit warnings inside try/catch blocks because presumably
// the try/catch statement is there to handle the potential run-time
// error from the unbundled require() call failing.
if (e_.args.len == 1) {
- return p.require_transposer.maybeTransposeIf(e_.args.first_(), null);
- } else if (p.options.warn_about_unbundled_modules) {
+ const first = e_.args.first_();
+ if (first.data == .e_string) {
+ // require(FOO) => require(FOO)
+ return p.transposeRequire(first, null);
+ } else if (first.data == .e_if) {
+ // require(FOO ? '123' : '456') => FOO ? require('123') : require('456')
+ // This makes static analysis later easier
+ return p.require_transposer.maybeTransposeIf(first, null);
+ }
+ }
+
+ if (p.options.warn_about_unbundled_modules) {
const r = js_lexer.rangeOfIdentifier(p.source, e_.target.loc);
p.log.addRangeDebug(p.source, r, "This call to \"require\" will not be bundled because it has multiple arguments") catch unreachable;
}
@@ -13465,7 +13474,7 @@ fn NewParser_(
for (ex.properties.slice()) |*property| {
// The key must still be evaluated if it's computed or a spread
- if (property.kind == .spread or property.flags.is_computed or property.flags.is_spread) {
+ if (property.kind == .spread or property.flags.contains(.is_computed) or property.flags.contains(.is_spread)) {
return false;
}
@@ -14372,8 +14381,8 @@ fn NewParser_(
data.func = p.visitFunc(data.func, data.func.open_parens_loc);
// Handle exporting this function from a namespace
- if (data.func.flags.is_export and p.enclosing_namespace_arg_ref != null) {
- data.func.flags.is_export = false;
+ if (data.func.flags.contains(.is_export) and p.enclosing_namespace_arg_ref != null) {
+ data.func.flags.remove(.is_export);
const enclosing_namespace_arg_ref = p.enclosing_namespace_arg_ref orelse unreachable;
stmts.ensureUnusedCapacity(3) catch unreachable;
@@ -14598,7 +14607,7 @@ fn NewParser_(
{
var object = expr.data.e_object;
for (bound_object.properties) |property| {
- if (property.flags.is_spread) return;
+ if (property.flags.contains(.is_spread)) return;
}
var output_properties = object.properties.slice();
var end: u32 = 0;
@@ -15089,7 +15098,7 @@ fn NewParser_(
},
.b_object => |bind| {
for (bind.properties) |*property| {
- if (!property.flags.is_spread) {
+ if (!property.flags.contains(.is_spread)) {
property.key = p.visitExpr(property.key);
}
@@ -15128,7 +15137,7 @@ fn NewParser_(
fn visitSingleStmt(p: *P, stmt: Stmt, kind: StmtsKind) Stmt {
const has_if_scope = switch (stmt.data) {
- .s_function => stmt.data.s_function.func.flags.has_if_scope,
+ .s_function => stmt.data.s_function.func.flags.contains(.has_if_scope),
else => false,
};
@@ -15293,13 +15302,13 @@ fn NewParser_(
var constructor_function_: ?*E.Function = null;
var name_to_keep: ?string = null;
- if (is_private) {} else if (!property.flags.is_method and !property.flags.is_computed) {
+ if (is_private) {} else if (!property.flags.contains(.is_method) and !property.flags.contains(.is_computed)) {
if (property.key) |key| {
if (@as(Expr.Tag, key.data) == .e_string) {
name_to_keep = key.data.e_string.string(p.allocator) catch unreachable;
}
}
- } else if (property.flags.is_method) {
+ } else if (property.flags.contains(.is_method)) {
if (comptime is_typescript_enabled) {
if (property.value.?.data == .e_function and property.key.?.data == .e_string and
property.key.?.data.e_string.eqlComptime("constructor"))
@@ -15917,7 +15926,7 @@ fn NewParser_(
.open_parens_loc = logger.Loc.Empty,
.args = require_function_args,
.body = .{ .loc = logger.Loc.Empty, .stmts = parts[parts.len - 1].stmts },
- .flags = .{ .is_export = true },
+ .flags = Flags.Function.init(.{ .is_export = true }),
} },
logger.Loc.Empty,
);
@@ -16259,10 +16268,10 @@ fn NewParser_(
.body = .{ .loc = logger.Loc.Empty, .stmts = part_stmts[0 .. part_stmts_i + 1] },
.name = null,
.open_parens_loc = logger.Loc.Empty,
- .flags = .{
+ .flags = Flags.Function.init(.{
.print_as_iife = true,
.is_async = is_async,
- },
+ }),
},
},
logger.Loc.Empty,
@@ -16361,7 +16370,7 @@ fn NewParser_(
}
part.stmts = _stmts[0 .. imports_list.len + toplevel_stmts.len + exports_from.len];
- }
+ } else if (p.options.features.hot_module_reloading) {}
{
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 42298c4c0..a424f061d 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -590,7 +590,7 @@ pub fn NewPrinter(
}
pub fn printFunc(p: *Printer, func: G.Fn) void {
- p.printFnArgs(func.args, func.flags.has_rest_arg, false);
+ p.printFnArgs(func.args, func.flags.contains(.has_rest_arg), false);
p.printSpace();
p.printBlock(func.body.loc, func.body.stmts);
}
@@ -1486,11 +1486,11 @@ pub fn NewPrinter(
}
p.printSpaceBeforeIdentifier();
- if (e.func.flags.is_async) {
+ if (e.func.flags.contains(.is_async)) {
p.print("async ");
}
p.print("function");
- if (e.func.flags.is_generator) {
+ if (e.func.flags.contains(.is_generator)) {
p.print("*");
p.printSpace();
}
@@ -2053,45 +2053,55 @@ pub fn NewPrinter(
}
pub fn printProperty(p: *Printer, item: G.Property) void {
- if (item.kind == .spread) {
- p.print("...");
- p.printExpr(item.value.?, .comma, ExprFlag.None());
- return;
- }
-
- if (item.flags.is_static) {
- p.print("static");
- p.printSpace();
- }
+ if (comptime !is_json) {
+ if (item.kind == .spread) {
+ if (comptime is_json and Environment.allow_assert)
+ unreachable;
+ p.print("...");
+ p.printExpr(item.value.?, .comma, ExprFlag.None());
+ return;
+ }
- switch (item.kind) {
- .get => {
- p.printSpaceBeforeIdentifier();
- p.print("get");
- p.printSpace();
- },
- .set => {
- p.printSpaceBeforeIdentifier();
- p.print("set");
+ if (item.flags.contains(.is_static)) {
+ if (comptime is_json and Environment.allow_assert)
+ unreachable;
+ p.print("static");
p.printSpace();
- },
- else => {},
+ }
+
+ switch (item.kind) {
+ .get => {
+ if (comptime is_json and Environment.allow_assert)
+ unreachable;
+ p.printSpaceBeforeIdentifier();
+ p.print("get");
+ p.printSpace();
+ },
+ .set => {
+ if (comptime is_json and Environment.allow_assert)
+ unreachable;
+ p.printSpaceBeforeIdentifier();
+ p.print("set");
+ p.printSpace();
+ },
+ else => {},
+ }
}
if (item.value) |val| {
switch (val.data) {
.e_function => |func| {
- if (item.flags.is_method) {
- if (func.func.flags.is_async) {
+ if (item.flags.contains(.is_method)) {
+ if (func.func.flags.contains(.is_async)) {
p.printSpaceBeforeIdentifier();
p.print("async");
}
- if (func.func.flags.is_generator) {
+ if (func.func.flags.contains(.is_generator)) {
p.print("*");
}
- if (func.func.flags.is_generator and func.func.flags.is_async) {
+ if (func.func.flags.contains(.is_generator) and func.func.flags.contains(.is_async)) {
p.printSpace();
}
}
@@ -2113,7 +2123,7 @@ pub fn NewPrinter(
const _key = item.key.?;
- if (item.flags.is_computed) {
+ if (item.flags.contains(.is_computed)) {
p.print("[");
p.printExpr(_key, .comma, ExprFlag.None());
p.print("]");
@@ -2121,7 +2131,7 @@ pub fn NewPrinter(
if (item.value) |val| {
switch (val.data) {
.e_function => |func| {
- if (item.flags.is_method) {
+ if (item.flags.contains(.is_method)) {
p.printFunc(func.func);
return;
}
@@ -2212,11 +2222,11 @@ pub fn NewPrinter(
switch (val.data) {
.e_identifier => |e| {
- // TODO: is needing to check item.flags.was_shorthand a bug?
+ // TODO: is needing to check item.flags.contains(.was_shorthand) a bug?
// esbuild doesn't have to do that...
// maybe it's a symptom of some other underlying issue
// or maybe, it's because i'm not lowering the same way that esbuild does.
- if (item.flags.was_shorthand or strings.utf16EqlString(key.value, p.renamer.nameForSymbol(e.ref))) {
+ if (item.flags.contains(.was_shorthand) or strings.utf16EqlString(key.value, p.renamer.nameForSymbol(e.ref))) {
if (item.initializer) |initial| {
p.printInitializer(initial);
}
@@ -2263,7 +2273,7 @@ pub fn NewPrinter(
if (item.value) |val| {
switch (val.data) {
.e_function => |f| {
- if (item.flags.is_method) {
+ if (item.flags.contains(.is_method)) {
p.printFunc(f.func);
return;
@@ -2357,10 +2367,10 @@ pub fn NewPrinter(
p.printIndent();
}
- if (property.flags.is_spread) {
+ if (property.flags.contains(.is_spread)) {
p.print("...");
} else {
- if (property.flags.is_computed) {
+ if (property.flags.contains(.is_computed)) {
p.print("[");
p.printExpr(property.key, .comma, ExprFlag.None());
p.print("]:");
@@ -2478,16 +2488,16 @@ pub fn NewPrinter(
p.printSpaceBeforeIdentifier();
const name = s.func.name orelse Global.panic("Internal error: expected func to have a name ref\n{s}", .{s});
const nameRef = name.ref orelse Global.panic("Internal error: expected func to have a name\n{s}", .{s});
- if (s.func.flags.is_export) {
+ if (s.func.flags.contains(.is_export)) {
if (!rewrite_esm_to_cjs) {
p.print("export ");
}
}
- if (s.func.flags.is_async) {
+ if (s.func.flags.contains(.is_async)) {
p.print("async ");
}
p.print("function");
- if (s.func.flags.is_generator) {
+ if (s.func.flags.contains(.is_generator)) {
p.print("*");
p.printSpace();
}
@@ -2496,7 +2506,7 @@ pub fn NewPrinter(
p.printSymbol(nameRef);
p.printFunc(s.func);
- // if (rewrite_esm_to_cjs and s.func.flags.is_export) {
+ // if (rewrite_esm_to_cjs and s.func.flags.contains(.is_export)) {
// p.printSemicolonAfterStatement();
// p.print("var ");
// p.printSymbol(nameRef);
@@ -2507,7 +2517,7 @@ pub fn NewPrinter(
p.printNewline();
// }
- if (rewrite_esm_to_cjs and s.func.flags.is_export) {
+ if (rewrite_esm_to_cjs and s.func.flags.contains(.is_export)) {
p.printIndent();
p.printBundledExport(p.renamer.nameForSymbol(nameRef), p.renamer.nameForSymbol(nameRef));
p.printSemicolonAfterStatement();
@@ -2587,12 +2597,12 @@ pub fn NewPrinter(
}
}
- if (func.func.flags.is_async) {
+ if (func.func.flags.contains(.is_async)) {
p.print("async ");
}
p.print("function");
- if (func.func.flags.is_generator) {
+ if (func.func.flags.contains(.is_generator)) {
p.print("*");
p.printSpace();
} else {
diff --git a/src/json_parser.zig b/src/json_parser.zig
index c2236228f..c17f5b177 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -84,7 +84,37 @@ const HashMapPool = struct {
}
};
+// This hack fixes using LLDB
fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
+ return JSONLikeParser_(
+ opts.is_json,
+ opts.allow_comments,
+ opts.allow_trailing_commas,
+ opts.ignore_leading_escape_sequences,
+ opts.ignore_trailing_escape_sequences,
+ opts.json_warn_duplicate_keys,
+ opts.was_originally_macro,
+ );
+}
+
+fn JSONLikeParser_(
+ opts_is_json: bool,
+ opts_allow_comments: bool,
+ opts_allow_trailing_commas: bool,
+ opts_ignore_leading_escape_sequences: bool,
+ opts_ignore_trailing_escape_sequences: bool,
+ opts_json_warn_duplicate_keys: bool,
+ opts_was_originally_macro: bool,
+) type {
+ const opts = js_lexer.JSONOptions{
+ .is_json = opts_is_json,
+ .allow_comments = opts_allow_comments,
+ .allow_trailing_commas = opts_allow_trailing_commas,
+ .ignore_leading_escape_sequences = opts_ignore_leading_escape_sequences,
+ .ignore_trailing_escape_sequences = opts_ignore_trailing_escape_sequences,
+ .json_warn_duplicate_keys = opts_json_warn_duplicate_keys,
+ .was_originally_macro = opts_was_originally_macro,
+ };
return struct {
const Lexer = js_lexer.NewLexer(if (LEXER_DEBUGGER_WORKAROUND) js_lexer.JSONOptions{} else opts);
@@ -228,8 +258,14 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
}
}
- const str = p.lexer.toEString();
+ const str = if (comptime force_utf8)
+ p.lexer.toUTF8EString()
+ else
+ p.lexer.toEString();
+
const key_range = p.lexer.range();
+ const key = p.e(str, key_range.loc);
+ try p.lexer.expect(.t_string_literal);
if (comptime opts.json_warn_duplicate_keys) {
const hash_key = str.hash();
@@ -242,12 +278,13 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
}
}
- const key = p.e(str, key_range.loc);
- try p.lexer.expect(.t_string_literal);
-
try p.lexer.expect(.t_colon);
const value = try p.parseExpr(false, force_utf8);
- properties.append(G.Property{ .key = key, .value = value }) catch unreachable;
+ properties.append(G.Property{
+ .key = key,
+ .value = value,
+ .kind = js_ast.G.Property.Kind.normal,
+ }) catch unreachable;
}
if (p.lexer.has_newline_before) {
diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig
index b1c5875b6..447409661 100644
--- a/src/libarchive/libarchive.zig
+++ b/src/libarchive/libarchive.zig
@@ -357,7 +357,7 @@ pub const Archive = struct {
pub fn hash(_: @This(), k: u64) u32 {
return @truncate(u32, k);
}
- pub fn eql(_: @This(), a: u64, b: u64) bool {
+ pub fn eql(_: @This(), a: u64, b: u64, _: usize) bool {
return a == b;
}
};
@@ -428,7 +428,7 @@ pub const Archive = struct {
const size = @intCast(usize, std.math.max(lib.archive_entry_size(entry), 0));
if (size > 0) {
- var opened = dir.openFileZ(pathname, .{ .write = true }) catch continue :loop;
+ var opened = dir.openFileZ(pathname, .{ .mode = .write_only }) catch continue :loop;
var stat = try opened.stat();
if (stat.size > 0) {
diff --git a/src/linker.zig b/src/linker.zig
index b82fa03ec..e4e00b875 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -97,7 +97,7 @@ pub const Linker = struct {
file_path: Fs.Path,
fd: ?FileDescriptorType,
) !Fs.FileSystem.RealFS.ModKey {
- var file: std.fs.File = if (fd) |_fd| std.fs.File{ .handle = _fd } else try std.fs.openFileAbsolute(file_path.text, .{ .read = true });
+ var file: std.fs.File = if (fd) |_fd| std.fs.File{ .handle = _fd } else try std.fs.openFileAbsolute(file_path.text, .{ .mode = .read_only });
Fs.FileSystem.setMaxFd(file.handle);
const modkey = try Fs.FileSystem.RealFS.ModKey.generate(&this.fs.fs, file_path.text, file);
diff --git a/src/node_fallbacks.zig b/src/node_fallbacks.zig
index aff3114be..d3ffad3a8 100644
--- a/src/node_fallbacks.zig
+++ b/src/node_fallbacks.zig
@@ -3,6 +3,7 @@ const string = @import("./string_types.zig").string;
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
const logger = @import("./logger.zig");
const Fs = @import("./fs.zig");
+const ComptimeStringMap = @import("./comptime_string_map.zig").ComptimeStringMap;
const assert_code: string = @embedFile("./node-fallbacks/out/assert.js");
const buffer_code: string = @embedFile("./node-fallbacks/out/buffer.js");
@@ -443,7 +444,7 @@ pub const FallbackModule = struct {
};
};
-pub const Map = std.ComptimeStringMap(FallbackModule, .{
+pub const Map = ComptimeStringMap(FallbackModule, .{
&.{ "assert", FallbackModule.assert },
&.{ "buffer", FallbackModule.buffer },
&.{ "console", FallbackModule.console },
diff --git a/src/open.zig b/src/open.zig
index b73bc282e..c0db23333 100644
--- a/src/open.zig
+++ b/src/open.zig
@@ -10,6 +10,7 @@ const default_allocator = _global.default_allocator;
const C = _global.C;
const std = @import("std");
const DotEnv = @import("env_loader.zig");
+const ComptimeStringMap = @import("./comptime_string_map.zig").ComptimeStringMap;
const opener = switch (@import("builtin").target.os.tag) {
.macos => "/usr/bin/open",
.windows => "start",
@@ -49,7 +50,7 @@ pub const Editor = enum(u8) {
const StringMap = std.EnumMap(Editor, string);
const StringArrayMap = std.EnumMap(Editor, []const [:0]const u8);
- const name_map = std.ComptimeStringMap(Editor, .{
+ const name_map = ComptimeStringMap(Editor, .{
.{ "sublime", Editor.sublime },
.{ "subl", Editor.sublime },
.{ "vscode", Editor.vscode },
diff --git a/src/options.zig b/src/options.zig
index c0808d96e..cf849df0d 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -29,6 +29,7 @@ const Runtime = @import("./runtime.zig").Runtime;
const Analytics = @import("./analytics/analytics_thread.zig");
const MacroRemap = @import("./resolver/package_json.zig").MacroMap;
const DotEnv = @import("./env_loader.zig");
+const ComptimeStringMap = @import("./comptime_string_map.zig").ComptimeStringMap;
const assert = std.debug.assert;
@@ -290,7 +291,7 @@ pub const ExternalModules = struct {
"zlib",
};
- pub const NodeBuiltinsMap = std.ComptimeStringMap(bool, .{
+ pub const NodeBuiltinsMap = ComptimeStringMap(bool, .{
.{ "_http_agent", true },
.{ "_http_client", true },
.{ "_http_common", true },
@@ -362,7 +363,7 @@ pub const ModuleType = enum {
cjs,
esm,
- pub const List = std.ComptimeStringMap(ModuleType, .{
+ pub const List = ComptimeStringMap(ModuleType, .{
.{ "commonjs", ModuleType.cjs },
.{ "module", ModuleType.esm },
});
@@ -739,7 +740,7 @@ pub const Loader = enum(u4) {
}
};
-pub const defaultLoaders = std.ComptimeStringMap(Loader, .{
+pub const defaultLoaders = ComptimeStringMap(Loader, .{
.{ ".jsx", Loader.jsx },
.{ ".json", Loader.json },
.{ ".js", Loader.jsx },
@@ -1310,7 +1311,7 @@ pub const BundleOptions = struct {
if (bundle_path.len > 0) {
load_bundle: {
const pretty_path = fs.relativeTo(bundle_path);
- var bundle_file = std.fs.openFileAbsolute(bundle_path, .{ .read = true, .write = true }) catch |err| {
+ var bundle_file = std.fs.openFileAbsolute(bundle_path, .{ .mode = .read_write }) catch |err| {
if (is_generating_bundle) {
break :load_bundle;
}
@@ -1492,7 +1493,7 @@ pub const BundleOptions = struct {
if (opts.routes.static_dir_enabled and should_try_to_find_a_index_html_file) {
const dir = opts.routes.static_dir_handle.?;
- var index_html_file = dir.openFile("index.html", .{ .read = true }) catch |err| brk: {
+ var index_html_file = dir.openFile("index.html", .{ .mode = .read_only }) catch |err| brk: {
switch (err) {
error.FileNotFound => {},
else => {
@@ -1522,7 +1523,7 @@ pub const BundleOptions = struct {
abs_buf[full_path.len] = 0;
var abs_buf_z: [:0]u8 = abs_buf[0..full_path.len :0];
- const file = std.fs.openFileAbsoluteZ(abs_buf_z, .{ .read = true }) catch |err| {
+ const file = std.fs.openFileAbsoluteZ(abs_buf_z, .{ .mode = .read_only }) catch |err| {
switch (err) {
error.FileNotFound => {},
else => {
@@ -1737,7 +1738,7 @@ pub const OutputFile = struct {
const fd_out = file_out.handle;
var do_close = false;
// TODO: close file_out on error
- const fd_in = (try std.fs.openFileAbsolute(file.input.text, .{ .read = true })).handle;
+ const fd_in = (try std.fs.openFileAbsolute(file.input.text, .{ .mode = .read_only })).handle;
if (Environment.isWindows) {
Fs.FileSystem.setMaxFd(fd_out);
diff --git a/src/resolver/data_url.zig b/src/resolver/data_url.zig
index 486f60608..46638d10b 100644
--- a/src/resolver/data_url.zig
+++ b/src/resolver/data_url.zig
@@ -13,6 +13,7 @@ const std = @import("std");
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = mem.Allocator;
+const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap;
// https://github.com/Vexu/zuri/blob/master/src/zuri.zig#L61-L127
pub const PercentEncoding = struct {
@@ -83,7 +84,7 @@ pub const MimeType = enum {
TextJavaScript,
ApplicationJSON,
- pub const Map = std.ComptimeStringMap(MimeType, .{
+ pub const Map = ComptimeStringMap(MimeType, .{
.{ "text/css", MimeType.TextCSS },
.{ "text/javascript", MimeType.TextJavaScript },
.{ "application/json", MimeType.ApplicationJSON },
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index d3d8856cb..cd907ea2c 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -575,7 +575,7 @@ pub const Resolver = struct {
defer {
if (FeatureFlags.tracing) {
- if (timer) |time| {
+ if (timer) |*time| {
// technically, this should be an atomic op
r.elapsed += time.read();
}
@@ -672,6 +672,13 @@ pub const Resolver = struct {
return result;
}
+ const ModuleTypeMap = _global.ComptimeStringMap(options.ModuleType, .{
+ .{ ".mjs", options.ModuleType.esm },
+ .{ ".mts", options.ModuleType.esm },
+ .{ ".cjs", options.ModuleType.cjs },
+ .{ ".cts", options.ModuleType.cjs },
+ });
+
pub fn finalizeResult(r: *ThisResolver, result: *Result, kind: ast.ImportKind) !void {
if (result.is_external) return;
@@ -693,12 +700,7 @@ pub const Resolver = struct {
// If you use cjs or cts, then you're using cjs
// This should win out over the module type from package.json
if (!kind.isFromCSS() and module_type == .unknown and path.name.ext.len == 4) {
- const FourLetterMatcher = strings.ExactSizeMatcher(4);
- module_type = switch (FourLetterMatcher.match(path.name.ext)) {
- FourLetterMatcher.case(".mjs"), FourLetterMatcher.case(".mts") => .esm,
- FourLetterMatcher.case(".cjs"), FourLetterMatcher.case(".cts") => .cjs,
- else => .unknown,
- };
+ module_type = ModuleTypeMap.getWithLength(path.name.ext, 4) orelse .unknown;
}
if (dir.getEntries()) |entries| {
@@ -720,7 +722,7 @@ pub const Resolver = struct {
if (query.entry.cache.fd == 0) {
buf[out.len] = 0;
const span = buf[0..out.len :0];
- var file = try std.fs.openFileAbsoluteZ(span, .{ .read = true });
+ var file = try std.fs.openFileAbsoluteZ(span, .{ .mode = .read_only });
if (comptime !FeatureFlags.store_file_descriptors) {
out = try std.os.getFdPath(query.entry.cache.fd, &buf);
diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig
index 82b658d69..cbf0b5b67 100644
--- a/src/resolver/tsconfig_json.zig
+++ b/src/resolver/tsconfig_json.zig
@@ -14,6 +14,7 @@ const logger = @import("../logger.zig");
const cache = @import("../cache.zig");
const js_ast = @import("../js_ast.zig");
const js_lexer = @import("../js_lexer.zig");
+const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap;
// Heuristic: you probably don't have 100 of these
// Probably like 5-10
@@ -60,7 +61,7 @@ pub const TSConfigJSON = struct {
remove,
invalid,
- pub const List = std.ComptimeStringMap(ImportsNotUsedAsValue, .{
+ pub const List = ComptimeStringMap(ImportsNotUsedAsValue, .{
.{ "preserve", ImportsNotUsedAsValue.preserve },
.{ "error", ImportsNotUsedAsValue.err },
.{ "remove", ImportsNotUsedAsValue.remove },
diff --git a/src/router.zig b/src/router.zig
index cad7d182b..b6754156c 100644
--- a/src/router.zig
+++ b/src/router.zig
@@ -718,7 +718,7 @@ pub const Route = struct {
abs_path_str = FileSystem.instance.absBuf(&parts, &route_file_buf);
route_file_buf[abs_path_str.len] = 0;
var buf = route_file_buf[0..abs_path_str.len :0];
- file = std.fs.openFileAbsoluteZ(buf, .{ .read = true }) catch |err| {
+ file = std.fs.openFileAbsoluteZ(buf, .{ .mode = .read_only }) catch |err| {
log.addErrorFmt(null, Logger.Loc.Empty, allocator, "{s} opening route: {s}", .{ @errorName(err), abs_path_str }) catch unreachable;
return null;
};
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index ededed769..9e90093af 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -403,7 +403,7 @@ pub fn eqlComptimeIgnoreLen(self: string, comptime alt: anytype) bool {
return eqlComptimeCheckLenWithType(u8, self, alt, false);
}
-inline fn eqlComptimeCheckLenWithType(comptime Type: type, a: []const Type, comptime b: anytype, comptime check_len: bool) bool {
+pub inline fn eqlComptimeCheckLenWithType(comptime Type: type, a: []const Type, comptime b: anytype, comptime check_len: bool) bool {
@setEvalBranchQuota(9999);
if (comptime check_len) {
if (comptime b.len == 0) {
diff --git a/src/string_mutable.zig b/src/string_mutable.zig
index ad9765a26..e8a46af9b 100644
--- a/src/string_mutable.zig
+++ b/src/string_mutable.zig
@@ -39,13 +39,15 @@ pub const MutableString = struct {
return bytes.len;
}
- pub fn writeAll(self: *MutableString, bytes: string) !usize {
- try self.list.appendSlice(self.allocator, bytes);
- return self.list.items.len;
+ pub fn bufferedWriter(self: *MutableString) BufferedWriter {
+ return BufferedWriter{ .context = self };
}
pub fn init(allocator: std.mem.Allocator, capacity: usize) !MutableString {
- return MutableString{ .allocator = allocator, .list = try std.ArrayListUnmanaged(u8).initCapacity(allocator, capacity) };
+ return MutableString{ .allocator = allocator, .list = if (capacity > 0)
+ try std.ArrayListUnmanaged(u8).initCapacity(allocator, capacity)
+ else
+ std.ArrayListUnmanaged(u8){} };
}
pub fn initCopy(allocator: std.mem.Allocator, str: anytype) !MutableString {
@@ -223,6 +225,54 @@ pub const MutableString = struct {
pub fn eql(self: *MutableString, other: anytype) bool {
return std.mem.eql(u8, self.list.items, other);
}
+
+ pub const BufferedWriter = struct {
+ context: *MutableString,
+ buffer: [max]u8 = undefined,
+ pos: usize = 0,
+
+ const max = 2048;
+
+ pub const Writer = std.io.Writer(*BufferedWriter, anyerror, BufferedWriter.writeAll);
+
+ inline fn remain(this: *BufferedWriter) []u8 {
+ return this.buffer[this.pos..];
+ }
+
+ pub fn flush(this: *BufferedWriter) !void {
+ _ = try this.context.writeAll(this.buffer[0..this.pos]);
+ this.pos = 0;
+ }
+
+ pub fn writeAll(this: *BufferedWriter, bytes: []const u8) anyerror!usize {
+ var pending = bytes;
+
+ if (pending.len >= max) {
+ try this.flush();
+ try this.context.append(pending);
+ return pending.len;
+ }
+
+ if (pending.len > 0) {
+ if (pending.len + this.pos > max) {
+ try this.flush();
+ }
+ @memcpy(this.remain().ptr, pending.ptr, pending.len);
+ this.pos += pending.len;
+ }
+
+ return pending.len;
+ }
+
+ pub fn writer(this: *BufferedWriter) BufferedWriter.Writer {
+ return BufferedWriter.Writer{ .context = this };
+ }
+ };
+
+ pub fn writeAll(self: *MutableString, bytes: string) !usize {
+ try self.list.appendSlice(self.allocator, bytes);
+ return bytes.len;
+ }
};
test "MutableString" {
@@ -238,3 +288,15 @@ test "MutableString.ensureValidIdentifier" {
try std.testing.expectEqualStrings("jquery", try MutableString.ensureValidIdentifier("jquery", alloc));
try std.testing.expectEqualStrings("jquery_foo", try MutableString.ensureValidIdentifier("jquery😋foo", alloc));
}
+
+test "MutableString BufferedWriter" {
+ const alloc = std.heap.page_allocator;
+
+ var str = try MutableString.init(alloc, 0);
+ var buffered_writer = str.bufferedWriter();
+ var writer = buffered_writer.writer();
+ try writer.writeAll("hello world hello world hello world hello world hello world hello world");
+ try writer.context.flush();
+ str = writer.context.context.*;
+ try std.testing.expectEqualStrings("hello world hello world hello world hello world hello world hello world", str.toOwnedSlice());
+}
diff --git a/src/watcher.zig b/src/watcher.zig
index a5775071b..5aaa56acb 100644
--- a/src/watcher.zig
+++ b/src/watcher.zig
@@ -68,7 +68,6 @@ pub const INotify = struct {
pub fn name(this: *const INotifyEvent) [:0]u8 {
if (comptime Environment.allow_assert) std.debug.assert(this.name_len > 0);
-
// the name_len field is wrong
// it includes alignment / padding
// but it is a sentineled value
@@ -136,9 +135,7 @@ pub const INotify = struct {
var i: u32 = 0;
while (i < len) : (i += @sizeOf(INotifyEvent)) {
const event = @ptrCast(*const INotifyEvent, @alignCast(@alignOf(*const INotifyEvent), eventlist[i..][0..@sizeOf(INotifyEvent)]));
- if (event.name_len > 0) {
- i += event.name_len;
- }
+ i += event.name_len;
eventlist_ptrs[count] = event;
count += 1;