author     Jarred Sumner <jarred@jarredsumner.com>  2021-05-30 12:50:08 -0700
committer  Jarred Sumner <jarred@jarredsumner.com>  2021-05-30 12:50:08 -0700
commit     d264d4e4ba40ae754765afcc3d134beb4521a5cd (patch)
tree       33b7ba861e05570c5c848a6afcdcfbaf60e9b217
parent     534f9cf509a1816734ff77af0ae132069b25761c (diff)
download   bun-d264d4e4ba40ae754765afcc3d134beb4521a5cd.tar.gz
           bun-d264d4e4ba40ae754765afcc3d134beb4521a5cd.tar.zst
           bun-d264d4e4ba40ae754765afcc3d134beb4521a5cd.zip
Fix Define, JSX, use more pointers for property access to minimize calls to memmove
Former-commit-id: 09ceececba22d159805662d69b8e3aad83d20590
-rw-r--r--  .vscode/launch.json             2
-rw-r--r--  src/ast/base.zig                4
-rw-r--r--  src/bundler.zig                29
-rw-r--r--  src/cli.zig                     6
-rw-r--r--  src/defines.zig               332
-rw-r--r--  src/js_ast.zig                448
-rw-r--r--  src/js_parser/js_parser.zig   160
-rw-r--r--  src/js_printer.zig            167
-rw-r--r--  src/resolver/package_json.zig  11
9 files changed, 364 insertions, 795 deletions
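
The pattern this commit applies across the parser, printer, and AST: instead of getter helpers (expr.getBoolean(), expr.getUnary(), ...) that return tagged-union payloads by value, switch prongs capture the payload (|e|) and loops capture elements by pointer (|*item|), so the stored pointer is used directly and the implicit struct copies (the memmove calls named in the title) go away. A minimal sketch of both captures, assuming the 0.8-era Zig syntax used in this repo; Payload and Node are illustrative types, not from the codebase:

    const Payload = struct {
        data: [64]u8 = undefined,
        flag: bool = false,
    };

    const Node = union(enum) {
        heavy: *Payload,
        light: u32,
    };

    // Capturing the union payload in the switch (`|p|`) hands back the stored
    // pointer directly; a getter returning the payload by value would copy it.
    fn isFlagged(node: Node) bool {
        return switch (node) {
            .heavy => |p| p.flag,
            .light => false,
        };
    }

    // `|*node|` iterates by pointer, so no element is copied per iteration.
    fn flagAll(nodes: []Node) void {
        for (nodes) |*node| {
            switch (node.*) {
                .heavy => |p| p.flag = true,
                .light => {},
            }
        }
    }
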
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 3cdacdab8..774ec4e7e 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -26,7 +26,7 @@
"request": "launch",
"name": "Dev Launch",
"program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
- "args": ["./simple.jsx", "--resolve=disable"],
+ "args": ["./defines.jsx", "--resolve=disable"],
"cwd": "${workspaceFolder}/src/test/fixtures",
"console": "internalConsole"
},
diff --git a/src/ast/base.zig b/src/ast/base.zig
index 361339f7f..59d7c6c12 100644
--- a/src/ast/base.zig
+++ b/src/ast/base.zig
@@ -27,8 +27,8 @@ pub const Ref = packed struct {
inner_index: Int = 0,
is_source_contents_slice: bool = false,
- pub fn hash(key: Ref) u64 {
- @compileError("Dont call");
+ pub fn hash(key: Ref) u32 {
+ return @truncate(u32, std.hash.Wyhash.hash(0, std.mem.asBytes(&key)));
}
// 2 bits of padding for whatever is the parent
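
The new Ref.hash returns a u32 because std.ArrayHashMap (used for the NamedImports map introduced later in this commit) takes a 32-bit hash function; Wyhash runs over the packed struct's raw bytes and the u64 result is truncated. A minimal sketch of the same pattern under the 0.8-era Zig used here; the Ref fields and RefCountMap below are illustrative, not the repo's exact definitions:

    const std = @import("std");

    const Ref = packed struct {
        source_index: u32 = 0,
        inner_index: u32 = 0,

        // ArrayHashMap wants a 32-bit hash, so hash the raw bytes and truncate.
        pub fn hash(key: Ref) u32 {
            return @truncate(u32, std.hash.Wyhash.hash(0, std.mem.asBytes(&key)));
        }

        pub fn eql(a: Ref, b: Ref) bool {
            return a.source_index == b.source_index and a.inner_index == b.inner_index;
        }
    };

    // Usage, mirroring std.ArrayHashMap(Ref, NamedImport, Ref.hash, Ref.eql, true):
    const RefCountMap = std.ArrayHashMap(Ref, u32, Ref.hash, Ref.eql, true);
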
diff --git a/src/bundler.zig b/src/bundler.zig
index e0462e74b..5b92af3f9 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -156,8 +156,7 @@ pub const Bundler = struct {
}
}
- pub fn processImportRecord(bundler: *Bundler, source_dir: string, import_record: *ImportRecord) !void {
- var resolve_result = try bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind);
+ pub fn processImportRecord(bundler: *Bundler, source_dir: string, resolve_result: *Resolver.Resolver.Result, import_record: *ImportRecord) !void {
// extremely naive.
resolve_result.is_from_node_modules = strings.contains(resolve_result.path_pair.primary.text, "/node_modules");
@@ -174,13 +173,13 @@ pub const Bundler = struct {
// Run the resolver
// Don't parse/print automatically.
if (bundler.options.resolve_mode != .lazy) {
- try bundler.enqueueResolveResult(&resolve_result);
+ try bundler.enqueueResolveResult(resolve_result);
}
import_record.path = try bundler.generateImportPath(source_dir, resolve_result.path_pair.primary.text);
}
- pub fn resolveResultHashKey(bundler: *Bundler, resolve_result: *Resolver.Resolver.Result) string {
+ pub fn resolveResultHashKey(bundler: *Bundler, resolve_result: *const Resolver.Resolver.Result) string {
var hash_key = resolve_result.path_pair.primary.text;
// Shorter hash key is faster to hash
@@ -191,7 +190,7 @@ pub const Bundler = struct {
return hash_key;
}
- pub fn enqueueResolveResult(bundler: *Bundler, resolve_result: *Resolver.Resolver.Result) !void {
+ pub fn enqueueResolveResult(bundler: *Bundler, resolve_result: *const Resolver.Resolver.Result) !void {
const hash_key = bundler.resolveResultHashKey(resolve_result);
const get_or_put_entry = try bundler.resolve_results.backing.getOrPut(hash_key);
@@ -225,10 +224,19 @@ pub const Bundler = struct {
const ast = result.ast;
for (ast.import_records) |*import_record| {
- bundler.processImportRecord(
- std.fs.path.dirname(file_path.text) orelse file_path.text,
- import_record,
- ) catch |err| {
+ const source_dir = std.fs.path.dirname(file_path.text) orelse file_path.text;
+
+ if (bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |*resolved_import| {
+ bundler.processImportRecord(
+ source_dir,
+ resolved_import,
+ import_record,
+ ) catch continue;
+
+ // "Linking"
+ // 1. Associate an ImportRecord with NamedImports
+ // 2. If there is a default import, import the runtime wrapper
+ } else |err| {
switch (err) {
error.ModuleNotFound => {
if (Resolver.Resolver.isPackagePath(import_record.path.text)) {
@@ -259,13 +267,14 @@ pub const Bundler = struct {
import_record.path.text,
},
);
+ continue;
}
},
else => {
continue;
},
}
- };
+ }
}
},
else => {},
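
The bundler loop above now resolves first and then branches with Zig's error-union capture: the success payload is used in the if branch (captured by pointer so a *Result can be handed to processImportRecord), and the failure is switched on in else |err| rather than in a trailing catch. A minimal sketch of that control flow, assuming the 0.8-era Zig used here; resolve and processOne are hypothetical stand-ins:

    const std = @import("std");

    const ResolveError = error{ModuleNotFound};

    fn resolve(specifier: []const u8) ResolveError![]const u8 {
        if (std.mem.startsWith(u8, specifier, "./")) return specifier;
        return error.ModuleNotFound;
    }

    // Resolve once, consume the success payload, and handle the error set
    // explicitly in the `else |err|` branch.
    fn processOne(specifier: []const u8) void {
        if (resolve(specifier)) |resolved| {
            std.debug.print("resolved: {s}\n", .{resolved});
        } else |err| switch (err) {
            error.ModuleNotFound => std.debug.print("module not found: {s}\n", .{specifier}),
        }
    }
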
diff --git a/src/cli.zig b/src/cli.zig
index fcae111c5..572e54460 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -391,9 +391,9 @@ pub const Cli = struct {
}
if (isDebug) {
- Output.println("Expr count: {d}", .{js_ast.Expr.icount});
- Output.println("Stmt count: {d}", .{js_ast.Stmt.icount});
-
+ Output.println("Expr count: {d}", .{js_ast.Expr.icount});
+ Output.println("Stmt count: {d}", .{js_ast.Stmt.icount});
+ Output.println("Binding count: {d}", .{js_ast.Binding.icount});
Output.println("File Descriptors: {d} / {d}", .{
fs.FileSystem.max_fd,
open_file_limit,
diff --git a/src/defines.zig b/src/defines.zig
index c16bf9edb..08447cde1 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -63,63 +63,63 @@ pub const DefineData = struct {
var user_defines = UserDefines.init(allocator);
try user_defines.ensureCapacity(defines.count());
- // var iter = defines.iterator();
- // while (iter.next()) |entry| {
- // var splitter = std.mem.split(entry.key, ".");
- // while (splitter.next()) |part| {
- // if (!js_lexer.isIdentifier(part)) {
- // if (strings.eql(part, entry.key)) {
- // try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
- // } else {
- // try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
- // }
- // break;
- // }
- // }
-
- // if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
- // var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
- // ident.ref = Ref.None;
- // ident.can_be_removed_if_unused = true;
- // user_defines.putAssumeCapacity(
- // entry.key,
- // DefineData{
- // .value = js_ast.Expr.Data{ .e_identifier = ident },
- // .original_name = entry.value,
- // .can_be_removed_if_unused = true,
- // },
- // );
- // // user_defines.putAssumeCapacity(
- // // entry.key,
- // // DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
- // // );
- // continue;
- // }
- // var _log = log;
- // var source = logger.Source{
- // .contents = entry.value,
- // .path = defines_path,
- // .identifier_name = "defines",
- // .key_path = fs.Path.initWithNamespace("defines", "internal"),
- // };
- // var expr = try json_parser.ParseJSON(&source, _log, allocator);
- // var data: js_ast.Expr.Data = undefined;
- // switch (expr.data) {
- // .e_missing => {
- // continue;
- // },
- // .e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
- // data = expr.data;
- // },
- // else => {
- // continue;
- // },
- // }
-
- // user_defines.putAssumeCapacity(entry.key, DefineData{
- // .value = data,
- // });
- // }
+ var iter = defines.iterator();
+ while (iter.next()) |entry| {
+ var splitter = std.mem.split(entry.key, ".");
+ while (splitter.next()) |part| {
+ if (!js_lexer.isIdentifier(part)) {
+ if (strings.eql(part, entry.key)) {
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
+ } else {
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
+ }
+ break;
+ }
+ }
+
+ if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
+ var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
+ ident.ref = Ref.None;
+ ident.can_be_removed_if_unused = true;
+ user_defines.putAssumeCapacity(
+ entry.key,
+ DefineData{
+ .value = js_ast.Expr.Data{ .e_identifier = ident },
+ .original_name = entry.value,
+ .can_be_removed_if_unused = true,
+ },
+ );
+ // user_defines.putAssumeCapacity(
+ // entry.key,
+ // DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
+ // );
+ continue;
+ }
+ var _log = log;
+ var source = logger.Source{
+ .contents = entry.value,
+ .path = defines_path,
+ .identifier_name = "defines",
+ .key_path = fs.Path.initWithNamespace("defines", "internal"),
+ };
+ var expr = try json_parser.ParseJSON(&source, _log, allocator);
+ var data: js_ast.Expr.Data = undefined;
+ switch (expr.data) {
+ .e_missing => {
+ continue;
+ },
+ .e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
+ data = expr.data;
+ },
+ else => {
+ continue;
+ },
+ }
+
+ user_defines.putAssumeCapacity(entry.key, DefineData{
+ .value = data,
+ });
+ }
return user_defines;
}
@@ -147,6 +147,10 @@ pub const DotDefine = struct {
data: DefineData,
};
+// var nan_val = try allocator.create(js_ast.E.Number);
+var nan_val = js_ast.E.Number{ .value = std.math.nan_f64 };
+var inf_val = js_ast.E.Number{ .value = std.math.inf_f64 };
+
pub const Define = struct {
identifiers: std.StringHashMap(IdentifierDefine),
dots: std.StringHashMap([]DotDefine),
@@ -157,118 +161,110 @@ pub const Define = struct {
define.allocator = allocator;
define.identifiers = std.StringHashMap(IdentifierDefine).init(allocator);
define.dots = std.StringHashMap([]DotDefine).init(allocator);
+ try define.identifiers.ensureCapacity(641);
+ try define.dots.ensureCapacity(64);
+
+ var val = js_ast.Expr.Data{ .e_undefined = .{} };
+ var ident_define = IdentifierDefine{
+ .value = val,
+ };
+ var value_define = DefineData{ .value = val, .valueless = true };
+ // Step 1. Load the globals into the hash tables
+ for (GlobalDefinesKey) |global| {
+ if (global.len == 1) {
+
+ // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
+ define.identifiers.putAssumeCapacity(global[0], value_define);
+ } else {
+ const key = global[global.len - 1];
+ // TODO: move this to comptime
+ // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
+ if (define.dots.getEntry(key)) |entry| {
+ var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
+ list.appendSliceAssumeCapacity(entry.value);
+ list.appendAssumeCapacity(DotDefine{
+ .parts = global[0..global.len],
+ .data = value_define,
+ });
+
+ define.dots.putAssumeCapacity(key, list.toOwnedSlice());
+ } else {
+ var list = try std.ArrayList(DotDefine).initCapacity(allocator, 1);
+ list.appendAssumeCapacity(DotDefine{
+ .parts = global[0..global.len],
+ .data = value_define,
+ });
+
+ define.dots.putAssumeCapacity(key, list.toOwnedSlice());
+ }
+ }
+ }
+
+ // Step 2. Swap in certain literal values because those can be constant folded
+ define.identifiers.putAssumeCapacity("undefined", value_define);
+ define.identifiers.putAssumeCapacity("NaN", DefineData{
+ .value = js_ast.Expr.Data{ .e_number = &nan_val },
+ });
+ define.identifiers.putAssumeCapacity("Infinity", DefineData{
+ .value = js_ast.Expr.Data{ .e_number = &inf_val },
+ });
+
+ // Step 3. Load user data into hash tables
+ // At this stage, user data has already been validated.
+ if (_user_defines) |user_defines| {
+ var iter = user_defines.iterator();
+ while (iter.next()) |user_define| {
+ // If it has a dot, then it's a DotDefine.
+ // e.g. process.env.NODE_ENV
+ if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
+ const tail = user_define.key[last_dot + 1 .. user_define.key.len];
+ const remainder = user_define.key[0..last_dot];
+ const count = std.mem.count(u8, remainder, ".") + 1;
+ var parts = try allocator.alloc(string, count + 1);
+ var splitter = std.mem.split(remainder, ".");
+ var i: usize = 0;
+ while (splitter.next()) |split| : (i += 1) {
+ parts[i] = split;
+ }
+ parts[i] = tail;
+ var didFind = false;
+ var initial_values: []DotDefine = &([_]DotDefine{});
+
+ // "NODE_ENV"
+ if (define.dots.getEntry(tail)) |entry| {
+ for (entry.value) |*part| {
+ // ["process", "env"] === ["process", "env"] (if that actually worked)
+ if (arePartsEqual(part.parts, parts)) {
+ part.data = part.data.merge(user_define.value);
+ didFind = true;
+ break;
+ }
+ }
+
+ initial_values = entry.value;
+ }
+
+ if (!didFind) {
+ var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
+ if (initial_values.len > 0) {
+ list.appendSliceAssumeCapacity(initial_values);
+ }
+
+ list.appendAssumeCapacity(DotDefine{
+ .data = user_define.value,
+ // TODO: do we need to allocate this?
+ .parts = parts,
+ });
+ try define.dots.put(tail, list.toOwnedSlice());
+ }
+ } else {
+ // e.g. IS_BROWSER
+ try define.identifiers.put(user_define.key, user_define.value);
+ }
+ }
+ }
+
return define;
- // try define.identifiers.ensureCapacity(641);
- // try define.dots.ensureCapacity(64);
-
- // var undefined_val = try allocator.create(js_ast.E.Undefined);
- // var val = js_ast.Expr.Data{ .e_undefined = undefined_val };
- // var ident_define = IdentifierDefine{
- // .value = val,
- // };
- // var value_define = DefineData{ .value = val, .valueless = true };
- // // Step 1. Load the globals into the hash tables
- // for (GlobalDefinesKey) |global| {
- // if (global.len == 1) {
-
- // // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
- // define.identifiers.putAssumeCapacity(global[0], value_define);
- // } else {
- // const key = global[global.len - 1];
- // // TODO: move this to comptime
- // // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
- // if (define.dots.getEntry(key)) |entry| {
- // var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
- // list.appendSliceAssumeCapacity(entry.value);
- // list.appendAssumeCapacity(DotDefine{
- // .parts = global[0..global.len],
- // .data = value_define,
- // });
-
- // define.dots.putAssumeCapacity(key, list.toOwnedSlice());
- // } else {
- // var list = try std.ArrayList(DotDefine).initCapacity(allocator, 1);
- // list.appendAssumeCapacity(DotDefine{
- // .parts = global[0..global.len],
- // .data = value_define,
- // });
-
- // define.dots.putAssumeCapacity(key, list.toOwnedSlice());
- // }
- // }
- // }
-
- // var nan_val = try allocator.create(js_ast.E.Number);
- // nan_val.value = std.math.nan_f64;
-
- // var inf_val = try allocator.create(js_ast.E.Number);
- // inf_val.value = std.math.inf_f64;
-
- // // Step 2. Swap in certain literal values because those can be constant folded
- // define.identifiers.putAssumeCapacity("undefined", value_define);
- // define.identifiers.putAssumeCapacity("NaN", DefineData{
- // .value = js_ast.Expr.Data{ .e_number = nan_val },
- // });
- // define.identifiers.putAssumeCapacity("Infinity", DefineData{
- // .value = js_ast.Expr.Data{ .e_number = inf_val },
- // });
-
- // // Step 3. Load user data into hash tables
- // // At this stage, user data has already been validated.
- // if (_user_defines) |user_defines| {
- // var iter = user_defines.iterator();
- // while (iter.next()) |user_define| {
- // // If it has a dot, then it's a DotDefine.
- // // e.g. process.env.NODE_ENV
- // if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
- // const tail = user_define.key[last_dot + 1 .. user_define.key.len];
- // const remainder = user_define.key[0..last_dot];
- // const count = std.mem.count(u8, remainder, ".") + 1;
- // var parts = try allocator.alloc(string, count + 1);
- // var splitter = std.mem.split(remainder, ".");
- // var i: usize = 0;
- // while (splitter.next()) |split| : (i += 1) {
- // parts[i] = split;
- // }
- // parts[i] = tail;
- // var didFind = false;
- // var initial_values: []DotDefine = &([_]DotDefine{});
-
- // // "NODE_ENV"
- // if (define.dots.getEntry(tail)) |entry| {
- // for (entry.value) |*part| {
- // // ["process", "env"] === ["process", "env"] (if that actually worked)
- // if (arePartsEqual(part.parts, parts)) {
- // part.data = part.data.merge(user_define.value);
- // didFind = true;
- // break;
- // }
- // }
-
- // initial_values = entry.value;
- // }
-
- // if (!didFind) {
- // var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
- // if (initial_values.len > 0) {
- // list.appendSliceAssumeCapacity(initial_values);
- // }
-
- // list.appendAssumeCapacity(DotDefine{
- // .data = user_define.value,
- // // TODO: do we need to allocate this?
- // .parts = parts,
- // });
- // try define.dots.put(tail, list.toOwnedSlice());
- // }
- // } else {
- // // e.g. IS_BROWSER
- // try define.identifiers.put(user_define.key, user_define.value);
- // }
- // }
- // }
-
- // return define;
}
};
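
The re-enabled Define.init above buckets user defines by whether the key contains a dot: plain identifiers ("IS_BROWSER") go straight into the identifiers map, while dotted keys are split so the tail ("NODE_ENV") keys the dots map and the full part list is kept for exact matching. A minimal sketch of that split using the same std.mem calls as the diff; splitDefineKey is an illustrative helper, the real code inlines this logic:

    const std = @import("std");

    fn splitDefineKey(allocator: *std.mem.Allocator, key: []const u8) ![][]const u8 {
        // "process.env.NODE_ENV" -> tail "NODE_ENV", remainder "process.env"
        const last_dot = std.mem.lastIndexOfScalar(u8, key, '.') orelse return error.NotDotted;
        const tail = key[last_dot + 1 ..];
        const remainder = key[0..last_dot];

        // One slot per dot-separated segment of the remainder, plus the tail.
        const count = std.mem.count(u8, remainder, ".") + 1;
        var parts = try allocator.alloc([]const u8, count + 1);
        var splitter = std.mem.split(remainder, ".");
        var i: usize = 0;
        while (splitter.next()) |part| : (i += 1) {
            parts[i] = part;
        }
        parts[i] = tail; // ["process", "env", "NODE_ENV"]
        return parts;
    }
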
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 774c8247f..d1c136847 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -24,6 +24,7 @@ pub fn NewBaseStore(comptime Union: anytype, comptime count: usize) type {
return struct {
const Allocator = std.mem.Allocator;
const Self = @This();
+
const Block = struct {
items: [count]UnionValueType align(max_align) = undefined,
used: usize = 0,
@@ -288,7 +289,10 @@ pub const Binding = struct {
b_missing,
};
+ pub var icount: usize = 0;
+
pub fn init(t: anytype, loc: logger.Loc) Binding {
+ icount += 1;
switch (@TypeOf(t)) {
*B.Identifier => {
return Binding{ .loc = loc, .data = B{ .b_identifier = t } };
@@ -312,6 +316,7 @@ pub const Binding = struct {
}
pub fn alloc(allocator: *std.mem.Allocator, t: anytype, loc: logger.Loc) Binding {
+ icount += 1;
switch (@TypeOf(t)) {
B.Identifier => {
var data = allocator.create(B.Identifier) catch unreachable;
@@ -334,9 +339,7 @@ pub const Binding = struct {
return Binding{ .loc = loc, .data = B{ .b_object = data } };
},
B.Missing => {
- var data = allocator.create(B.Missing) catch unreachable;
- data.* = t;
- return Binding{ .loc = loc, .data = B{ .b_missing = data } };
+ return Binding{ .loc = loc, .data = B{ .b_missing = .{} } };
},
else => {
@compileError("Invalid type passed to Binding.alloc");
@@ -350,7 +353,7 @@ pub const B = union(Binding.Tag) {
b_array: *B.Array,
b_property: *B.Property,
b_object: *B.Object,
- b_missing: *B.Missing,
+ b_missing: B.Missing,
pub const Identifier = struct {
ref: Ref,
@@ -1843,10 +1846,10 @@ pub const Expr = struct {
pub fn getMissing(exp: *const Expr) *E.Missing {
return exp.data.e_missing;
}
- pub fn getNumber(exp: *const Expr) *E.Number {
+ pub fn getNumber(exp: *const Expr) E.Number {
return exp.data.e_number;
}
- pub fn getBigInt(exp: *const Expr) *E.BigInt {
+ pub fn getBigInt(exp: *const Expr) E.BigInt {
return exp.data.e_big_int;
}
pub fn getObject(exp: *const Expr) *E.Object {
@@ -1855,7 +1858,7 @@ pub const Expr = struct {
pub fn getSpread(exp: *const Expr) *E.Spread {
return exp.data.e_spread;
}
- pub fn getString(exp: *const Expr) *E.String {
+ pub fn getString(exp: *const Expr) E.String {
return exp.data.e_string;
}
pub fn getTemplatePart(exp: *const Expr) *E.TemplatePart {
@@ -1890,11 +1893,11 @@ pub const Expr = struct {
if (std.meta.activeTag(expr.data) != .e_object) return null;
const obj = expr.getObject();
- for (obj.properties) |prop| {
+ for (obj.properties) |*prop| {
const value = prop.value orelse continue;
const key = prop.key orelse continue;
if (std.meta.activeTag(key.data) != .e_string) continue;
- const key_str: *const E.String = key.getString();
+ const key_str = key.data.e_string;
if (key_str.eql(string, name)) {
return Query{ .expr = value, .loc = key.loc };
}
@@ -1906,7 +1909,7 @@ pub const Expr = struct {
pub fn asString(expr: *const Expr, allocator: *std.mem.Allocator) ?string {
if (std.meta.activeTag(expr.data) != .e_string) return null;
- const key_str: *const E.String = expr.getString();
+ const key_str = expr.data.e_string;
return if (key_str.isUTF8()) key_str.utf8 else key_str.string(allocator) catch null;
}
@@ -1916,9 +1919,7 @@ pub const Expr = struct {
) ?bool {
if (std.meta.activeTag(expr.data) != .e_boolean) return null;
- const obj = expr.getBoolean();
-
- return obj.value;
+ return expr.data.e_boolean.value;
}
pub const EFlags = enum { none, ts_decorator };
@@ -1997,6 +1998,12 @@ pub const Expr = struct {
}
pub var icount: usize = 0;
+
+ // We don't need to dynamically allocate booleans
+ var true_bool = E.Boolean{ .value = true };
+ var false_bool = E.Boolean{ .value = false };
+ var bool_values = [_]*E.Boolean{ &false_bool, &true_bool };
+
pub fn init(exp: anytype, loc: logger.Loc) Expr {
icount += 1;
const st = exp.*;
@@ -2045,7 +2052,7 @@ pub const Expr = struct {
return Expr{
.loc = loc,
.data = Data{
- .e_boolean = Data.Store.All.append(@TypeOf(st), st),
+ .e_boolean = bool_values[@boolToInt(st.value)],
},
};
},
@@ -2101,7 +2108,7 @@ pub const Expr = struct {
return Expr{
.loc = loc,
.data = Data{
- .e_import_meta = Data.Store.All.append(@TypeOf(st), st),
+ .e_import_meta = st,
},
};
},
@@ -2284,304 +2291,26 @@ pub const Expr = struct {
},
};
},
-
- else => {
- @compileError("Invalid type passed to Expr.init");
- },
- }
- }
-
- pub fn alloc(allocator: *std.mem.Allocator, st: anytype, loc: logger.Loc) Expr {
- icount += 1;
- switch (@TypeOf(st)) {
- E.Array => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_array = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Class => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_class = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Unary => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_unary = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Binary => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_binary = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.This => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_this = st,
- },
- };
- },
- E.Boolean => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_boolean = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Super => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_super = st,
- },
- };
- },
- E.Null => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_null = st,
- },
- };
- },
- E.Undefined => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_undefined = st,
- },
- };
- },
- E.New => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_new = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.NewTarget => {
- return Expr{
- .loc = loc,
- .data = Data{ .e_new_target = st },
- };
- },
- E.Function => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_function = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.ImportMeta => {
+ *E.String => {
return Expr{
.loc = loc,
.data = Data{
- .e_import_meta = st,
- },
- };
- },
- E.Call => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_call = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Dot => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_dot = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Index => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_index = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Arrow => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_arrow = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Identifier => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_identifier = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.ImportIdentifier => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_import_identifier = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.PrivateIdentifier => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_private_identifier = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.JSXElement => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_jsx_element = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Missing => {
- return Expr{ .loc = loc, .data = Data{ .e_missing = E.Missing{} } };
- },
- E.Number => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_number = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.BigInt => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_big_int = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Object => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_object = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Spread => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_spread = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.String => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_string = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.TemplatePart => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_template_part = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Template => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_template = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.RegExp => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_reg_exp = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Await => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_await = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Yield => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_yield = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.If => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_if = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.RequireOrRequireResolve => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_require_or_require_resolve = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Import => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_import = Data.Store.All.append(@TypeOf(st), st),
- },
- };
- },
- E.Require => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_require = Data.Store.All.append(@TypeOf(st), st),
+ .e_string = st,
},
};
},
else => {
- @compileError("Invalid type passed to Expr.init");
+ @compileError("Invalid type passed to Expr.init: " ++ @typeName(@TypeOf(st)));
},
}
}
+ pub fn alloc(allocator: *std.mem.Allocator, st: anytype, loc: logger.Loc) Expr {
+ icount += 1;
+ return init(&st, loc);
+ }
+
pub const Tag = enum(u6) {
e_array,
e_unary,
@@ -2978,16 +2707,13 @@ pub const Expr = struct {
return true;
},
- .e_if => {
- const ex = a.getIf();
+ .e_if => |ex| {
return isBoolean(ex.yes) and isBoolean(ex.no);
},
- .e_unary => {
- const ex = a.getUnary();
+ .e_unary => |ex| {
return ex.op == .un_not or ex.op == .un_delete;
},
- .e_binary => {
- const ex = a.getBinary();
+ .e_binary => |ex| {
switch (ex.op) {
.bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => {
return true;
@@ -3036,17 +2762,14 @@ pub const Expr = struct {
.e_null, .e_undefined => {
return expr.at(E.Boolean{ .value = true }, allocator);
},
- .e_boolean => {
- const b = expr.getBoolean();
+ .e_boolean => |b| {
return expr.at(E.Boolean{ .value = b.value }, allocator);
},
- .e_number => {
- const n = expr.getNumber();
+ .e_number => |n| {
return expr.at(E.Boolean{ .value = (n.value == 0 or std.math.isNan(n.value)) }, allocator);
},
- .e_big_int => {
- const b = expr.getBigInt();
- return expr.at(E.Boolean{ .value = strings.eql(b.value, "0") }, allocator);
+ .e_big_int => |b| {
+ return expr.at(E.Boolean{ .value = strings.eqlComptime(b.value, "0") }, allocator);
},
.e_function,
.e_arrow,
@@ -3055,14 +2778,12 @@ pub const Expr = struct {
return expr.at(E.Boolean{ .value = false }, allocator);
},
// "!!!a" => "!a"
- .e_unary => {
- const un = expr.getUnary();
+ .e_unary => |un| {
if (un.op == Op.Code.un_not and isBoolean(un.value)) {
return un.value;
}
},
- .e_binary => {
- const ex = expr.getBinary();
+ .e_binary => |ex| {
// TODO: evaluate whether or not it is safe to do this mutation since it's modifying in-place.
// Make sure that these transformations are all safe for special values.
// For example, "!(a < b)" is not the same as "a >= b" if a and/or b are
@@ -3122,7 +2843,7 @@ pub const Expr = struct {
e_unary: *E.Unary,
e_binary: *E.Binary,
e_class: *E.Class,
- e_boolean: *E.Boolean,
+
e_new: *E.New,
e_function: *E.Function,
e_call: *E.Call,
@@ -3133,11 +2854,10 @@ pub const Expr = struct {
e_import_identifier: *E.ImportIdentifier,
e_private_identifier: *E.PrivateIdentifier,
e_jsx_element: *E.JSXElement,
- e_number: *E.Number,
- e_big_int: *E.BigInt,
+
e_object: *E.Object,
e_spread: *E.Spread,
- e_string: *E.String,
+
e_template_part: *E.TemplatePart,
e_template: *E.Template,
e_reg_exp: *E.RegExp,
@@ -3148,6 +2868,11 @@ pub const Expr = struct {
e_require_or_require_resolve: *E.RequireOrRequireResolve,
e_import: *E.Import,
+ e_boolean: *E.Boolean,
+ e_number: *E.Number,
+ e_big_int: *E.BigInt,
+ e_string: *E.String,
+
e_missing: E.Missing,
e_this: E.This,
e_super: E.Super,
@@ -3702,11 +3427,14 @@ pub const Ast = struct {
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
- named_imports: AutoHashMap(Ref, NamedImport) = undefined,
- named_exports: StringHashMap(NamedExport) = undefined,
+ named_imports: NamedImports = undefined,
+ named_exports: NamedExports = undefined,
top_level_symbol_to_parts: AutoHashMap(Ref, std.ArrayList(u32)) = undefined,
export_star_import_records: []u32 = &([_]u32{}),
+ pub const NamedImports = std.ArrayHashMap(Ref, NamedImport, Ref.hash, Ref.eql, true);
+ pub const NamedExports = StringHashMap(NamedExport);
+
pub fn initTest(parts: []Part) Ast {
return Ast{
.parts = parts,
@@ -3770,78 +3498,6 @@ pub const Dependency = packed struct {
pub const ExprList = std.ArrayList(Expr);
pub const StmtList = std.ArrayList(Stmt);
pub const BindingList = std.ArrayList(Binding);
-pub const AstData = struct {
- expr_list: ExprList,
- stmt_list: StmtList,
- binding_list: BindingList,
-
- pub fn init(allocator: *std.mem.Allocator) AstData {
- return AstData{
- .expr_list = ExprList.init(allocator),
- .stmt_list = StmtList.init(allocator),
- .binding_list = BindingList.init(allocator),
- };
- }
-
- pub fn deinit(self: *AstData) void {
- self.expr_list.deinit();
- self.stmt_list.deinit();
- self.binding_list.deinit();
- }
-
- pub fn expr(self: *AstData, index: ExprNodeIndex) Expr {
- return self.expr_list.items[index];
- }
-
- pub fn stmt(self: *AstData, index: StmtNodeIndex) Stmt {
- return self.stmt_list.items[index];
- }
-
- pub fn binding(self: *AstData, index: BindingNodeIndex) Binding {
- return self.binding_list.items[index];
- }
-
- pub fn add_(self: *AstData, t: anytype) !void {
- return switch (@TypeOf(t)) {
- Stmt => {
- try self.stmt_list.append(t);
- },
- Expr => {
- try self.expr_list.append(t);
- },
- Binding => {
- try self.binding_list.append(t);
- },
- else => {
- @compileError("Invalid type passed to AstData.add. Expected Stmt, Expr, or Binding.");
- },
- };
- }
-
- pub fn add(self: *AstData, t: anytype) !NodeIndex {
- return &t;
- // return switch (@TypeOf(t)) {
- // Stmt => {
- // var len = self.stmt_list.items.len;
- // try self.stmt_list.append(t);
- // return @intCast(StmtNodeIndex, len);
- // },
- // Expr => {
- // var len = self.expr_list.items.len;
- // try self.expr_list.append(t);
- // return @intCast(ExprNodeIndex, len);
- // },
- // Binding => {
- // var len = self.binding_list.items.len;
- // try self.binding_list.append(t);
- // return @intCast(BindingNodeIndex, len);
- // },
- // else => {
- // @compileError("Invalid type passed to AstData.add. Expected Stmt, Expr, or Binding.");
- // },
- // };
- }
-};
// Each file is made up of multiple parts, and each part consists of one or
// more top-level statements. Parts are used for tree shaking and code
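
Boolean, number, and string payloads are now grouped as plain pointers in Expr.Data and, for booleans, never allocated at all: there are only two possible values, so Expr.init indexes a pair of file-scope statics with @boolToInt instead of appending to the store. A minimal sketch of that trick in the repo's 0.8-era Zig; Boolean here stands in for E.Boolean:

    const Boolean = struct { value: bool };

    // The two possible payloads live as statics, so a boolean literal never
    // costs a heap allocation.
    var false_bool = Boolean{ .value = false };
    var true_bool = Boolean{ .value = true };
    var bool_values = [_]*Boolean{ &false_bool, &true_bool };

    fn booleanFor(value: bool) *Boolean {
        return bool_values[@boolToInt(value)];
    }
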
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index b24001323..089fccb93 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -619,8 +619,8 @@ pub const SideEffects = enum(u2) {
// }
},
- .s_block => {
- for (stmt.getBlock().stmts) |child| {
+ .s_block => |block| {
+ for (block.stmts) |child| {
if (shouldKeepStmtInDeadControlFlow(child)) {
return true;
}
@@ -734,8 +734,7 @@ pub const SideEffects = enum(u2) {
.e_null, .e_undefined, .e_boolean, .e_number, .e_big_int, .e_string => {
return true;
},
- .e_unary => {
- const e = data.e_unary;
+ .e_unary => |e| {
switch (e.op) {
// number or bigint
.un_pos,
@@ -758,8 +757,7 @@ pub const SideEffects = enum(u2) {
else => {},
}
},
- .e_binary => {
- const e = data.e_binary;
+ .e_binary => |e| {
switch (e.op) {
// boolean
.bin_lt,
@@ -812,8 +810,7 @@ pub const SideEffects = enum(u2) {
else => {},
}
},
- .e_if => {
- const e = data.e_if;
+ .e_if => |e| {
return isPrimitiveWithSideEffects(e.yes.data) and isPrimitiveWithSideEffects(e.no.data);
},
else => {},
@@ -969,8 +966,7 @@ pub const SideEffects = enum(u2) {
.e_object, .e_array, .e_class => {
return Result{ .ok = true, .value = true, .side_effects = .could_have_side_effects };
},
- .e_unary => {
- const e_ = exp.e_unary;
+ .e_unary => |e_| {
switch (e_.op) {
.un_void => {
return Result{ .ok = true, .value = false, .side_effects = .could_have_side_effects };
@@ -990,8 +986,7 @@ pub const SideEffects = enum(u2) {
else => {},
}
},
- .e_binary => {
- const e_ = exp.e_binary;
+ .e_binary => |e_| {
switch (e_.op) {
.bin_logical_or => {
// "anything || truthy" is truthy
@@ -1092,8 +1087,8 @@ fn statementCaresAboutScope(stmt: Stmt) bool {
=> {
return false;
},
- .s_local => {
- return stmt.getLocal().kind != .k_var;
+ .s_local => |local| {
+ return local.kind != .k_var;
},
else => {
return true;
@@ -1667,7 +1662,7 @@ pub const Prefill = struct {
pub var ColumnNumber = E.String{ .value = &Prefill.StringLiteral.ColumnNumber };
};
pub const Data = struct {
- pub var BMissing = B{ .b_missing = &BMissing_ };
+ pub var BMissing = B{ .b_missing = BMissing_ };
pub var BMissing_ = B.Missing{};
pub var EMissing = Expr.Data{ .e_missing = EMissing_ };
@@ -1692,12 +1687,12 @@ pub const Prefill = struct {
};
};
-// var keyExprData = Expr.Data{ .e_string = Prefill.String.Key };
-// var jsxChildrenKeyData = Expr.Data{ .e_string = Prefill.String.Children };
+var keyExprData = Expr.Data{ .e_string = &Prefill.String.Key };
+var jsxChildrenKeyData = Expr.Data{ .e_string = &Prefill.String.Children };
var nullExprValueData = E.Null{};
var falseExprValueData = E.Boolean{ .value = false };
var nullValueExpr = Expr.Data{ .e_null = nullExprValueData };
-var falseValueExpr = Expr.Data{ .e_boolean = falseExprValueData };
+var falseValueExpr = Expr.Data{ .e_boolean = &falseExprValueData };
// P is for Parser!
// public only because of Binding.ToExpr
@@ -1732,7 +1727,6 @@ pub const P = struct {
promise_ref: ?js_ast.Ref = null,
scopes_in_order_visitor_index: usize = 0,
has_classic_runtime_warned: bool = false,
- data: js_ast.AstData,
injected_define_symbols: List(Ref),
symbol_uses: SymbolUseMap,
@@ -1790,8 +1784,8 @@ pub const P = struct {
enclosing_class_keyword: logger.Range = logger.Range.None,
import_items_for_namespace: Map(js_ast.Ref, StringHashMap(js_ast.LocRef)),
is_import_item: RefBoolMap,
- named_imports: Map(js_ast.Ref, js_ast.NamedImport),
- named_exports: StringHashMap(js_ast.NamedExport),
+ named_imports: js_ast.Ast.NamedImports,
+ named_exports: js_ast.Ast.NamedExports,
top_level_symbol_to_parts: Map(js_ast.Ref, List(u32)),
import_namespace_cc_map: Map(ImportNamespaceCallOrConstruct, bool),
@@ -1943,7 +1937,7 @@ pub const P = struct {
}
const str = arg.data.e_string;
- const import_record_index = p.addImportRecord(.dynamic, arg.loc, arg.getString().string(p.allocator) catch unreachable);
+ const import_record_index = p.addImportRecord(.dynamic, arg.loc, arg.data.e_string.string(p.allocator) catch unreachable);
p.import_records.items[import_record_index].handles_import_errors = (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target;
p.import_records_for_current_part.append(import_record_index) catch unreachable;
return p.e(E.Import{
@@ -5970,8 +5964,7 @@ pub const P = struct {
switch (stmt.data) {
.s_expr => |expr| {
switch (expr.value.data) {
- .e_string => {
- const str = expr.value.getString();
+ .e_string => |str| {
if (!str.prefer_template) {
isDirectivePrologue = true;
@@ -7166,9 +7159,7 @@ pub const P = struct {
// Forbid decorators on class constructors
if (opts.ts_decorators.len > 0) {
switch ((property.key orelse p.panic("Internal error: Expected property {s} to have a key.", .{property})).data) {
- .e_string => {
- const str = property.key.?.getString();
-
+ .e_string => |str| {
if (str.eql(string, "constructor")) {
p.log.addError(p.source, first_decorator_loc, "TypeScript does not allow decorators on class constructors") catch unreachable;
}
@@ -9099,12 +9090,12 @@ pub const P = struct {
pub fn bindingCanBeRemovedIfUnused(p: *P, binding: Binding) bool {
switch (binding.data) {
.b_array => |bi| {
- for (bi.items) |item| {
+ for (bi.items) |*item| {
if (!p.bindingCanBeRemovedIfUnused(item.binding)) {
return false;
}
- if (item.default_value) |default| {
+ if (item.default_value) |*default| {
if (!p.exprCanBeRemovedIfUnused(default)) {
return false;
}
@@ -9112,8 +9103,8 @@ pub const P = struct {
}
},
.b_object => |bi| {
- for (bi.properties) |property| {
- if (!property.flags.is_spread and !p.exprCanBeRemovedIfUnused(property.key)) {
+ for (bi.properties) |*property| {
+ if (!property.flags.is_spread and !p.exprCanBeRemovedIfUnused(&property.key)) {
return false;
}
@@ -9121,7 +9112,7 @@ pub const P = struct {
return false;
}
- if (property.default_value) |default| {
+ if (property.default_value) |*default| {
if (!p.exprCanBeRemovedIfUnused(default)) {
return false;
}
@@ -9155,17 +9146,17 @@ pub const P = struct {
// Expressions marked with this are automatically generated and have
// no side effects by construction.
break;
- } else if (!p.exprCanBeRemovedIfUnused(st.value)) {
+ } else if (!p.exprCanBeRemovedIfUnused(&st.value)) {
return false;
}
},
.s_local => |st| {
- for (st.decls) |decl| {
+ for (st.decls) |*decl| {
if (!p.bindingCanBeRemovedIfUnused(decl.binding)) {
return false;
}
- if (decl.value) |decl_value| {
+ if (decl.value) |*decl_value| {
if (!p.exprCanBeRemovedIfUnused(decl_value)) {
return false;
}
@@ -9183,7 +9174,7 @@ pub const P = struct {
// These never have side effects
.s_function => {},
.s_class => {
- if (!p.classCanBeRemovedIfUnused(&s2.getClass().class)) {
+ if (!p.classCanBeRemovedIfUnused(&s2.data.s_class.class)) {
return false;
}
},
@@ -9192,7 +9183,7 @@ pub const P = struct {
},
}
},
- .expr => |exp| {
+ .expr => |*exp| {
if (!p.exprCanBeRemovedIfUnused(exp)) {
return false;
}
@@ -9449,7 +9440,7 @@ pub const P = struct {
if (e_.properties.len > 0) {
if (e_.key) |key| {
var props = List(G.Property).fromOwnedSlice(p.allocator, e_.properties);
- // props.append(G.Property{ .key = Expr{ .loc = key.loc, .data = keyExprData }, .value = key }) catch unreachable;
+ props.append(G.Property{ .key = Expr{ .loc = key.loc, .data = keyExprData }, .value = key }) catch unreachable;
args[0] = p.e(E.Object{ .properties = props.toOwnedSlice() }, expr.loc);
} else {
args[0] = p.e(E.Object{ .properties = e_.properties }, expr.loc);
@@ -9487,7 +9478,7 @@ pub const P = struct {
for (e_.children) |child, i| {
e_.children[i] = p.visitExpr(child);
}
- const children_key = p.e(E.String{ .utf8 = "key" }, expr.loc);
+ const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
if (e_.children.len == 1) {
props.append(G.Property{
@@ -9514,29 +9505,29 @@ pub const P = struct {
}
if (p.options.jsx.development) {
- // args[3] = Expr{ .loc = expr.loc, .data = falseValueExpr };
+ args[3] = Expr{ .loc = expr.loc, .data = falseValueExpr };
// placeholder src prop for now
- // var source = p.allocator.alloc(G.Property, 3) catch unreachable;
- // p.recordUsage(p.jsx_filename_ref);
- // source[0] = G.Property{
- // .key = Expr{ .loc = expr.loc, .data = Prefill.Data.Filename },
- // .value = p.e(E.Identifier{ .ref = p.jsx_filename_ref }, expr.loc),
- // };
+ var source = p.allocator.alloc(G.Property, 3) catch unreachable;
+ p.recordUsage(p.jsx_filename_ref);
+ source[0] = G.Property{
+ .key = Expr{ .loc = expr.loc, .data = Prefill.Data.Filename },
+ .value = p.e(E.Identifier{ .ref = p.jsx_filename_ref }, expr.loc),
+ };
- // source[1] = G.Property{
- // .key = Expr{ .loc = expr.loc, .data = Prefill.Data.LineNumber },
- // .value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
- // };
+ source[1] = G.Property{
+ .key = Expr{ .loc = expr.loc, .data = Prefill.Data.LineNumber },
+ .value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
+ };
- // source[2] = G.Property{
- // .key = Expr{ .loc = expr.loc, .data = Prefill.Data.ColumnNumber },
- // .value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
- // };
+ source[2] = G.Property{
+ .key = Expr{ .loc = expr.loc, .data = Prefill.Data.ColumnNumber },
+ .value = p.e(E.Number{ .value = @intToFloat(f64, expr.loc.start) }, expr.loc),
+ };
- // args[4] = p.e(E.Object{
- // .properties = source,
- // }, expr.loc);
- // args[5] = Expr{ .data = Prefill.Data.This, .loc = expr.loc };
+ args[4] = p.e(E.Object{
+ .properties = source,
+ }, expr.loc);
+ args[5] = Expr{ .data = Prefill.Data.This, .loc = expr.loc };
}
return p.e(E.Call{
@@ -9556,9 +9547,8 @@ pub const P = struct {
e_.tag = p.visitExpr(tag);
}
- var i: usize = 0;
- while (i < e_.parts.len) : (i += 1) {
- e_.parts[i].value = p.visitExpr(e_.parts[i].value);
+ for (e_.parts) |*part| {
+ part.value = p.visitExpr(part.value);
}
},
@@ -9902,7 +9892,7 @@ pub const P = struct {
in.assign_target,
is_delete_target,
e_.target,
- e_.index.getString().string(p.allocator) catch unreachable,
+ e_.index.data.e_string.string(p.allocator) catch unreachable,
e_.index.loc,
is_call_target,
)) |val| {
@@ -9957,7 +9947,7 @@ pub const P = struct {
}
},
.un_void => {
- if (p.exprCanBeRemovedIfUnused(e_.value)) {
+ if (p.exprCanBeRemovedIfUnused(&e_.value)) {
return p.e(E.Undefined{}, e_.value.loc);
}
},
@@ -10137,7 +10127,7 @@ pub const P = struct {
// Forbid duplicate "__proto__" properties according to the specification
if (!property.flags.is_computed and !property.flags.was_shorthand and !property.flags.is_method and in.assign_target == .none and key.data.isStringValue() and strings.eqlComptime(
// __proto__ is utf8, assume it lives in refs
- key.getString().utf8,
+ key.data.e_string.utf8,
"__proto__",
)) {
if (has_proto) {
@@ -10153,9 +10143,7 @@ pub const P = struct {
// Extract the initializer for expressions like "({ a: b = c } = d)"
if (in.assign_target != .none and property.initializer != null and property.value != null) {
switch (property.value.?.data) {
- .e_binary => {
- const bin = property.value.?.getBinary();
-
+ .e_binary => |bin| {
if (bin.op == .bin_assign) {
property.initializer = bin.right;
property.value = bin.left;
@@ -10395,24 +10383,24 @@ pub const P = struct {
}
pub fn classCanBeRemovedIfUnused(p: *P, class: *G.Class) bool {
- if (class.extends) |extends| {
+ if (class.extends) |*extends| {
if (!p.exprCanBeRemovedIfUnused(extends)) {
return false;
}
}
- for (class.properties) |property| {
- if (!p.exprCanBeRemovedIfUnused(property.key orelse unreachable)) {
+ for (class.properties) |*property| {
+ if (!p.exprCanBeRemovedIfUnused(&(property.key orelse unreachable))) {
return false;
}
- if (property.value) |val| {
+ if (property.value) |*val| {
if (!p.exprCanBeRemovedIfUnused(val)) {
return false;
}
}
- if (property.initializer) |val| {
+ if (property.initializer) |*val| {
if (!p.exprCanBeRemovedIfUnused(val)) {
return false;
}
@@ -10425,7 +10413,7 @@ pub const P = struct {
// TODO:
// When React Fast Refresh is enabled, anything that's a JSX component should not be removable
// This is to improve the reliability of fast refresh between page loads.
- pub fn exprCanBeRemovedIfUnused(p: *P, expr: Expr) bool {
+ pub fn exprCanBeRemovedIfUnused(p: *P, expr: *const Expr) bool {
switch (expr.data) {
.e_null,
.e_undefined,
@@ -10498,10 +10486,10 @@ pub const P = struct {
return true;
},
.e_if => |ex| {
- return p.exprCanBeRemovedIfUnused(ex.test_) and p.exprCanBeRemovedIfUnused(ex.yes) and p.exprCanBeRemovedIfUnused(ex.no);
+ return p.exprCanBeRemovedIfUnused(&ex.test_) and p.exprCanBeRemovedIfUnused(&ex.yes) and p.exprCanBeRemovedIfUnused(&ex.no);
},
.e_array => |ex| {
- for (ex.items) |item| {
+ for (ex.items) |*item| {
if (!p.exprCanBeRemovedIfUnused(item)) {
return false;
}
@@ -10510,14 +10498,14 @@ pub const P = struct {
return true;
},
.e_object => |ex| {
- for (ex.properties) |property| {
+ for (ex.properties) |*property| {
// The key must still be evaluated if it's computed or a spread
if (property.kind == .spread or property.flags.is_computed or property.flags.is_spread) {
return false;
}
- if (property.value) |val| {
+ if (property.value) |*val| {
if (!p.exprCanBeRemovedIfUnused(val)) {
return false;
}
@@ -10530,7 +10518,7 @@ pub const P = struct {
// A call that has been marked "__PURE__" can be removed if all arguments
// can be removed. The annotation causes us to ignore the target.
if (ex.can_be_unwrapped_if_unused) {
- for (ex.args) |arg| {
+ for (ex.args) |*arg| {
if (!p.exprCanBeRemovedIfUnused(arg)) {
return false;
}
@@ -10544,7 +10532,7 @@ pub const P = struct {
// A call that has been marked "__PURE__" can be removed if all arguments
// can be removed. The annotation causes us to ignore the target.
if (ex.can_be_unwrapped_if_unused) {
- for (ex.args) |arg| {
+ for (ex.args) |*arg| {
if (!p.exprCanBeRemovedIfUnused(arg)) {
return false;
}
@@ -10556,7 +10544,7 @@ pub const P = struct {
.e_unary => |ex| {
switch (ex.op) {
.un_typeof, .un_void, .un_not => {
- return p.exprCanBeRemovedIfUnused(ex.value);
+ return p.exprCanBeRemovedIfUnused(&ex.value);
},
else => {},
}
@@ -10564,7 +10552,7 @@ pub const P = struct {
.e_binary => |ex| {
switch (ex.op) {
.bin_strict_eq, .bin_strict_ne, .bin_comma, .bin_logical_or, .bin_logical_and, .bin_nullish_coalescing => {
- return p.exprCanBeRemovedIfUnused(ex.left) and p.exprCanBeRemovedIfUnused(ex.right);
+ return p.exprCanBeRemovedIfUnused(&ex.left) and p.exprCanBeRemovedIfUnused(&ex.right);
},
else => {},
}
@@ -11968,7 +11956,7 @@ pub const P = struct {
if (is_private) {} else if (!property.flags.is_method and !property.flags.is_computed) {
if (property.key) |key| {
if (@as(Expr.Tag, key.data) == .e_string) {
- name_to_keep = key.getString().string(p.allocator) catch unreachable;
+ name_to_keep = key.data.e_string.string(p.allocator) catch unreachable;
}
}
}
@@ -12240,13 +12228,10 @@ pub const P = struct {
}
// First, try converting the expressions to bindings
- var i: usize = 0;
- while (i < items.len) : (i += 1) {
+ for (items) |_, i| {
var is_spread = false;
switch (items[i].data) {
- .e_spread => {
- const v = items[i].getSpread();
-
+ .e_spread => |v| {
is_spread = true;
items[i] = v.value;
},
@@ -12586,7 +12571,6 @@ pub const P = struct {
.require_transposer = @TypeOf(_parser.require_transposer).init(_parser),
.require_resolve_transposer = @TypeOf(_parser.require_resolve_transposer).init(_parser),
.lexer = lexer,
- .data = js_ast.AstData.init(allocator),
};
return _parser;
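
Alongside the switch-payload captures, the tree-shaking helpers now take *const Expr and walk optionals and slices with pointer captures (|*default|, |*extends|, |*item|), so each level of the recursion reads the stored expression in place instead of copying it. A minimal sketch of the optional pointer capture, with illustrative Expr and Decl types:

    const Expr = struct { weight: u32 };
    const Decl = struct { value: ?Expr = null };

    // `|*value|` points into the optional's payload, so a *const Expr can be
    // passed onward without copying the expression.
    fn declWeight(decl: *const Decl) u32 {
        var sum: u32 = 0;
        if (decl.value) |*value| {
            sum += value.weight;
        }
        return sum;
    }
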
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 698eb3227..351e6a67e 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -353,16 +353,12 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(keyword);
p.printSpace();
- var i: usize = 0;
-
- while (i < decls.len) : (i += 1) {
+ for (decls) |*decl, i| {
if (i != 0) {
p.print(",");
p.printSpace();
}
- const decl = decls[i];
-
p.printBinding(decl.binding);
if (decl.value) |value| {
@@ -857,9 +853,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSpaceBeforeIdentifier();
p.print("this");
},
- .e_spread => {
- const e = expr.getSpread();
-
+ .e_spread => |e| {
p.print("...");
p.printExpr(e.value, .comma, ExprFlag.None());
},
@@ -871,9 +865,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSpaceBeforeIdentifier();
p.print("import.meta");
},
- .e_new => {
- const e = expr.getNew();
-
+ .e_new => |e| {
const has_pure_comment = e.can_be_unwrapped_if_unused;
const wrap = level.gte(.call) or (has_pure_comment and level.gte(.postfix));
@@ -894,15 +886,12 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print("(");
if (e.args.len > 0) {
- var i: usize = 0;
- p.printExpr(e.args[i], .comma, ExprFlag.None());
- i = 1;
+ p.printExpr(e.args[0], .comma, ExprFlag.None());
- while (i < e.args.len) {
+ for (e.args[1..]) |arg, i| {
p.print(",");
p.printSpace();
- p.printExpr(e.args[i], .comma, ExprFlag.None());
- i += 1;
+ p.printExpr(arg, .comma, ExprFlag.None());
}
}
@@ -913,9 +902,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_call => {
- const e = expr.getCall();
-
+ .e_call => |e| {
var wrap = level.gte(.new) or flags.forbid_call;
var target_flags = ExprFlag.None();
if (e.optional_chain == null) {
@@ -957,12 +944,10 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (e.args.len > 0) {
p.printExpr(e.args[0], .comma, ExprFlag.None());
- var i: usize = 1;
- while (i < e.args.len) {
+ for (e.args[1..]) |arg, i| {
p.print(",");
p.printSpace();
- p.printExpr(e.args[i], .comma, ExprFlag.None());
- i += 1;
+ p.printExpr(arg, .comma, ExprFlag.None());
}
}
@@ -971,14 +956,10 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_require => {
- const e = expr.getRequire();
-
+ .e_require => |e| {
p.printRequireOrImportExpr(e.import_record_index, &([_]G.Comment{}), level, flags);
},
- .e_require_or_require_resolve => {
- const e = expr.getRequireOrRequireResolve();
-
+ .e_require_or_require_resolve => |e| {
const wrap = level.gte(.new) or flags.forbid_call;
if (wrap) {
p.print("(");
@@ -1003,8 +984,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_import => {
- const e = expr.getImport();
+ .e_import => |e| {
// Handle non-string expressions
if (Ref.isSourceIndexNull(e.import_record_index)) {
@@ -1037,9 +1017,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printRequireOrImportExpr(e.import_record_index, e.leading_interior_comments, level, flags);
}
},
- .e_dot => {
- const e = expr.getDot();
-
+ .e_dot => |e| {
var wrap = false;
if (e.optional_chain == null) {
flags.has_non_optional_chain_parent = false;
@@ -1078,9 +1056,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_index => {
- const e = expr.getIndex();
-
+ .e_index => |e| {
var wrap = false;
if (e.optional_chain == null) {
flags.has_non_optional_chain_parent = false;
@@ -1122,9 +1098,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_if => {
- const e = expr.getIf();
-
+ .e_if => |e| {
const wrap = level.gte(.conditional);
if (wrap) {
p.print("(");
@@ -1143,9 +1117,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_arrow => {
- const e = expr.getArrow();
-
+ .e_arrow => |e| {
const wrap = level.gte(.assign);
if (wrap) {
@@ -1185,9 +1157,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_function => {
- const e = expr.getFunction();
-
+ .e_function => |e| {
const n = p.js.lenI();
var wrap = p.stmt_start == n or p.export_default_start == n;
@@ -1214,9 +1184,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_class => {
- const e = expr.getClass();
-
+ .e_class => |e| {
const n = p.js.lenI();
var wrap = p.stmt_start == n or p.export_default_start == n;
if (wrap) {
@@ -1234,17 +1202,14 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_array => {
- const e = expr.getArray();
-
+ .e_array => |e| {
p.print("[");
if (e.items.len > 0) {
if (!e.is_single_line) {
p.options.indent += 1;
}
- var i: usize = 0;
- while (i < e.items.len) : (i += 1) {
+ for (e.items) |item, i| {
if (i != 0) {
p.print(",");
if (e.is_single_line) {
@@ -1255,11 +1220,11 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printNewline();
p.printIndent();
}
- p.printExpr(e.items[i], .comma, ExprFlag.None());
+ p.printExpr(item, .comma, ExprFlag.None());
if (i == e.items.len - 1) {
// Make sure there's a comma after trailing missing items
- switch (e.items[i].data) {
+ switch (item.data) {
.e_missing => {
p.print(",");
},
@@ -1277,9 +1242,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print("]");
},
- .e_object => {
- const e = expr.getObject();
-
+ .e_object => |e| {
const n = p.js.lenI();
const wrap = p.stmt_start == n or p.arrow_expr_start == n;
@@ -1318,31 +1281,26 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_boolean => {
- const e = expr.getBoolean();
-
+ .e_boolean => |e| {
p.printSpaceBeforeIdentifier();
p.print(if (e.value) "true" else "false");
},
- .e_string => {
- const e = expr.getString();
+ .e_string => |e| {
// If this was originally a template literal, print it as one as long as we're not minifying
if (e.prefer_template) {
p.print("`");
- p.printString(e.*, '`');
+ p.printString(e, '`');
p.print("`");
return;
}
const c = p.bestQuoteCharForString(e.value, true);
p.print(c);
- p.printString(e.*, c);
+ p.printString(e, c);
p.print(c);
},
- .e_template => {
- const e = expr.getTemplate();
-
+ .e_template => |e| {
if (e.tag) |tag| {
// Optional chains are forbidden in template tags
if (expr.isOptionalChain()) {
@@ -1359,7 +1317,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (e.tag != null) {
p.print(e.head.utf8);
} else {
- p.printString(e.head, '`');
+ p.printString(&e.head, '`');
}
}
@@ -1371,15 +1329,13 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (e.tag != null) {
p.print(part.tail.utf8);
} else {
- p.printString(part.tail, '`');
+ p.printString(&part.tail, '`');
}
}
}
p.print("`");
},
- .e_reg_exp => {
- const e = expr.getRegExp();
-
+ .e_reg_exp => |e| {
const n = p.js.len();
// Avoid forming a single-line comment
@@ -1392,16 +1348,12 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// Need a space before the next identifier to avoid it turning into flags
p.prev_reg_exp_end = p.js.lenI();
},
- .e_big_int => {
- const e = expr.getBigInt();
-
+ .e_big_int => |e| {
p.printSpaceBeforeIdentifier();
p.print(e.value);
p.print('n');
},
- .e_number => {
- const e = expr.getNumber();
-
+ .e_number => |e| {
const value = e.value;
const absValue = std.math.fabs(value);
@@ -1441,9 +1393,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.prev_num_end = p.js.lenI();
}
},
- .e_identifier => {
- const e = expr.getIdentifier();
-
+ .e_identifier => |e| {
const name = p.renamer.nameForSymbol(e.ref);
const wrap = p.js.lenI() == p.for_of_init_start and strings.eqlComptime(name, "let");
@@ -1458,8 +1408,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_import_identifier => {
- const e = expr.getImportIdentifier();
+ .e_import_identifier => |e| {
// Potentially use a property access instead of an identifier
const ref = p.symbols.follow(e.ref);
@@ -1502,9 +1451,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSymbol(e.ref);
}
},
- .e_await => {
- const e = expr.getAwait();
-
+ .e_await => |e| {
const wrap = level.gte(.prefix);
if (wrap) {
@@ -1520,9 +1467,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_yield => {
- const e = expr.getYield();
-
+ .e_yield => |e| {
const wrap = level.gte(.assign);
if (wrap) {
p.print("(");
@@ -1543,9 +1488,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_unary => {
- const e = expr.getUnary();
-
+ .e_unary => |e| {
const entry: Op = Op.Table.get(e.op);
const wrap = level.gte(entry.level);
@@ -1576,9 +1519,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
- .e_binary => {
- const e = expr.getBinary();
-
+ .e_binary => |e| {
const entry: Op = Op.Table.get(e.op);
var wrap = level.gte(entry.level) or (e.op == Op.Code.bin_in and flags.forbid_in);
@@ -1718,7 +1659,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
- pub fn printString(p: *Printer, str: E.String, c: u8) void {
+ pub fn printString(p: *Printer, str: *const E.String, c: u8) void {
if (!str.isUTF8()) {
p.printQuotedUTF16(str.value, c);
} else {
@@ -1811,8 +1752,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
.e_private_identifier => {
p.printSymbol(_key.getPrivateIdentifier().ref);
},
- .e_string => {
- const key = _key.getString();
+ .e_string => |key| {
p.addSourceMapping(_key.loc);
if (key.isUTF8()) {
p.printSpaceBeforeIdentifier();
@@ -1966,8 +1906,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.options.indent += 1;
}
- var i: usize = 0;
- while (i < b.items.len) : (i += 1) {
+ for (b.items) |*item, i| {
if (i != 0) {
p.print(",");
if (b.is_single_line) {
@@ -1985,7 +1924,6 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print("...");
}
- const item = b.items[i];
p.printBinding(item.binding);
p.maybePrintDefaultBindingValue(item);
@@ -2017,8 +1955,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.options.indent += 1;
}
- var i: usize = 0;
- while (i < b.properties.len) : (i += 1) {
+ for (b.properties) |*property, i| {
if (i != 0) {
p.print(",");
if (b.is_single_line) {
@@ -2031,8 +1968,6 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printIndent();
}
- const property = b.properties[i];
-
if (property.flags.is_spread) {
p.print("...");
} else {
@@ -2048,8 +1983,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
switch (property.key.data) {
- .e_string => {
- const str = property.key.getString();
+ .e_string => |str| {
if (str.isUTF8()) {
p.addSourceMapping(property.key.loc);
p.printSpaceBeforeIdentifier();
@@ -2268,8 +2202,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.options.indent += 1;
}
- var i: usize = 0;
- while (i < s.items.len) : (i += 1) {
+ for (s.items) |*item, i| {
if (i != 0) {
p.print(",");
if (s.is_single_line) {
@@ -2281,7 +2214,6 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printNewline();
p.printIndent();
}
- const item = s.items[i];
const name = p.renamer.nameForSymbol(item.name.ref.?);
p.printIdentifier(name);
if (!strings.eql(name, item.alias)) {
@@ -2313,9 +2245,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.options.indent += 1;
}
- var i: usize = 0;
-
- while (i < s.items.len) : (i += 1) {
+ for (s.items) |*item, i| {
if (i != 0) {
p.print(",");
if (s.is_single_line) {
@@ -2327,7 +2257,6 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printNewline();
p.printIndent();
}
- const item = s.items[i];
const name = p.renamer.nameForSymbol(item.name.ref.?);
p.printIdentifier(name);
if (!strings.eql(name, item.alias)) {
@@ -2613,8 +2542,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.options.unindent();
}
- var i: usize = 0;
- while (i < s.items.len) : (i += 1) {
+ for (s.items) |*item, i| {
if (i != 0) {
p.print(",");
if (s.is_single_line) {
@@ -2627,7 +2555,6 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printIndent();
}
- const item = s.items[i];
p.printClauseAlias(item.alias);
const name = p.renamer.nameForSymbol(item.name.ref.?);
if (!strings.eql(name, item.alias)) {
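
The printer's argument and clause lists drop their manual index loops: for call and new expressions the first argument is printed before the loop and the rest come from the e.args[1..] slice, and the import/export clause loops iterate s.items with a for capture. A minimal sketch of that shape; printArgs and the writer parameter are illustrative, not printer API:

    fn printArgs(writer: anytype, args: []const []const u8) !void {
        if (args.len > 0) {
            try writer.writeAll(args[0]);
            // Everything after the first element gets a separator first.
            for (args[1..]) |arg| {
                try writer.writeAll(", ");
                try writer.writeAll(arg);
            }
        }
    }
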
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 053e00a90..303ffc2d4 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -133,12 +133,11 @@ pub const PackageJSON = struct {
//
if (json.asProperty("browser")) |browser_prop| {
switch (browser_prop.expr.data) {
- .e_object => {
- const obj = browser_prop.expr.getObject();
+ .e_object => |obj| {
// The value is an object
// Remap all files in the browser field
- for (obj.properties) |prop| {
+ for (obj.properties) |*prop| {
var _key_str = (prop.key orelse continue).asString(r.allocator) orelse continue;
const value: js_ast.Expr = prop.value orelse continue;
@@ -154,13 +153,11 @@ pub const PackageJSON = struct {
const key = r.allocator.dupe(u8, r.fs.normalize(_key_str)) catch unreachable;
switch (value.data) {
- .e_string => {
- const str = value.getString();
+ .e_string => |str| {
// If this is a string, it's a replacement package
package_json.browser_map.put(key, str.string(r.allocator) catch unreachable) catch unreachable;
},
- .e_boolean => {
- const boolean = value.getBoolean();
+ .e_boolean => |boolean| {
if (!boolean.value) {
package_json.browser_map.put(key, "") catch unreachable;
}