path: root/src/defines.zig
Diffstat (limited to 'src/defines.zig')
-rw-r--r--  src/defines.zig  332
1 file changed, 164 insertions, 168 deletions
diff --git a/src/defines.zig b/src/defines.zig
index c16bf9edb..08447cde1 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -63,63 +63,63 @@ pub const DefineData = struct {
var user_defines = UserDefines.init(allocator);
try user_defines.ensureCapacity(defines.count());
- // var iter = defines.iterator();
- // while (iter.next()) |entry| {
- // var splitter = std.mem.split(entry.key, ".");
- // while (splitter.next()) |part| {
- // if (!js_lexer.isIdentifier(part)) {
- // if (strings.eql(part, entry.key)) {
- // try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
- // } else {
- // try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
- // }
- // break;
- // }
- // }
-
- // if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
- // var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
- // ident.ref = Ref.None;
- // ident.can_be_removed_if_unused = true;
- // user_defines.putAssumeCapacity(
- // entry.key,
- // DefineData{
- // .value = js_ast.Expr.Data{ .e_identifier = ident },
- // .original_name = entry.value,
- // .can_be_removed_if_unused = true,
- // },
- // );
- // // user_defines.putAssumeCapacity(
- // // entry.key,
- // // DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
- // // );
- // continue;
- // }
- // var _log = log;
- // var source = logger.Source{
- // .contents = entry.value,
- // .path = defines_path,
- // .identifier_name = "defines",
- // .key_path = fs.Path.initWithNamespace("defines", "internal"),
- // };
- // var expr = try json_parser.ParseJSON(&source, _log, allocator);
- // var data: js_ast.Expr.Data = undefined;
- // switch (expr.data) {
- // .e_missing => {
- // continue;
- // },
- // .e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
- // data = expr.data;
- // },
- // else => {
- // continue;
- // },
- // }
-
- // user_defines.putAssumeCapacity(entry.key, DefineData{
- // .value = data,
- // });
- // }
+ var iter = defines.iterator();
+ while (iter.next()) |entry| {
+ var splitter = std.mem.split(entry.key, ".");
+ while (splitter.next()) |part| {
+ if (!js_lexer.isIdentifier(part)) {
+ if (strings.eql(part, entry.key)) {
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
+ } else {
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
+ }
+ break;
+ }
+ }
+
+ if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
+ var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
+ ident.ref = Ref.None;
+ ident.can_be_removed_if_unused = true;
+ user_defines.putAssumeCapacity(
+ entry.key,
+ DefineData{
+ .value = js_ast.Expr.Data{ .e_identifier = ident },
+ .original_name = entry.value,
+ .can_be_removed_if_unused = true,
+ },
+ );
+ // user_defines.putAssumeCapacity(
+ // entry.key,
+ // DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
+ // );
+ continue;
+ }
+ var _log = log;
+ var source = logger.Source{
+ .contents = entry.value,
+ .path = defines_path,
+ .identifier_name = "defines",
+ .key_path = fs.Path.initWithNamespace("defines", "internal"),
+ };
+ var expr = try json_parser.ParseJSON(&source, _log, allocator);
+ var data: js_ast.Expr.Data = undefined;
+ switch (expr.data) {
+ .e_missing => {
+ continue;
+ },
+ .e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
+ data = expr.data;
+ },
+ else => {
+ continue;
+ },
+ }
+
+ user_defines.putAssumeCapacity(entry.key, DefineData{
+ .value = data,
+ });
+ }
return user_defines;
}
@@ -147,6 +147,10 @@ pub const DotDefine = struct {
data: DefineData,
};
+// var nan_val = try allocator.create(js_ast.E.Number);
+var nan_val = js_ast.E.Number{ .value = std.math.nan_f64 };
+var inf_val = js_ast.E.Number{ .value = std.math.inf_f64 };
+
pub const Define = struct {
identifiers: std.StringHashMap(IdentifierDefine),
dots: std.StringHashMap([]DotDefine),
@@ -157,118 +161,110 @@ pub const Define = struct {
define.allocator = allocator;
define.identifiers = std.StringHashMap(IdentifierDefine).init(allocator);
define.dots = std.StringHashMap([]DotDefine).init(allocator);
+ try define.identifiers.ensureCapacity(641);
+ try define.dots.ensureCapacity(64);
+
+ var val = js_ast.Expr.Data{ .e_undefined = .{} };
+ var ident_define = IdentifierDefine{
+ .value = val,
+ };
+ var value_define = DefineData{ .value = val, .valueless = true };
+ // Step 1. Load the globals into the hash tables
+ for (GlobalDefinesKey) |global| {
+ if (global.len == 1) {
+
+ // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
+ define.identifiers.putAssumeCapacity(global[0], value_define);
+ } else {
+ const key = global[global.len - 1];
+ // TODO: move this to comptime
+ // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
+ if (define.dots.getEntry(key)) |entry| {
+ var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
+ list.appendSliceAssumeCapacity(entry.value);
+ list.appendAssumeCapacity(DotDefine{
+ .parts = global[0..global.len],
+ .data = value_define,
+ });
+
+ define.dots.putAssumeCapacity(key, list.toOwnedSlice());
+ } else {
+ var list = try std.ArrayList(DotDefine).initCapacity(allocator, 1);
+ list.appendAssumeCapacity(DotDefine{
+ .parts = global[0..global.len],
+ .data = value_define,
+ });
+
+ define.dots.putAssumeCapacity(key, list.toOwnedSlice());
+ }
+ }
+ }
+
+ // Step 2. Swap in certain literal values because those can be constant folded
+ define.identifiers.putAssumeCapacity("undefined", value_define);
+ define.identifiers.putAssumeCapacity("NaN", DefineData{
+ .value = js_ast.Expr.Data{ .e_number = &nan_val },
+ });
+ define.identifiers.putAssumeCapacity("Infinity", DefineData{
+ .value = js_ast.Expr.Data{ .e_number = &inf_val },
+ });
+
+ // Step 3. Load user data into hash tables
+ // At this stage, user data has already been validated.
+ if (_user_defines) |user_defines| {
+ var iter = user_defines.iterator();
+ while (iter.next()) |user_define| {
+ // If it has a dot, then it's a DotDefine.
+ // e.g. process.env.NODE_ENV
+ if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
+ const tail = user_define.key[last_dot + 1 .. user_define.key.len];
+ const remainder = user_define.key[0..last_dot];
+ const count = std.mem.count(u8, remainder, ".") + 1;
+ var parts = try allocator.alloc(string, count + 1);
+ var splitter = std.mem.split(remainder, ".");
+ var i: usize = 0;
+ while (splitter.next()) |split| : (i += 1) {
+ parts[i] = split;
+ }
+ parts[i] = tail;
+ var didFind = false;
+ var initial_values: []DotDefine = &([_]DotDefine{});
+
+ // "NODE_ENV"
+ if (define.dots.getEntry(tail)) |entry| {
+ for (entry.value) |*part| {
+ // ["process", "env"] === ["process", "env"] (if that actually worked)
+ if (arePartsEqual(part.parts, parts)) {
+ part.data = part.data.merge(user_define.value);
+ didFind = true;
+ break;
+ }
+ }
+
+ initial_values = entry.value;
+ }
+
+ if (!didFind) {
+ var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
+ if (initial_values.len > 0) {
+ list.appendSliceAssumeCapacity(initial_values);
+ }
+
+ list.appendAssumeCapacity(DotDefine{
+ .data = user_define.value,
+ // TODO: do we need to allocate this?
+ .parts = parts,
+ });
+ try define.dots.put(tail, list.toOwnedSlice());
+ }
+ } else {
+ // e.g. IS_BROWSER
+ try define.identifiers.put(user_define.key, user_define.value);
+ }
+ }
+ }
+
return define;
- // try define.identifiers.ensureCapacity(641);
- // try define.dots.ensureCapacity(64);
-
- // var undefined_val = try allocator.create(js_ast.E.Undefined);
- // var val = js_ast.Expr.Data{ .e_undefined = undefined_val };
- // var ident_define = IdentifierDefine{
- // .value = val,
- // };
- // var value_define = DefineData{ .value = val, .valueless = true };
- // // Step 1. Load the globals into the hash tables
- // for (GlobalDefinesKey) |global| {
- // if (global.len == 1) {
-
- // // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
- // define.identifiers.putAssumeCapacity(global[0], value_define);
- // } else {
- // const key = global[global.len - 1];
- // // TODO: move this to comptime
- // // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
- // if (define.dots.getEntry(key)) |entry| {
- // var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
- // list.appendSliceAssumeCapacity(entry.value);
- // list.appendAssumeCapacity(DotDefine{
- // .parts = global[0..global.len],
- // .data = value_define,
- // });
-
- // define.dots.putAssumeCapacity(key, list.toOwnedSlice());
- // } else {
- // var list = try std.ArrayList(DotDefine).initCapacity(allocator, 1);
- // list.appendAssumeCapacity(DotDefine{
- // .parts = global[0..global.len],
- // .data = value_define,
- // });
-
- // define.dots.putAssumeCapacity(key, list.toOwnedSlice());
- // }
- // }
- // }
-
- // var nan_val = try allocator.create(js_ast.E.Number);
- // nan_val.value = std.math.nan_f64;
-
- // var inf_val = try allocator.create(js_ast.E.Number);
- // inf_val.value = std.math.inf_f64;
-
- // // Step 2. Swap in certain literal values because those can be constant folded
- // define.identifiers.putAssumeCapacity("undefined", value_define);
- // define.identifiers.putAssumeCapacity("NaN", DefineData{
- // .value = js_ast.Expr.Data{ .e_number = nan_val },
- // });
- // define.identifiers.putAssumeCapacity("Infinity", DefineData{
- // .value = js_ast.Expr.Data{ .e_number = inf_val },
- // });
-
- // // Step 3. Load user data into hash tables
- // // At this stage, user data has already been validated.
- // if (_user_defines) |user_defines| {
- // var iter = user_defines.iterator();
- // while (iter.next()) |user_define| {
- // // If it has a dot, then it's a DotDefine.
- // // e.g. process.env.NODE_ENV
- // if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
- // const tail = user_define.key[last_dot + 1 .. user_define.key.len];
- // const remainder = user_define.key[0..last_dot];
- // const count = std.mem.count(u8, remainder, ".") + 1;
- // var parts = try allocator.alloc(string, count + 1);
- // var splitter = std.mem.split(remainder, ".");
- // var i: usize = 0;
- // while (splitter.next()) |split| : (i += 1) {
- // parts[i] = split;
- // }
- // parts[i] = tail;
- // var didFind = false;
- // var initial_values: []DotDefine = &([_]DotDefine{});
-
- // // "NODE_ENV"
- // if (define.dots.getEntry(tail)) |entry| {
- // for (entry.value) |*part| {
- // // ["process", "env"] === ["process", "env"] (if that actually worked)
- // if (arePartsEqual(part.parts, parts)) {
- // part.data = part.data.merge(user_define.value);
- // didFind = true;
- // break;
- // }
- // }
-
- // initial_values = entry.value;
- // }
-
- // if (!didFind) {
- // var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
- // if (initial_values.len > 0) {
- // list.appendSliceAssumeCapacity(initial_values);
- // }
-
- // list.appendAssumeCapacity(DotDefine{
- // .data = user_define.value,
- // // TODO: do we need to allocate this?
- // .parts = parts,
- // });
- // try define.dots.put(tail, list.toOwnedSlice());
- // }
- // } else {
- // // e.g. IS_BROWSER
- // try define.identifiers.put(user_define.key, user_define.value);
- // }
- // }
- // }
-
- // return define;
}
};
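
Illustrative sketch (not part of the commit): the re-enabled "Step 3" logic splits a dotted user define key such as "process.env.NODE_ENV" into its parts and keys the resulting DotDefine by the tail segment. The snippet below reproduces just that splitting in isolation, written against a current Zig standard library (std.mem.splitScalar / std.mem.lastIndexOfScalar), whereas the diff itself targets an older two-argument std.mem.split API; the key literal is a hypothetical example standing in for the user_defines iterator.

const std = @import("std");

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    // Hypothetical user define key; the real code walks user_defines.iterator().
    const key = "process.env.NODE_ENV";

    // No dot means a plain identifier define (e.g. IS_BROWSER).
    const last_dot = std.mem.lastIndexOfScalar(u8, key, '.') orelse {
        std.debug.print("plain identifier define: {s}\n", .{key});
        return;
    };
    const tail = key[last_dot + 1 ..]; // "NODE_ENV"
    const remainder = key[0..last_dot]; // "process.env"

    // One slot per dotted segment of the remainder, plus one for the tail.
    const count = std.mem.count(u8, remainder, ".") + 1;
    const parts = try allocator.alloc([]const u8, count + 1);
    defer allocator.free(parts);

    var splitter = std.mem.splitScalar(u8, remainder, '.');
    var i: usize = 0;
    while (splitter.next()) |part| : (i += 1) {
        parts[i] = part;
    }
    parts[i] = tail;

    // parts is now { "process", "env", "NODE_ENV" }; the DotDefine is keyed by the tail.
    std.debug.print("key \"{s}\" -> tail \"{s}\", {d} parts\n", .{ key, tail, parts.len });
}

Keying by the tail lets the merge path in Step 3 look up existing DotDefine entries that end in the same identifier and compare their full part lists via arePartsEqual before deciding whether to merge or append.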