aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/bundler.zig35
-rw-r--r--src/js_lexer.zig5
-rw-r--r--src/json_parser.zig38
3 files changed, 63 insertions, 15 deletions
diff --git a/src/bundler.zig b/src/bundler.zig
index 2240d9115..b34241f1b 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -1595,22 +1595,27 @@ pub fn NewBundler(cache_files: bool) type {
},
.json => {
// parse the JSON _only_ to catch errors at build time.
- const parsed_expr = json_parser.ParseJSON(&source, worker.data.log, worker.allocator) catch return;
-
- if (parsed_expr.data != .e_missing) {
-
- // Then, we store it as a UTF8 string at runtime
- // This is because JavaScript engines are much faster at parsing JSON strings than object literals
- json_e_string = js_ast.E.String{ .utf8 = source.contents, .prefer_template = true };
- var json_string_expr = js_ast.Expr{ .data = .{ .e_string = &json_e_string }, .loc = logger.Loc{ .start = 0 } };
- json_call_args[0] = json_string_expr;
- json_e_identifier = js_ast.E.Identifier{ .ref = Ref{ .source_index = 0, .inner_index = @intCast(Ref.Int, json_ast_symbols_list.len - 1) } };
-
- json_e_call = js_ast.E.Call{
- .target = js_ast.Expr{ .data = .{ .e_identifier = &json_e_identifier }, .loc = logger.Loc{ .start = 0 } },
- .args = std.mem.span(&json_call_args),
+ const json_parse_result = json_parser.ParseJSONForBundling(&source, worker.data.log, worker.allocator) catch return;
+
+ if (json_parse_result.tag != .empty) {
+ const expr = brk: {
+ // If it's an ascii string, we just print it out with a big old JSON.parse()
+ if (json_parse_result.tag == .ascii) {
+ json_e_string = js_ast.E.String{ .utf8 = source.contents, .prefer_template = true };
+ var json_string_expr = js_ast.Expr{ .data = .{ .e_string = &json_e_string }, .loc = logger.Loc{ .start = 0 } };
+ json_call_args[0] = json_string_expr;
+ json_e_identifier = js_ast.E.Identifier{ .ref = Ref{ .source_index = 0, .inner_index = @intCast(Ref.Int, json_ast_symbols_list.len - 1) } };
+
+ json_e_call = js_ast.E.Call{
+ .target = js_ast.Expr{ .data = .{ .e_identifier = &json_e_identifier }, .loc = logger.Loc{ .start = 0 } },
+ .args = std.mem.span(&json_call_args),
+ };
+ break :brk js_ast.Expr{ .data = .{ .e_call = &json_e_call }, .loc = logger.Loc{ .start = 0 } };
+ // If we're going to have to convert it to a UTF16, just make it an object actually
+ } else {
+ break :brk json_parse_result.expr;
+ }
};
- const expr = js_ast.Expr{ .data = .{ .e_call = &json_e_call }, .loc = logger.Loc{ .start = 0 } };
var stmt = js_ast.Stmt.alloc(worker.allocator, js_ast.S.ExportDefault, js_ast.S.ExportDefault{
.value = js_ast.StmtOrExpr{ .expr = expr },
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 2cfc0f732..3491ca8a7 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -84,6 +84,8 @@ pub const Lexer = struct {
string_literal: JavascriptString,
string_literal_is_ascii: bool = false,
+ is_ascii_only: bool = true,
+
pub fn clone(self: *const LexerType) LexerType {
return LexerType{
.log = self.log,
@@ -117,6 +119,7 @@ pub const Lexer = struct {
.string_literal_slice = self.string_literal_slice,
.string_literal = self.string_literal,
.string_literal_is_ascii = self.string_literal_is_ascii,
+ .is_ascii_only = self.is_ascii_only,
};
}
@@ -210,6 +213,7 @@ pub const Lexer = struct {
pub fn decodeEscapeSequences(lexer: *LexerType, start: usize, text: string, comptime BufType: type, buf_: *BufType) !void {
var buf = buf_.*;
defer buf_.* = buf;
+ lexer.is_ascii_only = false;
var iter = CodepointIterator{ .bytes = text[start..], .i = 0 };
const start_length = buf.items.len;
@@ -649,6 +653,7 @@ pub const Lexer = struct {
try lexer.decodeEscapeSequences(0, lexer.string_literal_slice, @TypeOf(lexer.string_literal_buffer), &lexer.string_literal_buffer);
lexer.string_literal = lexer.string_literal_buffer.items;
}
+ lexer.is_ascii_only = lexer.is_ascii_only and lexer.string_literal_is_ascii;
if (comptime !FeatureFlags.allow_json_single_quotes) {
if (quote == '\'' and lexer.json_options != null) {
diff --git a/src/json_parser.zig b/src/json_parser.zig
index 696b62325..5d65291a7 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -255,6 +255,44 @@ pub fn ParseJSON(source: *const logger.Source, log: *logger.Log, allocator: *std
return try parser.parseExpr(false);
}
+pub const JSONParseResult = struct {
+ expr: Expr,
+ tag: Tag,
+
+ pub const Tag = enum {
+ expr,
+ ascii,
+ empty,
+ };
+};
+
+pub fn ParseJSONForBundling(source: *const logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !JSONParseResult {
+ var parser = try JSONParser.init(allocator, source, log);
+ switch (source.contents.len) {
+ // This is to be consistent with how disabled JS files are handled
+ 0 => {
+ return JSONParseResult{ .expr = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data }, .tag = .empty };
+ },
+ // This is a fast pass I guess
+ 2 => {
+ if (strings.eqlComptime(source.contents[0..2], "\"\"") or strings.eqlComptime(source.contents[0..2], "''")) {
+ return JSONParseResult{ .expr = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_string_data }, .tag = .expr };
+ } else if (strings.eqlComptime(source.contents[0..2], "{}")) {
+ return JSONParseResult{ .expr = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data }, .tag = .expr };
+ } else if (strings.eqlComptime(source.contents[0..2], "[]")) {
+ return JSONParseResult{ .expr = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_array_data }, .tag = .expr };
+ }
+ },
+ else => {},
+ }
+
+ const result = try parser.parseExpr(false);
+ return JSONParseResult{
+ .tag = if (parser.lexer.is_ascii_only) JSONParseResult.Tag.ascii else JSONParseResult.Tag.expr,
+ .expr = result,
+ };
+}
+
// threadlocal var env_json_auto_quote_buffer: MutableString = undefined;
// threadlocal var env_json_auto_quote_buffer_loaded: bool = false;
pub fn ParseEnvJSON(source: *const logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !Expr {