aboutsummaryrefslogtreecommitdiff
path: root/src/json_parser.zig
diff options
context:
space:
mode:
Diffstat (limited to 'src/json_parser.zig')
-rw-r--r--src/json_parser.zig138
1 files changed, 110 insertions, 28 deletions
diff --git a/src/json_parser.zig b/src/json_parser.zig
index 8a025864f..afd7c8046 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -11,6 +11,7 @@ usingnamespace @import("strings.zig");
usingnamespace @import("ast/base.zig");
usingnamespace js_ast.G;
+const expect = std.testing.expect;
const ImportKind = importRecord.ImportKind;
const BindingNodeIndex = js_ast.BindingNodeIndex;
@@ -41,13 +42,13 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
const Lexer = if (opts.allow_comments) js_lexer.TSConfigJSONLexer else js_lexer.JSONLexer;
return struct {
lexer: Lexer,
- source: logger.Source,
- log: logger.Log,
+ source: *logger.Source,
+ log: *logger.Log,
allocator: *std.mem.Allocator,
- pub fn init(allocator: *std.mem.Allocator, source: logger.Source, log: logger.Log) Parser {
+ pub fn init(allocator: *std.mem.Allocator, source: *logger.Source, log: *logger.Log) !Parser {
return Parser{
- .lexer = Lexer.init(log, source, allocator),
+ .lexer = try Lexer.init(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
@@ -63,7 +64,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
return Expr.alloc(p.allocator, t, loc);
}
}
- pub fn parseExpr(p: *Parser) ?Expr {
+ pub fn parseExpr(p: *Parser) Expr {
const loc = p.lexer.loc();
switch (p.lexer.token) {
@@ -104,7 +105,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
.t_open_bracket => {
p.lexer.next();
var is_single_line = !p.lexer.has_newline_before;
- var exprs = List(Expr).init(p.allocator);
+ var exprs = std.ArrayList(Expr).init(p.allocator);
while (p.lexer.token != .t_close_bracket) {
if (exprs.items.len > 0) {
@@ -121,11 +122,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
}
}
- if (p.parseExpr()) |expr| {
- try exprs.append(expr);
- } else {
- break;
- }
+ exprs.append(p.parseExpr()) catch unreachable;
}
if (p.lexer.has_newline_before) {
@@ -137,8 +134,8 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
.t_open_brace => {
p.lexer.next();
var is_single_line = !p.lexer.has_newline_before;
- var properties = List(G.Property).init(p.allocator);
- var duplicates = std.StringHashMap(u0).init(p.allocator);
+ var properties = std.ArrayList(G.Property).init(p.allocator);
+ var duplicates = std.StringHashMap(u1).init(p.allocator);
while (p.lexer.token != .t_close_brace) {
if (properties.items.len > 0) {
@@ -153,17 +150,17 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
var key_range = p.lexer.range();
var key = p.e(E.String{ .value = key_string }, key_range.loc);
p.lexer.expect(.t_string_literal);
- var key_text = p.lexer.utf16ToString();
+ var key_text = p.lexer.utf16ToString(key_string);
// Warn about duplicate keys
- if (duplicates.contains(key_text)) {
- p.log.addRangeWarningFmt(p.source, r, "Duplicate key \"{s}\" in object literal", .{key_text}) catch unreachable;
- } else {
- duplicates.put(key_text, 0) catch unreachable;
+
+ const entry = duplicates.getOrPut(key_text) catch unreachable;
+ if (entry.found_existing) {
+ p.log.addRangeWarningFmt(p.source.*, key_range, p.allocator, "Duplicate key \"{s}\" in object literal", .{key_text}) catch unreachable;
}
p.lexer.expect(.t_colon);
- var value = p.parseExpr() orelse return null;
- try properties.append(G.Property{ .key = key, .value = value });
+ var value = p.parseExpr();
+ properties.append(G.Property{ .key = key, .value = value }) catch unreachable;
}
is_single_line = if (p.lexer.has_newline_before) false else is_single_line;
@@ -175,7 +172,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
},
else => {
p.lexer.unexpected();
- return null;
+ return p.e(E.Missing{}, loc);
},
}
}
@@ -186,7 +183,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
if (p.lexer.token == closer) {
if (!opts.allow_trailing_commas) {
- p.log.addRangeError(p.source, comma_range, "JSON does not support trailing commas") catch unreachable;
+ p.log.addRangeError(p.source.*, comma_range, "JSON does not support trailing commas") catch unreachable;
}
return false;
}
@@ -199,14 +196,99 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
const JSONParser = JSONLikeParser(js_lexer.JSONOptions{});
const TSConfigParser = JSONLikeParser(js_lexer.JSONOptions{ .allow_comments = true, .allow_trailing_commas = true });
-pub fn ParseJSON(log: logger.Log, source: logger.Source) !?Expr {
- var parser = JSONParser.init(allocator, log, source);
+/// Parses `source` as strict JSON (default `JSONOptions`: comments and
+/// trailing commas are rejected). Warnings/errors are appended to `log`.
+pub fn ParseJSON(source: *logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !Expr {
+    var parser = try JSONParser.init(allocator, source, log);
+
+    return parser.parseExpr();
+}
+
+/// Parses `source` as tsconfig-style JSON (comments and trailing commas allowed).
+/// Signature mirrors `ParseJSON`: `init` expects (allocator, *Source, *Log), so the
+/// previous `(log: logger.Loc, source: logger.Source)` declaration could not compile.
+pub fn ParseTSConfig(source: *logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !Expr {
+    var parser = try TSConfigParser.init(allocator, source, log);
+
+    return parser.parseExpr();
+}
+
+const duplicateKeyJson = "{ \"name\": \"valid\", \"name\": \"invalid\" }";
+
+// Test helper: parses `_contents` as JSON, prints the resulting AST back out
+// as JS source via js_printer, and asserts the printed text equals `expected`
+// (after stripping trailing newlines and the synthetic trailing ';').
+fn expectPrintedJSON(_contents: string, expected: string) void {
+    if (alloc.dynamic_manager == null) {
+        alloc.setup(std.heap.page_allocator) catch unreachable;
+    }
+
+    // Copy the input and append ';' so the printer emits a complete statement.
+    var contents = alloc.dynamic.alloc(u8, _contents.len + 1) catch unreachable;
+    std.mem.copy(u8, contents, _contents);
+    contents[contents.len - 1] = ';';
+    var log = logger.Log.init(alloc.dynamic);
+    const js_printer = @import("js_printer.zig");
+
+    var source = logger.Source.initPathString(
+        "source.json",
+        contents,
+    );
+    // This function returns `void`, so `try` would be a compile error here;
+    // a parse failure means the test itself is broken, so treat it as fatal.
+    const expr = ParseJSON(&source, &log, alloc.dynamic) catch unreachable;
+    var stmt = Stmt.alloc(std.heap.page_allocator, S.SExpr{ .value = expr }, logger.Loc{ .start = 0 });
+
+    var part = js_ast.Part{
+        .stmts = &([_]Stmt{stmt}),
+    };
+    const tree = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
+    var symbol_map = Symbol.Map{};
+    if (log.msgs.items.len > 0) {
+        std.debug.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
+    }
+
+    const result = js_printer.printAst(std.heap.page_allocator, tree, symbol_map, true, js_printer.Options{ .to_module_ref = Ref{ .inner_index = 0 } }) catch unreachable;
+
+    var js = result.js;
+
+    // Strip trailing newlines, then the trailing ';' appended above. Guard
+    // each access on js.len so an all-newline output cannot index-underflow.
+    while (js.len > 0 and js[js.len - 1] == '\n') {
+        js = js[0 .. js.len - 1];
+    }
+    if (js.len > 0 and js[js.len - 1] == ';') {
+        js = js[0 .. js.len - 1];
+    }
+
+    std.testing.expectEqualStrings(expected, js);
+}
- return try parser.parseExpr();
+// Smoke test: trivial JSON literals survive a parse -> print round trip.
+test "ParseJSON" {
+    expectPrintedJSON("true", "true");
+    expectPrintedJSON("false", "false");
}
-pub fn ParseTSConfig(log: logger.Loc, source: logger.Source) !?Expr {
- var parser = TSConfigParser.init(allocator, log, source);
+// Duplicate object keys must still parse: both properties are kept in source
+// order (no de-duplication), and exactly one warning is recorded in the log.
+test "ParseJSON DuplicateKey warning" {
+    alloc.setup(std.heap.page_allocator) catch unreachable;
+    var log = logger.Log.init(alloc.dynamic);
+
+    var source = logger.Source.initPathString(
+        "package.json",
+        duplicateKeyJson,
+    );
+    const expr = try ParseJSON(&source, &log, alloc.dynamic);
+
+    const tag = @as(Expr.Tag, expr.data);
+    expect(tag == .e_object);
+    const object = expr.data.e_object;
+    // Both occurrences of "name" are preserved.
+    std.testing.expectEqual(@as(usize, 2), object.properties.len);
+    const name1 = object.properties[0];
+    expect(name1.key != null);
+    expect(name1.value != null);
+    expect(Expr.Tag.e_string == @as(Expr.Tag, name1.value.?.data));
+    expect(Expr.Tag.e_string == @as(Expr.Tag, name1.key.?.data));
+    expect(strings.eqlUtf16("name", name1.key.?.data.e_string.value));
+    expect(strings.eqlUtf16("valid", name1.value.?.data.e_string.value));
+
+    const name2 = object.properties[1];
+    expect(name2.key != null);
+    expect(name2.value != null);
+    expect(Expr.Tag.e_string == @as(Expr.Tag, name2.value.?.data));
+    expect(Expr.Tag.e_string == @as(Expr.Tag, name2.key.?.data));
+    expect(strings.eqlUtf16("name", name2.key.?.data.e_string.value));
+    std.testing.expectEqualStrings("invalid", try name2.value.?.data.e_string.string(alloc.dynamic));
-    return try parser.parseExpr();
+    // Exactly one log entry: the duplicate-"name" warning.
+    std.testing.expectEqual(@as(usize, 1), log.msgs.items.len);
}