author Jarred Sumner <jarred@jarredsumner.com> 2021-09-09 23:33:34 -0700
committer Jarred Sumner <jarred@jarredsumner.com> 2021-09-09 23:33:34 -0700
commit fc907e2f81698d89502fb2ee0375e6d98a492c13 (patch)
tree e775c2479b334ec901f61b5c0ccfab0102ff679e /src/json_parser.zig
parent 8a02ad48a5eb1319c1bf3e9eb97e013924db875f (diff)
Diffstat (limited to 'src/json_parser.zig')
-rw-r--r-- src/json_parser.zig | 12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/json_parser.zig b/src/json_parser.zig
index 8f8f415f3..4bb20f1cc 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -142,7 +142,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
try p.lexer.next();
var is_single_line = !p.lexer.has_newline_before;
var properties = std.ArrayList(G.Property).init(p.allocator);
- var duplicates = std.BufSet.init(p.allocator);
+ var duplicates = std.AutoHashMap(u64, void).init(p.allocator);
defer duplicates.deinit();
while (p.lexer.token != .t_close_brace) {
@@ -159,13 +159,13 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
}
var str = p.lexer.toEString();
- const is_duplicate = duplicates.contains(p.lexer.string_literal_slice);
- if (!is_duplicate) {
- duplicates.insert(p.lexer.string_literal_slice) catch unreachable;
- }
+ const hash_key = str.hash();
+ const duplicate_get_or_put = duplicates.getOrPut(hash_key) catch unreachable;
+ duplicate_get_or_put.key_ptr.* = hash_key;
+
var key_range = p.lexer.range();
// Warn about duplicate keys
- if (is_duplicate) {
+ if (duplicate_get_or_put.found_existing) {
p.log.addRangeWarningFmt(p.source, key_range, p.allocator, "Duplicate key \"{s}\" in object literal", .{p.lexer.string_literal_slice}) catch unreachable;
}
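
The hunks above replace std.BufSet (which copies every inserted key string) with a std.AutoHashMap(u64, void) keyed by the string's hash, and use getOrPut so the duplicate lookup and the insert happen in a single step. Below is a minimal standalone sketch of that pattern, not Bun's actual parser: the Wyhash call and the example keys are illustrative assumptions standing in for the lexer's string and EString.hash.

const std = @import("std");

// Sketch of the duplicate-key check the diff switches to: hash each key
// to a u64 and call AutoHashMap.getOrPut, so lookup and insert happen in
// one pass. Wyhash and the hard-coded keys are illustrative assumptions.
pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    const keys = [_][]const u8{ "name", "version", "name" };

    var duplicates = std.AutoHashMap(u64, void).init(allocator);
    defer duplicates.deinit();

    for (keys) |key| {
        // Hash the key string; the parser uses the token's own hash instead.
        const hash_key = std.hash.Wyhash.hash(0, key);
        const entry = try duplicates.getOrPut(hash_key);
        if (entry.found_existing) {
            std.debug.print("Duplicate key \"{s}\" in object literal\n", .{key});
        }
    }
}

Storing only the u64 hash avoids BufSet's per-key string copy; the trade-off is that a hash collision could, in principle, suppress a legitimate warning, which is an acceptable risk for a diagnostics-only check.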