Diffstat (limited to 'src')
-rw-r--r--  src/baby_list.zig                 24
-rw-r--r--  src/bun.js/api/server.zig          6
-rw-r--r--  src/bun.js/webcore/streams.zig     4
-rw-r--r--  src/install/install.zig            5
-rw-r--r--  src/install/lockfile.zig          16
-rw-r--r--  src/js_ast.zig                    56
-rw-r--r--  src/js_lexer.zig                   2
-rw-r--r--  src/js_printer.zig               107
-rw-r--r--  src/json_parser.zig               80
-rw-r--r--  src/logger.zig                     4
10 files changed, 186 insertions(+), 118 deletions(-)
diff --git a/src/baby_list.zig b/src/baby_list.zig
index cb01c0dca..99844d72e 100644
--- a/src/baby_list.zig
+++ b/src/baby_list.zig
@@ -24,11 +24,10 @@ pub fn BabyList(comptime Type: type) type {
}
pub fn contains(this: @This(), item: []const Type) bool {
- return @ptrToInt(item.ptr) >= @ptrToInt(this.ptr) and @ptrToInt(item.ptr) < @ptrToInt(this.ptr) + this.len;
+ return this.len > 0 and @ptrToInt(item.ptr) >= @ptrToInt(this.ptr) and @ptrToInt(item.ptr) < @ptrToInt(this.ptr) + this.len;
}
- pub inline fn init(items: []const Type) ListType {
- @setRuntimeSafety(false);
+ pub inline fn initConst(items: []const Type) ListType {
return ListType{
// Remove the const qualifier from the items
.ptr = @intToPtr([*]Type, @ptrToInt(items.ptr)),
@@ -38,9 +37,15 @@ pub fn BabyList(comptime Type: type) type {
};
}
- pub inline fn fromList(list_: anytype) ListType {
- @setRuntimeSafety(false);
+ pub inline fn init(items: []Type) ListType {
+ return ListType{
+ .ptr = items.ptr,
+ .len = @truncate(u32, items.len),
+ .cap = @truncate(u32, items.len),
+ };
+ }
+ pub inline fn fromList(list_: anytype) ListType {
if (comptime Environment.allow_assert) {
std.debug.assert(list_.items.len <= list_.capacity);
}
@@ -53,10 +58,11 @@ pub fn BabyList(comptime Type: type) type {
}
pub fn update(this: *ListType, list_: anytype) void {
- @setRuntimeSafety(false);
- this.ptr = list_.items.ptr;
- this.len = @truncate(u32, list_.items.len);
- this.cap = @truncate(u32, list_.capacity);
+ this.* = .{
+ .ptr = list_.items.ptr,
+ .len = @truncate(u32, list_.items.len),
+ .cap = @truncate(u32, list_.capacity),
+ };
if (comptime Environment.allow_assert) {
std.debug.assert(this.len <= this.cap);
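
A minimal usage sketch of the renamed constructors, assuming ByteList is the u8
instantiation of BabyList (matching the bun.ByteList call sites below): init takes
a mutable slice and uses its length for both len and cap, while initConst accepts a
const slice and casts the qualifier away, so the result must be treated as read-only.

    const ByteList = BabyList(u8);               // assumed alias for illustration
    var buf = [_]u8{ 1, 2, 3 };
    var writable = ByteList.init(buf[0..]);      // []u8: ptr, len, and cap come straight from the slice
    const readonly = ByteList.initConst("abc");  // []const u8: const is cast away; do not write through it
    _ = writable;
    _ = readonly;
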
diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig
index 3a50a472f..384db3a60 100644
--- a/src/bun.js/api/server.zig
+++ b/src/bun.js/api/server.zig
@@ -1449,7 +1449,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
.sink = .{
.res = this.resp,
.allocator = this.allocator,
- .buffer = bun.ByteList.init(""),
+ .buffer = bun.ByteList{},
},
};
var signal = &response_stream.sink.signal;
@@ -2291,14 +2291,14 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
if (!last) {
readable.ptr.Bytes.onData(
.{
- .temporary = bun.ByteList.init(chunk),
+ .temporary = bun.ByteList.initConst(chunk),
},
bun.default_allocator,
);
} else {
readable.ptr.Bytes.onData(
.{
- .temporary_and_done = bun.ByteList.init(chunk),
+ .temporary_and_done = bun.ByteList.initConst(chunk),
},
bun.default_allocator,
);
diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig
index 6428fb566..f4f4ffb7c 100644
--- a/src/bun.js/webcore/streams.zig
+++ b/src/bun.js/webcore/streams.zig
@@ -2058,10 +2058,10 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type {
}
if (str.is16Bit()) {
- return this.sink.writeUTF16(.{ .temporary = bun.ByteList.init(std.mem.sliceAsBytes(str.utf16SliceAligned())) }).toJS(globalThis);
+ return this.sink.writeUTF16(.{ .temporary = bun.ByteList.initConst(std.mem.sliceAsBytes(str.utf16SliceAligned())) }).toJS(globalThis);
}
- return this.sink.writeLatin1(.{ .temporary = bun.ByteList.init(str.slice()) }).toJS(globalThis);
+ return this.sink.writeLatin1(.{ .temporary = bun.ByteList.initConst(str.slice()) }).toJS(globalThis);
}
pub fn writeUTF8(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSValue {
diff --git a/src/install/install.zig b/src/install/install.zig
index 8bb0828c7..cd915a900 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -5988,7 +5988,10 @@ pub const PackageManager = struct {
.global_bin_dir = this.options.global_bin_dir,
.force_install = force_install,
.install_count = lockfile.buffers.hoisted_packages.items.len,
- .successfully_installed = try Bitset.initEmpty(lockfile.packages.len, this.allocator),
+ .successfully_installed = try Bitset.initEmpty(
+ this.allocator,
+ lockfile.packages.len,
+ ),
};
const cwd = std.fs.cwd();
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 93d81e18e..29a5b8e50 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -642,7 +642,10 @@ pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile
invalid_package_id,
);
var clone_queue_ = PendingResolutions.init(old.allocator);
- new.unique_packages = try Bitset.initEmpty(old.unique_packages.bit_length, old.allocator);
+ new.unique_packages = try Bitset.initEmpty(
+ old.allocator,
+ old.unique_packages.bit_length,
+ );
var cloner = Cloner{
.old = old,
.lockfile = new,
@@ -993,7 +996,10 @@ pub const Printer = struct {
writer: Writer,
comptime enable_ansi_colors: bool,
) !void {
- var visited = try Bitset.initEmpty(this.lockfile.packages.len, this.lockfile.allocator);
+ var visited = try Bitset.initEmpty(
+ this.lockfile.allocator,
+ this.lockfile.packages.len,
+ );
var slice = this.lockfile.packages.slice();
const names: []const String = slice.items(.name);
@@ -1466,7 +1472,7 @@ pub fn initEmpty(this: *Lockfile, allocator: std.mem.Allocator) !void {
.packages = Lockfile.Package.List{},
.buffers = Buffers{},
.package_index = PackageIndex.Map.initContext(allocator, .{}),
- .unique_packages = try Bitset.initFull(0, allocator),
+ .unique_packages = try Bitset.initFull(allocator, 0),
.string_pool = StringPool.init(allocator),
.allocator = allocator,
.scratch = Scratch.init(allocator),
@@ -1535,7 +1541,7 @@ pub fn getPackageID(
}
pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !void {
- if (this.unique_packages.capacity() < this.packages.len) try this.unique_packages.resize(this.packages.len, true, this.allocator);
+ if (this.unique_packages.capacity() < this.packages.len) try this.unique_packages.resize(this.allocator, this.packages.len, true);
var gpe = try this.package_index.getOrPut(name_hash);
if (gpe.found_existing) {
@@ -3272,7 +3278,7 @@ pub const Serializer = struct {
{
lockfile.package_index = PackageIndex.Map.initContext(allocator, .{});
- lockfile.unique_packages = try Bitset.initFull(lockfile.packages.len, allocator);
+ lockfile.unique_packages = try Bitset.initFull(allocator, lockfile.packages.len);
lockfile.string_pool = StringPool.initContext(allocator, .{});
try lockfile.package_index.ensureTotalCapacity(@truncate(u32, lockfile.packages.len));
var slice = lockfile.packages.slice();
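
The Bitset call sites in this file now pass the allocator first; a minimal sketch of
the same calling convention, assuming Bitset is (or wraps) std.bit_set.DynamicBitSetUnmanaged:

    const std = @import("std");

    test "allocator-first bit set calls" {
        const allocator = std.testing.allocator;
        var bits = try std.bit_set.DynamicBitSetUnmanaged.initEmpty(allocator, 128);
        defer bits.deinit(allocator);
        try bits.resize(allocator, 256, true); // grow; the newly added bits are filled with `true`
        try std.testing.expect(bits.isSet(200));
    }
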
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 318c67704..7e5d346e2 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -548,13 +548,6 @@ pub const G = struct {
};
pub const Property = struct {
- class_static_block: ?*ClassStaticBlock = null,
- ts_decorators: ExprNodeList = ExprNodeList{},
- // Key is optional for spread
- key: ?ExprNodeIndex = null,
-
- // This is omitted for class fields
- value: ?ExprNodeIndex = null,
// This is used when parsing a pattern that uses default values:
//
@@ -569,6 +562,14 @@ pub const G = struct {
kind: Kind = Kind.normal,
flags: Flags.Property.Set = Flags.Property.None,
+ class_static_block: ?*ClassStaticBlock = null,
+ ts_decorators: ExprNodeList = ExprNodeList{},
+ // Key is optional for spread
+ key: ?ExprNodeIndex = null,
+
+ // This is omitted for class fields
+ value: ?ExprNodeIndex = null,
+
pub const List = BabyList(Property);
pub const Kind = enum(u3) {
@@ -1548,19 +1549,19 @@ pub const E = struct {
const PackageJSONSort = struct {
const Fields = enum(u8) {
- name,
- version,
- author,
- repository,
- config,
- main,
- module,
- dependencies,
- devDependencies,
- optionalDependencies,
- peerDependencies,
- exports,
- __fake,
+ name = 0,
+ version = 1,
+ author = 2,
+ repository = 3,
+ config = 4,
+ main = 5,
+ module = 6,
+ dependencies = 7,
+ devDependencies = 8,
+ optionalDependencies = 9,
+ peerDependencies = 10,
+ exports = 11,
+ __fake = 12,
pub const Map = ComptimeStringMap(Fields, .{
.{ "name", Fields.name },
@@ -1576,7 +1577,6 @@ pub const E = struct {
.{ "peerDependencies", Fields.peerDependencies },
.{ "exports", Fields.exports },
});
- const max_key_size = 12;
pub fn isLessThan(ctx: void, lhs: G.Property, rhs: G.Property) bool {
var lhs_key_size: u8 = @enumToInt(Fields.__fake);
@@ -2187,6 +2187,13 @@ pub const Stmt = struct {
has_inited = false;
}
+ pub fn assert() void {
+ if (comptime Environment.allow_assert) {
+ if (!has_inited)
+ bun.unreachablePanic("Store must be init'd", .{});
+ }
+ }
+
pub fn append(comptime ValueType: type, value: anytype) *ValueType {
return All.append(ValueType, value);
}
@@ -3866,6 +3873,13 @@ pub const Expr = struct {
_ = All.init(allocator);
}
+ pub fn assert() void {
+ if (comptime Environment.allow_assert) {
+ if (!has_inited)
+ bun.unreachablePanic("Store must be init'd", .{});
+ }
+ }
+
pub fn reset() void {
if (disable_reset) return;
All.reset();
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 6ff8c1ec5..7764b0c4d 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -3018,7 +3018,7 @@ fn indexOfInterestingCharacterInStringLiteral(text_: []const u8, quote: u8) ?usi
@bitCast(V1x16, backslash == vec);
if (@reduce(.Max, any_significant) > 0) {
- const bitmask = @ptrCast(*const u16, &any_significant).*;
+ const bitmask = @bitCast(u16, any_significant);
const first = @ctz(bitmask);
std.debug.assert(first < strings.ascii_vector_size);
return first + (@ptrToInt(text.ptr) - @ptrToInt(text_.ptr));
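
A small sketch of the lane-mask idiom this change relies on, assuming V1x16 is
@Vector(16, u1) and a little-endian target (lane 0 maps to bit 0), so @ctz on the
bit-cast mask yields the index of the first matching lane without a pointer cast:

    const std = @import("std");

    test "vector lane mask" {
        const V1x16 = @Vector(16, u1);
        const lanes: V1x16 = .{ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
        const mask = @bitCast(u16, lanes); // one bit per lane
        try std.testing.expectEqual(@as(u16, 2), @ctz(mask));
    }
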
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 1f2237ab3..0e5ee1716 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -2069,22 +2069,30 @@ pub fn NewPrinter(
}
p.addSourceMapping(expr.loc);
p.print("{");
- const props = e.properties.slice();
+ const props = expr.data.e_object.properties.slice();
if (props.len > 0) {
p.options.indent += @as(usize, @boolToInt(!e.is_single_line));
- for (props) |property, i| {
- if (i != 0) {
+ if (e.is_single_line) {
+ p.printSpace();
+ } else {
+ p.printNewline();
+ p.printIndent();
+ }
+ p.printProperty(props[0]);
+
+ if (props.len > 1) {
+ for (props[1..]) |property| {
p.print(",");
- }
- if (e.is_single_line) {
- p.printSpace();
- } else {
- p.printNewline();
- p.printIndent();
+ if (e.is_single_line) {
+ p.printSpace();
+ } else {
+ p.printNewline();
+ p.printIndent();
+ }
+ p.printProperty(property);
}
- p.printProperty(property);
}
if (!e.is_single_line) {
@@ -2621,44 +2629,48 @@ pub fn NewPrinter(
},
else => {},
}
- }
- if (item.value) |val| {
- switch (val.data) {
- .e_function => |func| {
- if (item.flags.contains(.is_method)) {
- if (func.func.flags.contains(.is_async)) {
- p.printSpaceBeforeIdentifier();
- p.print("async");
- }
+ if (item.value) |val| {
+ switch (val.data) {
+ .e_function => |func| {
+ if (item.flags.contains(.is_method)) {
+ if (func.func.flags.contains(.is_async)) {
+ p.printSpaceBeforeIdentifier();
+ p.print("async");
+ }
- if (func.func.flags.contains(.is_generator)) {
- p.print("*");
- }
+ if (func.func.flags.contains(.is_generator)) {
+ p.print("*");
+ }
- if (func.func.flags.contains(.is_generator) and func.func.flags.contains(.is_async)) {
- p.printSpace();
+ if (func.func.flags.contains(.is_generator) and func.func.flags.contains(.is_async)) {
+ p.printSpace();
+ }
}
- }
- },
- else => {},
- }
+ },
+ else => {},
+ }
- // If var is declared in a parent scope and var is then written via destructuring pattern, key is null
- // example:
- // var foo = 1;
- // if (true) {
- // var { foo } = { foo: 2 };
- // }
- if (item.key == null) {
- p.printExpr(val, .comma, ExprFlag.None());
- return;
+ // If var is declared in a parent scope and var is then written via destructuring pattern, key is null
+ // example:
+ // var foo = 1;
+ // if (true) {
+ // var { foo } = { foo: 2 };
+ // }
+ if (item.key == null) {
+ p.printExpr(val, .comma, ExprFlag.None());
+ return;
+ }
}
}
const _key = item.key.?;
if (item.flags.contains(.is_computed)) {
+ if (comptime is_json) {
+ unreachable;
+ }
+
p.print("[");
p.printExpr(_key, .comma, ExprFlag.None());
p.print("]");
@@ -2687,6 +2699,10 @@ pub fn NewPrinter(
switch (_key.data) {
.e_private_identifier => |priv| {
+ if (comptime is_json) {
+ unreachable;
+ }
+
p.addSourceMapping(_key.loc);
p.printSymbol(priv.ref);
},
@@ -2792,11 +2808,19 @@ pub fn NewPrinter(
}
},
else => {
+ if (comptime is_json) {
+ unreachable;
+ }
+
p.printExpr(_key, .lowest, ExprFlag.Set{});
},
}
if (item.kind != .normal) {
+ if (comptime is_json) {
+ bun.unreachablePanic("item.kind must be normal in json, received: {any}", .{item.kind});
+ }
+
switch (item.value.?.data) {
.e_function => |func| {
p.printFunc(func.func);
@@ -2823,6 +2847,10 @@ pub fn NewPrinter(
p.printExpr(val, .comma, ExprFlag.Set{});
}
+ if (comptime is_json) {
+ std.debug.assert(item.initializer == null);
+ }
+
if (item.initializer) |initial| {
p.printInitializer(initial);
}
@@ -5211,6 +5239,9 @@ pub fn printJSON(
var stmts = &[_]js_ast.Stmt{stmt};
var parts = &[_]js_ast.Part{.{ .stmts = stmts }};
const ast = Ast.initTest(parts);
+ var arena = std.heap.ArenaAllocator.init(bun.default_allocator);
+ defer arena.deinit();
+ var allocator = arena.allocator();
var printer = try PrinterType.init(
writer,
&ast,
@@ -5218,7 +5249,7 @@ pub fn printJSON(
std.mem.zeroes(Symbol.Map),
.{},
null,
- undefined,
+ allocator,
);
printer.printExpr(expr, Level.lowest, ExprFlag.Set{});
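
The printer previously received `undefined` as its allocator here; the arena keeps
anything allocated while printing valid until printJSON returns and then frees it in
one call. A minimal sketch of the pattern with std.heap.ArenaAllocator:

    const std = @import("std");

    test "arena frees all printer-side allocations at once" {
        var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
        defer arena.deinit(); // releases every allocation made through `allocator`
        const allocator = arena.allocator();
        const scratch = try allocator.alloc(u8, 64);
        _ = scratch; // no individual free needed
    }
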
diff --git a/src/json_parser.zig b/src/json_parser.zig
index 619590c0f..9ce9fea7c 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -84,6 +84,26 @@ const HashMapPool = struct {
}
};
+fn newExpr(t: anytype, loc: logger.Loc) Expr {
+ const Type = @TypeOf(t);
+ if (comptime @typeInfo(Type) == .Pointer) {
+ @compileError("Unexpected pointer");
+ }
+
+ if (comptime Environment.allow_assert) {
+ if (comptime Type == E.Object) {
+ for (t.properties.slice()) |prop| {
+ // json should never have an initializer set
+ std.debug.assert(prop.initializer == null);
+ std.debug.assert(prop.key != null);
+ std.debug.assert(prop.value != null);
+ }
+ }
+ }
+
+ return Expr.init(Type, t, loc);
+}
+
// This hack fixes using LLDB
fn JSONLikeParser(comptime opts: js_lexer.JSONOptions) type {
return JSONLikeParser_(
@@ -123,6 +143,9 @@ fn JSONLikeParser_(
allocator: std.mem.Allocator,
pub fn init(allocator: std.mem.Allocator, source_: logger.Source, log: *logger.Log) !Parser {
+ Expr.Data.Store.assert();
+ Stmt.Data.Store.assert();
+
return Parser{
.lexer = try Lexer.init(log, source_, allocator),
.allocator = allocator,
@@ -136,33 +159,25 @@ fn JSONLikeParser_(
const Parser = @This();
- pub fn e(_: *Parser, t: anytype, loc: logger.Loc) Expr {
- const Type = @TypeOf(t);
- if (@typeInfo(Type) == .Pointer) {
- return Expr.init(std.meta.Child(Type), t.*, loc);
- } else {
- return Expr.init(Type, t, loc);
- }
- }
pub fn parseExpr(p: *Parser, comptime maybe_auto_quote: bool, comptime force_utf8: bool) anyerror!Expr {
const loc = p.lexer.loc();
switch (p.lexer.token) {
.t_false => {
try p.lexer.next();
- return p.e(E.Boolean{
+ return newExpr(E.Boolean{
.value = false,
}, loc);
},
.t_true => {
try p.lexer.next();
- return p.e(E.Boolean{
+ return newExpr(E.Boolean{
.value = true,
}, loc);
},
.t_null => {
try p.lexer.next();
- return p.e(E.Null{}, loc);
+ return newExpr(E.Null{}, loc);
},
.t_string_literal => {
var str: E.String = p.lexer.toEString();
@@ -171,18 +186,18 @@ fn JSONLikeParser_(
}
try p.lexer.next();
- return p.e(str, loc);
+ return newExpr(str, loc);
},
.t_numeric_literal => {
const value = p.lexer.number;
try p.lexer.next();
- return p.e(E.Number{ .value = value }, loc);
+ return newExpr(E.Number{ .value = value }, loc);
},
.t_minus => {
try p.lexer.next();
const value = p.lexer.number;
try p.lexer.expect(.t_numeric_literal);
- return p.e(E.Number{ .value = -value }, loc);
+ return newExpr(E.Number{ .value = -value }, loc);
},
.t_open_bracket => {
try p.lexer.next();
@@ -211,7 +226,7 @@ fn JSONLikeParser_(
is_single_line = false;
}
try p.lexer.expect(.t_close_bracket);
- return p.e(E.Array{
+ return newExpr(E.Array{
.items = ExprNodeList.fromList(exprs),
.is_single_line = is_single_line,
.was_originally_macro = comptime opts.was_originally_macro,
@@ -261,7 +276,7 @@ fn JSONLikeParser_(
p.lexer.toEString();
const key_range = p.lexer.range();
- const key = p.e(str, key_range.loc);
+ const key = newExpr(str, key_range.loc);
try p.lexer.expect(.t_string_literal);
if (comptime opts.json_warn_duplicate_keys) {
@@ -281,6 +296,7 @@ fn JSONLikeParser_(
.key = key,
.value = value,
.kind = js_ast.G.Property.Kind.normal,
+ .initializer = null,
}) catch unreachable;
}
@@ -288,7 +304,7 @@ fn JSONLikeParser_(
is_single_line = false;
}
try p.lexer.expect(.t_close_brace);
- return p.e(E.Object{
+ return newExpr(E.Object{
.properties = G.Property.List.fromList(properties),
.is_single_line = is_single_line,
.was_originally_macro = comptime opts.was_originally_macro,
@@ -375,52 +391,44 @@ pub const PackageJSONVersionChecker = struct {
const Parser = @This();
- pub fn e(_: *Parser, t: anytype, loc: logger.Loc) Expr {
- const Type = @TypeOf(t);
- if (@typeInfo(Type) == .Pointer) {
- return Expr.init(std.meta.Child(Type), t.*, loc);
- } else {
- return Expr.init(Type, t, loc);
- }
- }
pub fn parseExpr(p: *Parser) anyerror!Expr {
const loc = p.lexer.loc();
- if (p.has_found_name and p.has_found_version) return p.e(E.Missing{}, loc);
+ if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc);
switch (p.lexer.token) {
.t_false => {
try p.lexer.next();
- return p.e(E.Boolean{
+ return newExpr(E.Boolean{
.value = false,
}, loc);
},
.t_true => {
try p.lexer.next();
- return p.e(E.Boolean{
+ return newExpr(E.Boolean{
.value = true,
}, loc);
},
.t_null => {
try p.lexer.next();
- return p.e(E.Null{}, loc);
+ return newExpr(E.Null{}, loc);
},
.t_string_literal => {
var str: E.String = p.lexer.toEString();
try p.lexer.next();
- return p.e(str, loc);
+ return newExpr(str, loc);
},
.t_numeric_literal => {
const value = p.lexer.number;
try p.lexer.next();
- return p.e(E.Number{ .value = value }, loc);
+ return newExpr(E.Number{ .value = value }, loc);
},
.t_minus => {
try p.lexer.next();
const value = p.lexer.number;
try p.lexer.expect(.t_numeric_literal);
- return p.e(E.Number{ .value = -value }, loc);
+ return newExpr(E.Number{ .value = -value }, loc);
},
.t_open_bracket => {
try p.lexer.next();
@@ -438,7 +446,7 @@ pub const PackageJSONVersionChecker = struct {
}
try p.lexer.expect(.t_close_bracket);
- return p.e(E.Missing{}, loc);
+ return newExpr(E.Missing{}, loc);
},
.t_open_brace => {
try p.lexer.next();
@@ -456,7 +464,7 @@ pub const PackageJSONVersionChecker = struct {
const str = p.lexer.toEString();
const key_range = p.lexer.range();
- const key = p.e(str, key_range.loc);
+ const key = newExpr(str, key_range.loc);
try p.lexer.expect(.t_string_literal);
try p.lexer.expect(.t_colon);
@@ -487,12 +495,12 @@ pub const PackageJSONVersionChecker = struct {
}
}
- if (p.has_found_name and p.has_found_version) return p.e(E.Missing{}, loc);
+ if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc);
has_properties = true;
}
try p.lexer.expect(.t_close_brace);
- return p.e(E.Missing{}, loc);
+ return newExpr(E.Missing{}, loc);
},
else => {
try p.lexer.unexpected();
diff --git a/src/logger.zig b/src/logger.zig
index e2e14ed48..5f4cf17bf 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -63,7 +63,7 @@ pub const Kind = enum(i8) {
}
};
-pub const Loc = packed struct {
+pub const Loc = struct {
start: i32 = -1,
pub inline fn toNullable(loc: *Loc) ?Loc {
@@ -548,7 +548,7 @@ pub const Msg = struct {
}
};
-pub const Range = packed struct {
+pub const Range = struct {
loc: Loc = Loc.Empty,
len: i32 = 0,
pub const None = Range{ .loc = Loc.Empty, .len = 0 };