aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-03-06 07:35:16 -0800
committerGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-03-06 07:35:16 -0800
commit7c5c6cd5192acde43006070e740bbe51cfd49255 (patch)
tree53f1e3cb999a477791dd76f4f2bedd3c56084756 /src
parent093807391a9563ad36c2b04a286da23d09fad835 (diff)
downloadbun-7c5c6cd5192acde43006070e740bbe51cfd49255.tar.gz
bun-7c5c6cd5192acde43006070e740bbe51cfd49255.tar.zst
bun-7c5c6cd5192acde43006070e740bbe51cfd49255.zip
source maps work for app code in `bun dev`!
Diffstat (limited to 'src')
-rw-r--r--src/bench/string-handling.zig71
-rw-r--r--src/bundler.zig68
-rw-r--r--src/cli/colon_list_type.zig4
-rw-r--r--src/emcc_main.c2
-rw-r--r--src/fs.zig13
-rw-r--r--src/http.zig234
-rw-r--r--src/http/mime_type.zig1
-rw-r--r--src/http/url_path.zig39
-rw-r--r--src/http/websocket.zig27
-rw-r--r--src/js_ast.zig4
-rw-r--r--src/js_lexer/identifier.zig4
-rw-r--r--src/js_printer.zig255
-rw-r--r--src/runtime/hmr.ts67
-rw-r--r--src/string_immutable.zig332
-rw-r--r--src/string_joiner.zig80
-rw-r--r--src/string_mutable.zig35
-rw-r--r--src/thread_pool.zig6
17 files changed, 1090 insertions, 152 deletions
diff --git a/src/bench/string-handling.zig b/src/bench/string-handling.zig
new file mode 100644
index 000000000..b24af19b9
--- /dev/null
+++ b/src/bench/string-handling.zig
@@ -0,0 +1,71 @@
+const strings = @import("strings");
+const std = @import("std");
+
+pub fn main() anyerror!void {
+ const args = try std.process.argsAlloc(std.heap.c_allocator);
+ const filepath = args[args.len - 3];
+ const find = args[args.len - 2];
+ const amount = try std.fmt.parseInt(usize, args[args.len - 1], 10);
+ var file = try std.fs.cwd().openFile(filepath, .{ .mode = .read_only });
+ var contents = try file.readToEndAlloc(std.heap.c_allocator, std.math.maxInt(usize));
+
+ {
+ var timer = try std.time.Timer.start();
+ var index: usize = std.math.maxInt(usize);
+ var j: usize = 0;
+ var i: usize = 0;
+ while (j < amount) : (j += 1) {
+ i = 0;
+ if (strings.indexOf(contents, find)) |k| {
+ i += k;
+ index = k;
+ }
+ }
+
+ if (index == std.math.maxInt(usize)) {
+ std.debug.print("<vec 32> [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) });
+ } else {
+ std.debug.print("<vec 32> [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) });
+ }
+ }
+
+ {
+ var timer = try std.time.Timer.start();
+ var index: usize = std.math.maxInt(usize);
+ var j: usize = 0;
+ var i: usize = 0;
+ while (j < amount) : (j += 1) {
+ i = 0;
+ if (strings.indexOf16(contents, find)) |k| {
+ i += k;
+ index = k;
+ }
+ }
+
+ if (index == std.math.maxInt(usize)) {
+ std.debug.print("<vec 16> [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) });
+ } else {
+ std.debug.print("<vec 16> [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) });
+ }
+ }
+
+ {
+ var timer = try std.time.Timer.start();
+ var index: usize = std.math.maxInt(usize);
+ var j: usize = 0;
+ var i: usize = 0;
+ while (j < amount) : (j += 1) {
+ i = 0;
+ if (std.mem.indexOf(u8, contents, find)) |k| {
+ i += k;
+ index = k;
+ }
+ }
+
+ if (index == std.math.maxInt(usize)) {
+ std.debug.print("<std> [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) });
+ } else {
+ std.debug.print("<std> [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) });
+ }
+ }
+}
diff --git a/src/bundler.zig b/src/bundler.zig
index 437dd3c9d..8910403f9 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -2277,6 +2277,8 @@ pub const Bundler = struct {
watcher: *WatcherType,
client_entry_point: ?*EntryPoints.ClientEntryPoint,
origin: URL,
+ comptime is_source_map: bool,
+ source_map_handler: ?js_printer.SourceMapHandler,
) !BuildResolveResultPair {
if (resolve_result.is_external) {
return BuildResolveResultPair{
@@ -2394,17 +2396,21 @@ pub const Bundler = struct {
if (bundler.options.platform.isBun()) {
return BuildResolveResultPair{
.written = switch (result.ast.exports_kind) {
- .esm => try bundler.print(
+ .esm => try bundler.printWithSourceMapMaybe(
result,
Writer,
writer,
.esm_ascii,
+ is_source_map,
+ source_map_handler,
),
- .cjs => try bundler.print(
+ .cjs => try bundler.printWithSourceMapMaybe(
result,
Writer,
writer,
.cjs_ascii,
+ is_source_map,
+ source_map_handler,
),
else => unreachable,
},
@@ -2414,17 +2420,21 @@ pub const Bundler = struct {
return BuildResolveResultPair{
.written = switch (result.ast.exports_kind) {
- .none, .esm => try bundler.print(
+ .none, .esm => try bundler.printWithSourceMapMaybe(
result,
Writer,
writer,
.esm,
+ is_source_map,
+ source_map_handler,
),
- .cjs => try bundler.print(
+ .cjs => try bundler.printWithSourceMapMaybe(
result,
Writer,
writer,
.cjs,
+ is_source_map,
+ source_map_handler,
),
else => unreachable,
},
@@ -2598,12 +2608,14 @@ pub const Bundler = struct {
return output_file;
}
- pub fn print(
+ pub fn printWithSourceMapMaybe(
bundler: *ThisBundler,
result: ParseResult,
comptime Writer: type,
writer: Writer,
comptime format: js_printer.Format,
+ comptime enable_source_map: bool,
+ source_map_context: ?js_printer.SourceMapHandler,
) !usize {
const ast = result.ast;
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
@@ -2622,9 +2634,11 @@ pub const Bundler = struct {
.runtime_imports = ast.runtime_imports,
.require_ref = ast.require_ref,
.css_import_behavior = bundler.options.cssImportBehavior(),
+ .source_map_handler = source_map_context,
},
Linker,
&bundler.linker,
+ enable_source_map,
),
.esm => try js_printer.printAst(
@@ -2639,11 +2653,12 @@ pub const Bundler = struct {
.externals = ast.externals,
.runtime_imports = ast.runtime_imports,
.require_ref = ast.require_ref,
-
+ .source_map_handler = source_map_context,
.css_import_behavior = bundler.options.cssImportBehavior(),
},
Linker,
&bundler.linker,
+ enable_source_map,
),
.esm_ascii => try js_printer.printAst(
Writer,
@@ -2657,11 +2672,12 @@ pub const Bundler = struct {
.externals = ast.externals,
.runtime_imports = ast.runtime_imports,
.require_ref = ast.require_ref,
-
.css_import_behavior = bundler.options.cssImportBehavior(),
+ .source_map_handler = source_map_context,
},
Linker,
&bundler.linker,
+ enable_source_map,
),
.cjs_ascii => try js_printer.printCommonJS(
Writer,
@@ -2676,13 +2692,50 @@ pub const Bundler = struct {
.runtime_imports = ast.runtime_imports,
.require_ref = ast.require_ref,
.css_import_behavior = bundler.options.cssImportBehavior(),
+ .source_map_handler = source_map_context,
},
Linker,
&bundler.linker,
+ enable_source_map,
),
};
}
+ pub fn print(
+ bundler: *ThisBundler,
+ result: ParseResult,
+ comptime Writer: type,
+ writer: Writer,
+ comptime format: js_printer.Format,
+ ) !usize {
+ return bundler.printWithSourceMapMaybe(
+ result,
+ Writer,
+ writer,
+ format,
+ false,
+ null,
+ );
+ }
+
+ pub fn printWithSourceMap(
+ bundler: *ThisBundler,
+ result: ParseResult,
+ comptime Writer: type,
+ writer: Writer,
+ comptime format: js_printer.Format,
+ handler: js_printer.SourceMapHandler,
+ ) !usize {
+ return bundler.printWithSourceMapMaybe(
+ result,
+ Writer,
+ writer,
+ format,
+ true,
+ handler,
+ );
+ }
+
pub const ParseOptions = struct {
allocator: std.mem.Allocator,
dirname_fd: StoredFileDescriptorType,
@@ -3447,6 +3500,7 @@ pub const Transformer = struct {
},
?*anyopaque,
null,
+ false,
);
},
else => {
diff --git a/src/cli/colon_list_type.zig b/src/cli/colon_list_type.zig
index 73a3f1299..8290681ef 100644
--- a/src/cli/colon_list_type.zig
+++ b/src/cli/colon_list_type.zig
@@ -26,8 +26,8 @@ pub fn ColonListType(comptime t: type, value_resolver: anytype) type {
// Support either ":" or "=" as the separator, preferring whichever is first.
// ":" is less confusing IMO because that syntax is used with flags
// but "=" is what esbuild uses and I want this to be somewhat familiar for people using esbuild
- const midpoint = @minimum(strings.indexOfChar(str, ':') orelse std.math.maxInt(usize), strings.indexOfChar(str, '=') orelse std.math.maxInt(usize));
- if (midpoint == std.math.maxInt(usize)) {
+ const midpoint = @minimum(strings.indexOfChar(str, ':') orelse std.math.maxInt(u32), strings.indexOfChar(str, '=') orelse std.math.maxInt(u32));
+ if (midpoint == std.math.maxInt(u32)) {
return error.InvalidSeparator;
}
diff --git a/src/emcc_main.c b/src/emcc_main.c
index feb99b58a..225bf801c 100644
--- a/src/emcc_main.c
+++ b/src/emcc_main.c
@@ -1,5 +1,5 @@
#define STBI_ASSERT(x)
-#include <stdint.h>out.w
+#include <stdint.h>
#include <stdlib.h>
diff --git a/src/fs.zig b/src/fs.zig
index d5a100448..57f3855c2 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -616,6 +616,11 @@ pub const FileSystem = struct {
rfs.entries.remove(file_path);
}
+ pub const Limit = struct {
+ pub var handles: usize = 0;
+ pub var stack: usize = 0;
+ };
+
// Always try to max out how many files we can keep open
pub fn adjustUlimit() !usize {
const LIMITS = [_]std.os.rlimit_resource{ std.os.rlimit_resource.STACK, std.os.rlimit_resource.NOFILE };
@@ -627,7 +632,13 @@ pub const FileSystem = struct {
new_limit.cur = limit.max;
new_limit.max = limit.max;
- try std.os.setrlimit(limit_type, new_limit);
+ if (std.os.setrlimit(limit_type, new_limit)) {
+ if (i == 1) {
+ Limit.handles = limit.max;
+ } else {
+ Limit.stack = limit.max;
+ }
+ } else |_| {}
}
if (i == LIMITS.len - 1) return limit.max;
diff --git a/src/http.zig b/src/http.zig
index 45d56a70a..5c91793fc 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -40,6 +40,7 @@ const ArenaType = Arena;
const JSON = @import("./json_parser.zig");
const DateTime = @import("datetime");
const ThreadPool = @import("thread_pool");
+const SourceMap = @import("./sourcemap/sourcemap.zig");
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
@@ -62,7 +63,7 @@ pub const Headers = picohttp.Headers;
pub const MimeType = @import("./http/mime_type.zig");
const Bundler = bundler.Bundler;
const Websocket = @import("./http/websocket.zig");
-const js_printer = @import("./js_printer.zig");
+const JSPrinter = @import("./js_printer.zig");
const watcher = @import("./watcher.zig");
threadlocal var req_headers_buf: [100]picohttp.Header = undefined;
threadlocal var res_headers_buf: [100]picohttp.Header = undefined;
@@ -237,6 +238,18 @@ pub const RequestContext = struct {
return this.full_url;
}
+ pub fn getFullURLForSourceMap(this: *RequestContext) [:0]const u8 {
+ if (this.full_url.len == 0) {
+ if (this.origin.isAbsolute()) {
+ this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}{s}.map", .{ this.origin.origin, this.request.path }) catch unreachable;
+ } else {
+ this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}.map", .{this.request.path}) catch unreachable;
+ }
+ }
+
+ return this.full_url;
+ }
+
pub fn handleRedirect(this: *RequestContext, url: string) !void {
this.appendHeader("Location", url);
defer this.done();
@@ -316,8 +329,8 @@ pub const RequestContext = struct {
@as(?*bundler.FallbackEntryPoint, &fallback_entry_point),
)) |*result| {
try bundler_.linker.link(fallback_entry_point.source.path, result, this.origin, .absolute_url, false);
- var buffer_writer = try js_printer.BufferWriter.init(default_allocator);
- var writer = js_printer.BufferPrinter.init(buffer_writer);
+ var buffer_writer = try JSPrinter.BufferWriter.init(default_allocator);
+ var writer = JSPrinter.BufferPrinter.init(buffer_writer);
_ = try bundler_.print(
result.*,
@TypeOf(&writer),
@@ -891,7 +904,7 @@ pub const RequestContext = struct {
watcher: *Watcher,
bundler: *Bundler,
allocator: std.mem.Allocator,
- printer: js_printer.BufferPrinter,
+ printer: JSPrinter.BufferPrinter,
timer: std.time.Timer,
count: usize = 0,
origin: ZigURL,
@@ -912,14 +925,8 @@ pub const RequestContext = struct {
};
};
pub fn build(this: *WatchBuilder, id: u32, from_timestamp: u32, allocator: std.mem.Allocator) !WatchBuildResult {
- if (this.count == 0) {
- var writer = try js_printer.BufferWriter.init(this.allocator);
- this.printer = js_printer.BufferPrinter.init(writer);
- this.printer.ctx.append_null_byte = false;
- }
-
defer this.count += 1;
-
+ this.printer.ctx.reset();
var log = logger.Log.init(allocator);
var watchlist_slice = this.watcher.watchlist.slice();
@@ -1621,17 +1628,17 @@ pub const RequestContext = struct {
clone.conn = ctx.conn.*;
try ctx.bundler.clone(server.allocator, &clone.bundler);
ctx.bundler = &clone.bundler;
+
clone.task = .{ .callback = onTask };
clone.message_buffer = try MutableString.init(server.allocator, 0);
clone.ctx.conn = &clone.conn;
clone.ctx.log = logger.Log.init(server.allocator);
clone.ctx.origin = ZigURL.parse(server.allocator.dupe(u8, ctx.origin.href) catch unreachable);
- var printer_writer = try js_printer.BufferWriter.init(server.allocator);
clone.builder = WatchBuilder{
.allocator = server.allocator,
.bundler = ctx.bundler,
- .printer = js_printer.BufferPrinter.init(printer_writer),
+ .printer = undefined,
.timer = ctx.timer,
.watcher = ctx.watcher,
.origin = clone.ctx.origin,
@@ -1710,36 +1717,45 @@ pub const RequestContext = struct {
Output.Source.configureThread();
js_ast.Stmt.Data.Store.create(default_allocator);
js_ast.Expr.Data.Store.create(default_allocator);
+ websocket_handler_caches = CacheSet.init(default_allocator);
+ websocket_printer = JSPrinter.BufferWriter.init(default_allocator) catch unreachable;
+
return null;
}
pub fn onTask(self: *ThreadPool.Task) void {
handle(@fieldParentPtr(WebsocketHandler, "task", self));
}
+ const CacheSet = @import("./cache.zig").Set;
+ threadlocal var websocket_handler_caches: CacheSet = undefined;
+ threadlocal var websocket_printer: JSPrinter.BufferWriter = undefined;
pub fn handle(self: *WebsocketHandler) void {
defer {
js_ast.Stmt.Data.Store.reset();
js_ast.Expr.Data.Store.reset();
}
- self.builder.printer = js_printer.BufferPrinter.init(
- js_printer.BufferWriter.init(self.ctx.allocator) catch unreachable,
+ self.builder.printer = JSPrinter.BufferPrinter.init(
+ websocket_printer,
);
+ self.builder.bundler.resolver.caches = websocket_handler_caches;
_handle(self) catch {};
}
fn _handle(handler: *WebsocketHandler) !void {
var ctx = &handler.ctx;
- defer handler.tombstone = true;
- defer removeWebsocket(handler);
- defer ctx.arena.deinit();
var is_socket_closed = false;
defer {
+ websocket_handler_caches = handler.builder.bundler.resolver.caches;
+ websocket_printer = handler.builder.printer.ctx;
+ handler.tombstone = true;
+ removeWebsocket(handler);
+ ctx.arena.deinit();
if (!is_socket_closed) {
ctx.conn.deinit();
}
+ Output.flush();
}
- defer Output.flush();
handler.checkUpgradeHeaders() catch |err| {
switch (err) {
@@ -1815,6 +1831,7 @@ pub const RequestContext = struct {
Output.prettyErrorln("<r><red>ERR:<r> <b>Websocket failed to write.<r>", .{});
}
}
+
while (!handler.tombstone) {
Output.flush();
@@ -1903,19 +1920,36 @@ pub const RequestContext = struct {
.len = 0,
};
+ // theres an issue where on the 4th or 5th build
+ // sometimes the final byte has incorrect data
+ // we never end up using all those bytes
+ if (handler.message_buffer.list.items.len > 0) {
+ @memset(
+ handler.message_buffer.list.items.ptr,
+ 0,
+ @minimum(handler.message_buffer.list.items.len, 128),
+ );
+ }
+
const build_result = handler.builder.build(request_id, cmd.timestamp, arena.allocator()) catch |err| {
if (err == error.MissingWatchID) {
msg.timestamp = cmd.timestamp;
msg.kind = Api.WebsocketMessageKind.resolve_file;
+
handler.message_buffer.reset();
var buffer_writer = MutableStringAPIWriter.init(&handler.message_buffer);
try msg.encode(&buffer_writer);
- _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
const resolve_id = Api.WebsocketMessageResolveId{ .id = request_id };
try resolve_id.encode(&buffer_writer);
head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len);
- try handler.websocket.writeHeader(head, handler.message_buffer.list.items.len);
- _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+ var writer = buffer_writer.writable.writer();
+ const body_len = handler.message_buffer.list.items.len;
+ try head.writeHeader(&writer, body_len);
+ const buffers = handler.message_buffer.toSocketBuffers(2, .{
+ .{ body_len, handler.message_buffer.list.items.len },
+ .{ 0, body_len },
+ });
+ _ = try handler.conn.client.writeMessage(std.x.os.Socket.Message.fromBuffers(&buffers), SOCKET_FLAGS);
continue;
}
@@ -1959,25 +1993,43 @@ pub const RequestContext = struct {
handler.message_buffer.reset();
var buffer_writer = MutableStringAPIWriter.init(&handler.message_buffer);
try msg.encode(&buffer_writer);
+ var socket_buffers = std.mem.zeroes([4]std.x.os.Buffer);
+
+ var socket_buffer_count: usize = 2;
switch (build_result.value) {
.success => |success| {
try success.encode(&buffer_writer);
- const total = handler.message_buffer.list.items.len + build_result.bytes.len;
+ const total = handler.message_buffer.list.items.len + build_result.bytes.len + (if (build_result.bytes.len > 0) @as(usize, @sizeOf(u32)) else @as(usize, 0));
+ const first_message_len = handler.message_buffer.list.items.len;
head.len = Websocket.WebsocketHeader.packLength(total);
- try handler.websocket.writeHeader(head, total);
- _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+ try head.writeHeader(&handler.message_buffer.writer(), total);
+ socket_buffers[0] = std.x.os.Buffer.from(handler.message_buffer.list.items[first_message_len..]);
+ socket_buffers[1] = std.x.os.Buffer.from(handler.message_buffer.list.items[0..first_message_len]);
+
if (build_result.bytes.len > 0) {
- _ = try handler.conn.client.write(build_result.bytes, SOCKET_FLAGS);
+ socket_buffers[2] = std.x.os.Buffer.from(build_result.bytes);
+ // we reuse the accept key buffer
+ // so we have a pointer that is not stack memory
+ handler.accept_key[0..@sizeOf(usize)].* = @bitCast([@sizeOf(usize)]u8, std.hash.Wyhash.hash(0, build_result.bytes));
+ socket_buffers[3] = std.x.os.Buffer.from(handler.accept_key[0..4]);
+ socket_buffer_count = 4;
}
},
.fail => |fail| {
try fail.encode(&buffer_writer);
head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len);
- try handler.websocket.writeHeader(head, handler.message_buffer.list.items.len);
- _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+ const first_message_len = handler.message_buffer.list.items.len;
+ try head.writeHeader(&handler.message_buffer.writer(), handler.message_buffer.list.items.len);
+ socket_buffers[0] = std.x.os.Buffer.from(handler.message_buffer.list.items[first_message_len..]);
+ socket_buffers[1] = std.x.os.Buffer.from(handler.message_buffer.list.items[0..first_message_len]);
},
}
+
+ _ = try handler.conn.client.writeMessage(
+ std.x.os.Socket.Message.fromBuffers(socket_buffers[0..socket_buffer_count]),
+ SOCKET_FLAGS,
+ );
}
},
else => {
@@ -2106,7 +2158,11 @@ pub const RequestContext = struct {
ctx.to_plain_text = accept.category == .text and strings.eqlComptime(accept.value, "text/plain");
if (!ctx.to_plain_text) {
- ctx.appendHeader("Content-Type", ctx.mime_type.value);
+ if (!ctx.url.is_source_map) {
+ ctx.appendHeader("Content-Type", ctx.mime_type.value);
+ } else {
+ ctx.appendHeader("Content-Type", MimeType.json.value);
+ }
} else {
ctx.appendHeader("Content-Type", "text/plain");
}
@@ -2183,9 +2239,35 @@ pub const RequestContext = struct {
return if (buffer.list.items.len > 1) buffer.list.items[buffer.list.items.len - 2] else 0;
}
+ pub fn getWritten(_: *const SocketPrinterInternal) []u8 {
+ return buffer.list.items;
+ }
+
+ const SourceMapHandler = JSPrinter.SourceMapHandler.For(SocketPrinterInternal, onSourceMapChunk);
+ pub fn onSourceMapChunk(this: *SocketPrinterInternal, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void {
+ if (this.rctx.has_called_done) return;
+ buffer.reset();
+ buffer = try chunk.printSourceMapContents(source, buffer, false);
+ defer buffer.reset();
+ const buf = buffer.toOwnedSliceLeaky();
+ if (buf.len == 0) {
+ try this.rctx.sendNoContent();
+ return;
+ }
+
+ defer this.rctx.done();
+ try this.rctx.writeStatus(200);
+ try this.rctx.prepareToSendBody(buf.len, false);
+ try this.rctx.writeBodyBuf(buf);
+ }
+ pub fn sourceMapHandler(this: *SocketPrinterInternal) JSPrinter.SourceMapHandler {
+ return SourceMapHandler.init(this);
+ }
+
pub fn done(
chunky: *SocketPrinterInternal,
) anyerror!void {
+ if (chunky.rctx.has_called_done) return;
const buf = buffer.toOwnedSliceLeaky();
defer buffer.reset();
@@ -2216,6 +2298,16 @@ pub const RequestContext = struct {
return;
}
+ var source_map_url: string = "";
+ const send_sourcemap_info = chunky._loader.isJavaScriptLike();
+
+ if (send_sourcemap_info) {
+ // This will be cleared by the arena
+ source_map_url = std.mem.span(chunky.rctx.getFullURLForSourceMap());
+
+ chunky.rctx.appendHeader("SourceMap", source_map_url);
+ }
+
// Failed experiment: inject "Link" tags for each import path
// Browsers ignore this header when it's coming from a script import.
// In Chrome, the header appears in the Network tab but doesn't seem to do anything
@@ -2241,8 +2333,20 @@ pub const RequestContext = struct {
defer chunky.rctx.done();
try chunky.rctx.writeStatus(200);
- try chunky.rctx.prepareToSendBody(buf.len, false);
+ const source_map_url_len: usize = if (send_sourcemap_info)
+ "\n//# sourceMappingURL=".len + source_map_url.len + "\n".len
+ else
+ 0;
+ try chunky.rctx.prepareToSendBody(buf.len + source_map_url_len, false);
+
try chunky.rctx.writeBodyBuf(buf);
+
+ if (send_sourcemap_info) {
+ // TODO: use an io vec
+ try chunky.rctx.writeBodyBuf("\n//# sourceMappingURL=");
+ try chunky.rctx.writeBodyBuf(source_map_url);
+ try chunky.rctx.writeBodyBuf("\n");
+ }
}
pub fn flush(
@@ -2250,7 +2354,7 @@ pub const RequestContext = struct {
) anyerror!void {}
};
- const SocketPrinter = js_printer.NewWriter(
+ const SocketPrinter = JSPrinter.NewWriter(
SocketPrinterInternal,
SocketPrinterInternal.writeByte,
SocketPrinterInternal.writeAll,
@@ -2261,7 +2365,7 @@ pub const RequestContext = struct {
);
const loader = ctx.bundler.options.loaders.get(result.file.input.name.ext) orelse .file;
- var chunked_encoder = SocketPrinter.init(
+ var socket_printer = SocketPrinter.init(
SocketPrinterInternal.init(ctx, loader),
);
@@ -2280,20 +2384,40 @@ pub const RequestContext = struct {
}
}
- var written = ctx.bundler.buildWithResolveResult(
- resolve_result,
- ctx.allocator,
- loader,
- SocketPrinter,
- chunked_encoder,
- .absolute_url,
- input_fd,
- hash,
- Watcher,
- ctx.watcher,
- client_entry_point_,
- ctx.origin,
- ) catch |err| {
+ const written = (if (!ctx.url.is_source_map)
+ ctx.bundler.buildWithResolveResult(
+ resolve_result,
+ ctx.allocator,
+ loader,
+ SocketPrinter,
+ socket_printer,
+ .absolute_url,
+ input_fd,
+ hash,
+ Watcher,
+ ctx.watcher,
+ client_entry_point_,
+ ctx.origin,
+ false,
+ null,
+ )
+ else
+ ctx.bundler.buildWithResolveResult(
+ resolve_result,
+ ctx.allocator,
+ loader,
+ SocketPrinter,
+ socket_printer,
+ .absolute_url,
+ input_fd,
+ hash,
+ Watcher,
+ ctx.watcher,
+ client_entry_point_,
+ ctx.origin,
+ true,
+ socket_printer.ctx.sourceMapHandler(),
+ )) catch |err| {
ctx.sendInternalError(err) catch {};
return;
};
@@ -2708,12 +2832,12 @@ pub const RequestContext = struct {
var expr = try JSON.toAST(ctx.allocator, Info, info);
defer ctx.bundler.resetStore();
- var buffer_writer = try js_printer.BufferWriter.init(default_allocator);
+ var buffer_writer = try JSPrinter.BufferWriter.init(default_allocator);
- var writer = js_printer.BufferPrinter.init(buffer_writer);
+ var writer = JSPrinter.BufferPrinter.init(buffer_writer);
defer writer.ctx.buffer.deinit();
var source = logger.Source.initEmptyFile("info.json");
- _ = try js_printer.printJSON(*js_printer.BufferPrinter, &writer, expr, &source);
+ _ = try JSPrinter.printJSON(*JSPrinter.BufferPrinter, &writer, expr, &source);
const buffer = writer.ctx.written;
ctx.appendHeader("Content-Type", MimeType.json.value);
@@ -3032,7 +3156,9 @@ pub const Server = struct {
javascript_enabled: bool = false,
fallback_only: bool = false,
websocket_threadpool: ThreadPool = ThreadPool.init(.{
- .stack_size = 128 * 1024, // `pthread_attr_setstacksize` does not like 128 KB stack size
+ // on macOS, the max stack size is 65520 bytes,
+ // so we ask for 65519
+ .stack_size = 65519,
.max_threads = std.math.maxInt(u32),
}),
@@ -3341,11 +3467,19 @@ pub const Server = struct {
fn run(server: *Server, comptime features: ConnectionFeatures) !void {
_ = Fs.FileSystem.RealFS.adjustUlimit() catch {};
+
RequestContext.WebsocketHandler.open_websockets = @TypeOf(
RequestContext.WebsocketHandler.open_websockets,
).init(server.allocator);
const listener = try tcp.Listener.init(.ip, .{ .close_on_exec = true });
defer listener.deinit();
+ server.websocket_threadpool.stack_size = @truncate(
+ u32,
+ @minimum(
+ @maximum(128_000, Fs.FileSystem.RealFS.Limit.stack),
+ 4_000_000,
+ ),
+ );
listener.setReuseAddress(true) catch {};
listener.setReusePort(false) catch {};
diff --git a/src/http/mime_type.zig b/src/http/mime_type.zig
index 14df12089..73b545721 100644
--- a/src/http/mime_type.zig
+++ b/src/http/mime_type.zig
@@ -178,6 +178,7 @@ const extensions = ComptimeStringMap(MimeType, .{
.{ "mid", MimeType.initComptime("audio/mid", .audio) },
.{ "mid", MimeType.initComptime("audio/mid", .audio) },
.{ "json", MimeType.json },
+ .{ "map", MimeType.json }, // source map
.{ "jpeg", MimeType.initComptime("image/jpeg", .image) },
.{ "aiff", MimeType.initComptime("image/png", .image) },
.{ "tiff", MimeType.initComptime("image/tiff", .image) },
diff --git a/src/http/url_path.zig b/src/http/url_path.zig
index e3e90a60f..0e4f12f83 100644
--- a/src/http/url_path.zig
+++ b/src/http/url_path.zig
@@ -21,6 +21,9 @@ pathname: string = "",
first_segment: string = "",
query_string: string = "",
needs_redirect: bool = false,
+/// Treat URLs as non-sourcemap URLS
+/// Then at the very end, we check.
+is_source_map: bool = false,
pub fn isRoot(this: *const URLPath, asset_prefix: string) bool {
const without = this.pathWithoutAssetPrefix(asset_prefix);
@@ -36,7 +39,12 @@ pub fn pathWithoutAssetPrefix(this: *const URLPath, asset_prefix: string) string
const base = this.path;
const origin = asset_prefix[leading_slash_offset..];
- return if (base.len >= origin.len and strings.eql(base[0..origin.len], origin)) base[origin.len..] else base;
+ const out = if (base.len >= origin.len and strings.eql(base[0..origin.len], origin)) base[origin.len..] else base;
+ if (this.is_source_map and strings.endsWithComptime(out, ".map")) {
+ return out[0 .. out.len - 4];
+ }
+
+ return out;
}
// optimization: very few long strings will be URL-encoded
@@ -50,7 +58,7 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath {
var decoded_pathname = possibly_encoded_pathname_;
var needs_redirect = false;
- if (strings.indexOfChar(decoded_pathname, '%') != null) {
+ if (strings.containsChar(decoded_pathname, '%')) {
var possibly_encoded_pathname = switch (decoded_pathname.len) {
0...1024 => &temp_path_buf,
else => &big_temp_path_buf,
@@ -78,6 +86,7 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath {
var question_mark_i: i16 = -1;
var period_i: i16 = -1;
+
var first_segment_end: i16 = std.math.maxInt(i16);
var last_slash: i16 = -1;
@@ -88,7 +97,7 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath {
switch (c) {
'?' => {
- question_mark_i = std.math.max(question_mark_i, i);
+ question_mark_i = @maximum(question_mark_i, i);
if (question_mark_i < period_i) {
period_i = -1;
}
@@ -98,13 +107,13 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath {
}
},
'.' => {
- period_i = std.math.max(period_i, i);
+ period_i = @maximum(period_i, i);
},
'/' => {
- last_slash = std.math.max(last_slash, i);
+ last_slash = @maximum(last_slash, i);
if (i > 0) {
- first_segment_end = std.math.min(first_segment_end, i);
+ first_segment_end = @minimum(first_segment_end, i);
}
},
else => {},
@@ -115,6 +124,8 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath {
period_i = -1;
}
+ // .js.map
+ // ^
const extname = brk: {
if (question_mark_i > -1 and period_i > -1) {
period_i += 1;
@@ -127,12 +138,22 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath {
}
};
- const path = if (question_mark_i < 0) decoded_pathname[1..] else decoded_pathname[1..@intCast(usize, question_mark_i)];
+ var path = if (question_mark_i < 0) decoded_pathname[1..] else decoded_pathname[1..@intCast(usize, question_mark_i)];
- const first_segment = decoded_pathname[1..std.math.min(@intCast(usize, first_segment_end), decoded_pathname.len)];
+ const first_segment = decoded_pathname[1..@minimum(@intCast(usize, first_segment_end), decoded_pathname.len)];
+ const is_source_map = strings.eqlComptime(extname, "map");
+ var backup_extname: string = extname;
+ if (is_source_map and path.len > ".map".len) {
+ if (std.mem.lastIndexOfScalar(u8, path[0 .. path.len - ".map".len], '.')) |j| {
+ backup_extname = path[j + 1 ..];
+ backup_extname = backup_extname[0 .. backup_extname.len - ".map".len];
+ path = path[0 .. j + backup_extname.len + 1];
+ }
+ }
return URLPath{
- .extname = extname,
+ .extname = if (!is_source_map) extname else backup_extname,
+ .is_source_map = is_source_map,
.pathname = decoded_pathname,
.first_segment = first_segment,
.path = if (decoded_pathname.len == 1) "." else path,
diff --git a/src/http/websocket.zig b/src/http/websocket.zig
index b8a14d77a..106f18433 100644
--- a/src/http/websocket.zig
+++ b/src/http/websocket.zig
@@ -53,6 +53,17 @@ pub const WebsocketHeader = packed struct {
compressed: bool = false, // rsv1
final: bool = true,
+ pub fn writeHeader(header: WebsocketHeader, writer: anytype, n: usize) anyerror!void {
+ try writer.writeIntBig(u16, @bitCast(u16, header));
+
+ // Write extended length if needed
+ switch (n) {
+ 0...126 => {}, // Included in header
+ 127...0xFFFF => try writer.writeIntBig(u16, @truncate(u16, n)),
+ else => try writer.writeIntBig(u64, n),
+ }
+ }
+
pub fn packLength(length: usize) u7 {
return switch (length) {
0...126 => @truncate(u7, length),
@@ -194,22 +205,6 @@ pub const Websocket = struct {
});
}
- pub fn writeHeader(self: *Websocket, header: WebsocketHeader, n: usize) anyerror!void {
- var stream = self.conn.client.writer(self.flags);
-
- try stream.writeIntBig(u16, @bitCast(u16, header));
-
- // Write extended length if needed
- switch (n) {
- 0...126 => {}, // Included in header
- 127...0xFFFF => try stream.writeIntBig(u16, @truncate(u16, n)),
- else => try stream.writeIntBig(u64, n),
- }
-
- // try self.io.flush();
-
- }
-
// Write a raw data frame
pub fn writeDataFrame(self: *Websocket, dataframe: WebsocketDataFrame) anyerror!usize {
var stream = self.conn.client.writer(self.flags);
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 6f73f426a..29fa317ba 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -211,6 +211,10 @@ pub const BindingNodeIndex = Binding;
pub const StmtNodeIndex = Stmt;
pub const ExprNodeIndex = Expr;
+/// This is like ArrayList except it stores the length and capacity as u32
+/// In practice, it is very unusual to have lengths above 4 GB
+///
+/// This lets us have array lists which occupy the same amount of space as a slice
pub fn BabyList(comptime Type: type) type {
return struct {
const ListType = @This();
diff --git a/src/js_lexer/identifier.zig b/src/js_lexer/identifier.zig
index 3a6d6e8b8..3bd2098bb 100644
--- a/src/js_lexer/identifier.zig
+++ b/src/js_lexer/identifier.zig
@@ -16,7 +16,7 @@ pub const Bitset = struct {
pub fn init() void {}
- pub inline fn isIdentifierStart(codepoint: i32) bool {
+ pub fn isIdentifierStart(codepoint: i32) bool {
return codepoint >= (comptime id_start_range[0]) and
codepoint <= (comptime id_start_range[1]) and
id_start.isSet((comptime @intCast(usize, id_start_range[1])) - @intCast(
@@ -38,7 +38,7 @@ pub const Bitset = struct {
return true;
}
- pub inline fn isIdentifierPart(codepoint: i32) bool {
+ pub fn isIdentifierPart(codepoint: i32) bool {
return codepoint >= (comptime id_end_range[0]) and
codepoint <= (comptime id_end_range[1]) and
id_continue.isSet(
diff --git a/src/js_printer.zig b/src/js_printer.zig
index a424f061d..15e6d606f 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -57,6 +57,7 @@ const assert = std.debug.assert;
threadlocal var imported_module_ids_list: std.ArrayList(u32) = undefined;
threadlocal var imported_module_ids_list_unset: bool = true;
const ImportRecord = importRecord.ImportRecord;
+const SourceMap = @import("./sourcemap/sourcemap.zig");
fn notimpl() void {
Global.panic("Not implemented yet!", .{});
@@ -80,27 +81,178 @@ pub fn writeModuleId(comptime Writer: type, writer: Writer, module_id: u32) void
std.fmt.formatInt(module_id, 16, .lower, .{}, writer) catch unreachable;
}
-pub const SourceMapChunk = struct {
- buffer: MutableString,
- end_state: State = State{},
- final_generated_column: usize = 0,
- should_ignore: bool = false,
-
- // Coordinates in source maps are stored using relative offsets for size
- // reasons. When joining together chunks of a source map that were emitted
- // in parallel for different parts of a file, we need to fix up the first
- // segment of each chunk to be relative to the end of the previous chunk.
- pub const State = struct {
- // This isn't stored in the source map. It's only used by the bundler to join
- // source map chunks together correctly.
- generated_line: i32 = 0,
-
- // These are stored in the source map in VLQ format.
- generated_column: i32 = 0,
- source_index: i32 = 0,
- original_line: i32 = 0,
- original_column: i32 = 0,
- };
+pub fn canPrintWithoutEscape(comptime CodePointType: type, c: CodePointType, comptime ascii_only: bool) bool {
+ if (c <= last_ascii) {
+ return c >= first_ascii and c != '\\' and c != '"';
+ } else {
+ return !ascii_only and c != 0xFEFF and (c < first_high_surrogate or c > last_low_surrogate);
+ }
+}
+
+pub fn estimateLengthForJSON(input: []const u8, comptime ascii_only: bool) usize {
+ var remaining = input;
+ var len: u32 = 2; // for quotes
+
+ while (strings.indexOfNeedsEscape(remaining)) |i| {
+ len += i;
+ remaining = remaining[i..];
+ const char_len = strings.wtf8ByteSequenceLength(remaining[0]);
+ const c = strings.decodeWTF8RuneT(remaining.ptr[0..4], char_len, i32, 0);
+ if (canPrintWithoutEscape(i32, c, ascii_only)) {
+ len += @as(u32, char_len);
+ } else if (c <= 0xFFFF) {
+ len += 6;
+ } else {
+ len += 12;
+ }
+ remaining = remaining[char_len..];
+ } else {
+ return @truncate(u32, remaining.len) + 2;
+ }
+
+ return len;
+}
+
+pub fn quoteForJSON(text: []const u8, output_: MutableString, comptime ascii_only: bool) !MutableString {
+ var bytes = output_;
+ try bytes.growIfNeeded(estimateLengthForJSON(text, ascii_only));
+ try bytes.appendChar('"');
+ var i: usize = 0;
+ var n: usize = text.len;
+ while (i < n) {
+ const width = strings.wtf8ByteSequenceLength(text[i]);
+ const c = strings.decodeWTF8RuneT(text.ptr[i .. i + 4][0..4], width, i32, 0);
+ if (canPrintWithoutEscape(i32, c, ascii_only)) {
+ const remain = text[i + @as(usize, width) ..];
+ if (strings.indexOfNeedsEscape(remain)) |j| {
+ try bytes.appendSlice(text[i .. i + j + @as(usize, width)]);
+ i += j + @as(usize, width);
+ continue;
+ } else {
+ try bytes.appendSlice(text[i..]);
+ i = n;
+ break;
+ }
+ }
+ switch (c) {
+ // Special-case the bell character since it may cause dumping this file to
+ // the terminal to make a sound, which is undesirable. Note that we can't
+ // use an octal literal to print this shorter since octal literals are not
+ // allowed in strict mode (or in template strings).
+ 0x07 => {
+ try bytes.appendSlice("\\x07");
+ i += 1;
+ },
+ 0x08 => {
+ try bytes.appendSlice("\\b");
+ i += 1;
+ },
+ 0x0C => {
+ try bytes.appendSlice("\\f");
+ i += 1;
+ },
+ '\n' => {
+ try bytes.appendSlice("\\n");
+ i += 1;
+ },
+ std.ascii.control_code.CR => {
+ try bytes.appendSlice("\\r");
+ i += 1;
+ },
+ // \v
+ std.ascii.control_code.VT => {
+ try bytes.appendSlice("\\v");
+ i += 1;
+ },
+ // "\\"
+ '\\' => {
+ try bytes.appendSlice("\\\\");
+ i += 1;
+ },
+ '"' => {
+ try bytes.appendSlice("\\\"");
+ i += 1;
+ },
+
+ '\t' => {
+ try bytes.appendSlice("\\t");
+ i += 1;
+ },
+
+ else => {
+ i += @as(usize, width);
+
+ if (c < 0xFFFF) {
+ const k = @intCast(usize, c);
+ bytes.ensureUnusedCapacity(6) catch unreachable;
+ const old = bytes.list.items.len;
+ bytes.list.items.len += 6;
+
+ bytes.list.items[old .. old + 6].ptr[0..6].* = [_]u8{
+ '\\',
+ 'u',
+ hex_chars[(k >> 12) & 0xF],
+ hex_chars[(k >> 8) & 0xF],
+ hex_chars[(k >> 4) & 0xF],
+ hex_chars[k & 0xF],
+ };
+ } else {
+ bytes.ensureUnusedCapacity(12) catch unreachable;
+ const old = bytes.list.items.len;
+ bytes.list.items.len += 12;
+
+ const k = c - 0x10000;
+ const lo = @intCast(usize, first_high_surrogate + ((k >> 10) & 0x3FF));
+ const hi = @intCast(usize, first_low_surrogate + (k & 0x3FF));
+
+ bytes.list.items[old .. old + 12][0..12].* = [_]u8{
+ '\\',
+ 'u',
+ hex_chars[lo >> 12],
+ hex_chars[(lo >> 8) & 15],
+ hex_chars[(lo >> 4) & 15],
+ hex_chars[lo & 15],
+ '\\',
+ 'u',
+ hex_chars[hi >> 12],
+ hex_chars[(hi >> 8) & 15],
+ hex_chars[(hi >> 4) & 15],
+ hex_chars[hi & 15],
+ };
+ }
+ },
+ }
+ }
+ bytes.appendChar('"') catch unreachable;
+ return bytes;
+}
+
+test "quoteForJSON" {
+ var allocator = default_allocator;
+ try std.testing.expectEqualStrings("\"I don't need any quotes.\"", try quoteForJSON("I don't need any quotes.", allocator, false));
+ try std.testing.expectEqualStrings("\"I need a quote for \\\"this\\\".\"", try quoteForJSON("I need a quote for \"this\".", allocator, false));
+}
+
+pub const SourceMapHandler = struct {
+ ctx: *anyopaque,
+ callback: Callback,
+
+ const Callback = (fn (*anyopaque, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void);
+ pub fn onSourceMapChunk(self: *const @This(), chunk: SourceMap.Chunk, source: logger.Source) anyerror!void {
+ try self.callback(self.ctx, chunk, source);
+ }
+
+ pub fn For(comptime Type: type, comptime handler: (fn (t: *Type, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void)) type {
+ return struct {
+ pub fn onChunk(self: *anyopaque, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void {
+ try handler(@ptrCast(*Type, @alignCast(@alignOf(*Type), self)), chunk, source);
+ }
+
+ pub fn init(self: *Type) SourceMapHandler {
+ return SourceMapHandler{ .ctx = self, .callback = onChunk };
+ }
+ };
+ }
};
pub const Options = struct {
@@ -114,6 +266,8 @@ pub const Options = struct {
source_path: ?fs.Path = null,
bundle_export_ref: ?Ref = null,
rewrite_require_resolve: bool = true,
+ allocator: std.mem.Allocator = default_allocator,
+ source_map_handler: ?SourceMapHandler = null,
css_import_behavior: Api.CssInJsBehavior = Api.CssInJsBehavior.facade,
@@ -142,7 +296,10 @@ pub const Options = struct {
}
};
-pub const PrintResult = struct { js: string, source_map: ?SourceMapChunk = null };
+pub const PrintResult = struct {
+ js: string,
+ source_map: ?SourceMap.Chunk = null,
+};
// do not make this a packed struct
// stage1 compiler bug:
@@ -247,6 +404,7 @@ pub fn NewPrinter(
comptime bun: bool,
comptime is_inside_bundle: bool,
comptime is_json: bool,
+ comptime generate_source_map: bool,
) type {
return struct {
symbols: Symbol.Map,
@@ -271,6 +429,7 @@ pub fn NewPrinter(
renamer: rename.Renamer,
prev_stmt_tag: Stmt.Tag = .s_empty,
+ source_map_builder: SourceMap.Chunk.Builder = undefined,
const Printer = @This();
@@ -535,7 +694,13 @@ pub fn NewPrinter(
}
// noop for now
- pub fn addSourceMapping(_: *Printer, _: logger.Loc) void {}
+ pub inline fn addSourceMapping(printer: *Printer, location: logger.Loc) void {
+ if (comptime !generate_source_map) {
+ return;
+ }
+
+ printer.source_map_builder.addSourceMapping(location, printer.writer.slice());
+ }
pub fn printSymbol(p: *Printer, ref: Ref) void {
const name = p.renamer.nameForSymbol(ref);
@@ -4066,6 +4231,7 @@ pub fn NewPrinter(
symbols: Symbol.Map,
opts: Options,
linker: ?*Linker,
+ allocator: std.mem.Allocator,
) !Printer {
if (imported_module_ids_list_unset) {
imported_module_ids_list = std.ArrayList(u32).init(default_allocator);
@@ -4074,6 +4240,13 @@ pub fn NewPrinter(
imported_module_ids_list.clearRetainingCapacity();
+ var source_map_builder: SourceMap.Chunk.Builder = undefined;
+
+ if (comptime generate_source_map) {
+ source_map_builder = SourceMap.Chunk.Builder{ .source_map = MutableString.initEmpty(allocator) };
+ source_map_builder.line_offset_tables = SourceMap.LineOffsetTable.generate(allocator, source.contents, @intCast(i32, tree.approximate_newline_count));
+ }
+
return Printer{
.import_records = tree.import_records,
.options = opts,
@@ -4082,6 +4255,7 @@ pub fn NewPrinter(
.linker = linker,
.imported_module_ids = imported_module_ids_list,
.renamer = rename.Renamer.init(symbols, source),
+ .source_map_builder = source_map_builder,
};
}
};
@@ -4451,8 +4625,18 @@ pub fn printAst(
opts: Options,
comptime LinkerType: type,
linker: ?*LinkerType,
+ comptime generate_source_map: bool,
) !usize {
- const PrinterType = NewPrinter(ascii_only, Writer, LinkerType, false, false, false, false);
+ const PrinterType = NewPrinter(
+ ascii_only,
+ Writer,
+ LinkerType,
+ false,
+ false,
+ false,
+ false,
+ generate_source_map,
+ );
var writer = _writer;
var printer = try PrinterType.init(
@@ -4462,6 +4646,7 @@ pub fn printAst(
symbols,
opts,
linker,
+ opts.allocator,
);
defer {
imported_module_ids_list = printer.imported_module_ids;
@@ -4484,6 +4669,12 @@ pub fn printAst(
}
}
+ if (comptime generate_source_map) {
+ if (opts.source_map_handler) |handler| {
+ try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source.*);
+ }
+ }
+
try printer.writer.done();
return @intCast(usize, @maximum(printer.writer.written, 0));
@@ -4495,7 +4686,7 @@ pub fn printJSON(
expr: Expr,
source: *const logger.Source,
) !usize {
- const PrinterType = NewPrinter(false, Writer, void, false, false, false, true);
+ const PrinterType = NewPrinter(false, Writer, void, false, false, false, true, false);
var writer = _writer;
var s_expr = S.SExpr{ .value = expr };
var stmt = Stmt{ .loc = logger.Loc.Empty, .data = .{
@@ -4511,6 +4702,7 @@ pub fn printJSON(
std.mem.zeroes(Symbol.Map),
.{},
null,
+ undefined,
);
printer.printExpr(expr, Level.lowest, ExprFlag.Set{});
@@ -4532,8 +4724,9 @@ pub fn printCommonJS(
opts: Options,
comptime LinkerType: type,
linker: ?*LinkerType,
+ comptime generate_source_map: bool,
) !usize {
- const PrinterType = NewPrinter(ascii_only, Writer, LinkerType, true, false, false, false);
+ const PrinterType = NewPrinter(ascii_only, Writer, LinkerType, true, false, false, false, generate_source_map);
var writer = _writer;
var printer = try PrinterType.init(
writer,
@@ -4542,6 +4735,7 @@ pub fn printCommonJS(
symbols,
opts,
linker,
+ opts.allocator,
);
defer {
imported_module_ids_list = printer.imported_module_ids;
@@ -4567,6 +4761,12 @@ pub fn printCommonJS(
// Add a couple extra newlines at the end
printer.writer.print(@TypeOf("\n\n"), "\n\n");
+ if (comptime generate_source_map) {
+ if (opts.source_map_handler) |handler| {
+ try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source.*);
+ }
+ }
+
try printer.writer.done();
return @intCast(usize, @maximum(printer.writer.written, 0));
@@ -4594,7 +4794,7 @@ pub fn printCommonJSThreaded(
comptime getPos: fn (ctx: GetPosType) anyerror!u64,
end_off_ptr: *u32,
) !WriteResult {
- const PrinterType = NewPrinter(ascii_only, Writer, LinkerType, true, false, true, false);
+ const PrinterType = NewPrinter(ascii_only, Writer, LinkerType, true, false, true, false, false);
var writer = _writer;
var printer = try PrinterType.init(
writer,
@@ -4603,6 +4803,7 @@ pub fn printCommonJSThreaded(
symbols,
opts,
linker,
+ undefined,
);
defer {
diff --git a/src/runtime/hmr.ts b/src/runtime/hmr.ts
index fa83695e9..a52e387d9 100644
--- a/src/runtime/hmr.ts
+++ b/src/runtime/hmr.ts
@@ -361,9 +361,16 @@ if (typeof window !== "undefined") {
) {
const start = performance.now();
var update = this.findCSSLinkTag(build.id);
+ // The last 4 bytes of the build message are the hash of the module
+ // Currently, this hash is only used for ensuring we reload the source-map
+ var end = buffer.index + build.blob_length;
+ if (end > buffer.data.length && end > 4) {
+ end = buffer.data.length - 4;
+ }
+
let bytes =
buffer.data.length > buffer.index
- ? buffer.data.subarray(buffer.index)
+ ? buffer.data.subarray(buffer.index, end)
: new Uint8Array(0);
if (update === null) {
__hmrlog.debug("Skipping unused CSS.");
@@ -853,16 +860,36 @@ if (typeof window !== "undefined") {
__hmrlog.debug("Preparing to reload", filepath);
}
+ // The last 4 bytes of the build message are the hash of the module
+ // Currently, this hash is only used for ensuring we reload the source-map
+ var end = buffer.index + build.blob_length;
+ var hash = 0;
+ if (end > buffer.data.length && end > 4) {
+ end = buffer.data.length - 4;
+ }
+
+ if (end > 4 && buffer.data.length >= end + 4) {
+ new Uint8Array(this.hashBuffer.buffer).set(
+ buffer.data.subarray(end, end + 4)
+ );
+ hash = this.hashBuffer[0];
+ }
+
+ // These are the bytes!!
+ const fileBytes =
+ buffer.data.length > buffer.index
+ ? buffer.data.subarray(buffer.index, end)
+ : new Uint8Array(0);
+
var reload = new HotReload(
build.id,
index,
build,
- // These are the bytes!!
- buffer.data.length > buffer.index
- ? buffer.data.subarray(buffer.index)
- : new Uint8Array(0),
- ReloadBehavior.hotReload
+ fileBytes,
+ ReloadBehavior.hotReload,
+ hash || 0
);
+
reload.timings.notify = timestamp - build.from_timestamp;
BunError.clear();
@@ -1068,6 +1095,7 @@ if (typeof window !== "undefined") {
buildCommandBuf = new Uint8Array(9);
buildCommandUArray = new Uint32Array(1);
buildCommandUArrayEight = new Uint8Array(this.buildCommandUArray.buffer);
+ hashBuffer = new Uint32Array(1);
// lazily allocate because it's going to be much larger than 9 bytes
buildCommandBufWithFilePath: Uint8Array;
@@ -1254,6 +1282,7 @@ if (typeof window !== "undefined") {
module_id: number = 0;
module_index: number = 0;
build: API.WebsocketMessageBuildSuccess;
+ hash: number = 0 | 0;
timings = {
notify: 0,
@@ -1272,13 +1301,15 @@ if (typeof window !== "undefined") {
module_index: HotReload["module_index"],
build: HotReload["build"],
bytes: Uint8Array,
- reloader: ReloadBehavior
+ reloader: ReloadBehavior,
+ hash: number
) {
this.module_id = module_id;
this.module_index = module_index;
this.build = build;
this.bytes = bytes;
this.reloader = reloader;
+ this.hash = hash;
}
async run() {
@@ -1306,8 +1337,28 @@ if (typeof window !== "undefined") {
var oldModule = HMRModule.dependencies.modules[this.module_index];
HMRModule.dependencies = orig_deps.fork(this.module_index);
var blobURL = null;
+
+ // We inject the source map URL into the end of the file.
+ // We do that here for a few reasons:
+ // 1. It is hard to correctly set the path in here to what the browser expects.
+ // 2.
+ const modulePathWithoutLeadingSlash =
+ this.build.module_path.length > 0 && this.build.module_path[0] === "/"
+ ? this.build.module_path.substring(1)
+ : this.build.module_path;
+ const sourceMapURL =
+ this.hash > 0 && this.build.module_path.length > 0
+ ? `\n//# sourceMappingURL=${
+ // location.origin does not have a trailing slash
+ globalThis.location.origin
+ }/${modulePathWithoutLeadingSlash}.map?b=${this.hash.toString(16)}`
+ : "";
+
try {
- const blob = new Blob([this.bytes], { type: "text/javascript" });
+ const blob = new Blob(
+ sourceMapURL.length > 0 ? [this.bytes, sourceMapURL] : [this.bytes],
+ { type: "text/javascript" }
+ );
blobURL = URL.createObjectURL(blob);
HMRModule.dependencies.blobToID.set(blobURL, this.module_id);
await import(blobURL);
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index 9e90093af..28a4ba11d 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -33,8 +33,14 @@ pub inline fn indexAnyComptime(target: string, comptime chars: string) ?usize {
return null;
}
-pub inline fn indexOfChar(self: string, char: u8) ?usize {
- return std.mem.indexOfScalar(@TypeOf(char), self, char);
+pub fn repeatingAlloc(allocator: std.mem.Allocator, count: usize, char: u8) ![]u8 {
+ var buf = try allocator.alloc(u8, count);
+ repeatingBuf(buf, char);
+ return buf;
+}
+
+pub fn repeatingBuf(self: []u8, char: u8) void {
+ @memset(self.ptr, char, self.len);
}
pub fn indexOfCharNeg(self: string, char: u8) i32 {
@@ -62,6 +68,43 @@ pub inline fn indexOf(self: string, str: string) ?usize {
return std.mem.indexOf(u8, self, str);
}
+// --
+// This is faster when the string is found, by about 2x for a 4 MB file.
+// It is slower when the string is NOT found
+// fn indexOfPosN(comptime T: type, buf: []const u8, start_index: usize, delimiter: []const u8, comptime n: comptime_int) ?usize {
+// const k = delimiter.len;
+// const V8x32 = @Vector(n, T);
+// const V1x32 = @Vector(n, u1);
+// const Vbx32 = @Vector(n, bool);
+// const first = @splat(n, delimiter[0]);
+// const last = @splat(n, delimiter[k - 1]);
+
+// var end: usize = start_index + n;
+// var start: usize = end - n;
+// while (end < buf.len) {
+// start = end - n;
+// const last_end = @minimum(end + k - 1, buf.len);
+// const last_start = last_end - n;
+
+// // Look for the first character in the delimiter
+// const first_chunk: V8x32 = buf[start..end][0..n].*;
+// const last_chunk: V8x32 = buf[last_start..last_end][0..n].*;
+// const mask = @bitCast(V1x32, first == first_chunk) & @bitCast(V1x32, last == last_chunk);
+
+// if (@reduce(.Or, mask) != 0) {
+// // TODO: Use __builtin_clz???
+// for (@as([n]bool, @bitCast(Vbx32, mask))) |match, i| {
+// if (match and eqlLong(buf[start + i .. start + i + k], delimiter, false)) {
+// return start + i;
+// }
+// }
+// }
+// end = @minimum(end + n, buf.len);
+// }
+// if (start < buf.len) return std.mem.indexOfPos(T, buf, start_index, delimiter);
+// return null; // Not found
+// }
+
pub fn cat(allocator: std.mem.Allocator, first: string, second: string) !string {
var out = try allocator.alloc(u8, first.len + second.len);
std.mem.copy(u8, out, first);
@@ -184,6 +227,42 @@ pub fn copyLowercase(in: string, out: []u8) string {
return out[0..in.len];
}
+test "indexOf" {
+ const fixtures = .{
+ .{
+ "0123456789",
+ "456",
+ },
+ .{
+ "/foo/bar/baz/bacon/eggs/lettuce/tomatoe",
+ "bacon",
+ },
+ .{
+ "/foo/bar/baz/bacon////eggs/lettuce/tomatoe",
+ "eggs",
+ },
+ .{
+ "////////////////zfoo/bar/baz/bacon/eggs/lettuce/tomatoe",
+ "/",
+ },
+ .{
+ "/okay/well/thats/even/longer/now/well/thats/even/longer/now/well/thats/even/longer/now/foo/bar/baz/bacon/eggs/lettuce/tomatoe",
+ "/tomatoe",
+ },
+ .{
+ "/okay///////////so much length i can't believe it!much length i can't believe it!much length i can't believe it!much length i can't believe it!much length i can't believe it!much length i can't believe it!much length i can't believe it!much length i can't believe it!/well/thats/even/longer/now/well/thats/even/longer/now/well/thats/even/longer/now/foo/bar/baz/bacon/eggs/lettuce/tomatoe",
+ "/tomatoe",
+ },
+ };
+
+ inline for (fixtures) |pair| {
+ try std.testing.expectEqual(
+ indexOf(pair[0], pair[1]).?,
+ std.mem.indexOf(u8, pair[0], pair[1]).?,
+ );
+ }
+}
+
test "eqlComptimeCheckLen" {
try std.testing.expectEqual(eqlComptime("bun-darwin-aarch64.zip", "bun-darwin-aarch64.zip"), true);
const sizes = [_]u8{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 23, 22, 24 };
@@ -360,6 +439,38 @@ pub fn eqlAnyComptime(self: string, comptime list: []const string) bool {
return false;
}
+/// Count the occurrences of a character in an ASCII byte array
+/// uses SIMD
+pub fn countChar(self: string, char: u8) usize {
+ var total: usize = 0;
+ var remaining = self;
+
+ const splatted: AsciiVector = @splat(ascii_vector_size, char);
+
+ while (remaining.len >= 16) {
+ const vec: AsciiVector = remaining[0..ascii_vector_size].*;
+ const cmp = @popCount(std.meta.Int(.unsigned, ascii_vector_size), @bitCast(@Vector(ascii_vector_size, u1), vec == splatted));
+ total += @as(usize, @reduce(.Add, cmp));
+ remaining = remaining[ascii_vector_size..];
+ }
+
+ while (remaining.len > 0) {
+ total += @as(usize, @boolToInt(remaining[0] == char));
+ remaining = remaining[1..];
+ }
+
+ return total;
+}
+
+test "countChar" {
+ try std.testing.expectEqual(countChar("hello there", ' '), 1);
+ try std.testing.expectEqual(countChar("hello;;;there", ';'), 3);
+ try std.testing.expectEqual(countChar("hello there", 'z'), 0);
+ try std.testing.expectEqual(countChar("hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there ", ' '), 28);
+ try std.testing.expectEqual(countChar("hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there ", 'z'), 0);
+ try std.testing.expectEqual(countChar("hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there hello there", ' '), 27);
+}
+
pub fn endsWithAnyComptime(self: string, comptime str: string) bool {
if (comptime str.len < 10) {
const last = self[self.len - 1];
@@ -1167,6 +1278,11 @@ pub inline fn wtf8ByteSequenceLength(first_byte: u8) u3 {
};
}
+pub fn firstCodePoint(text: []const u8) CodePoint {
+ const len = wtf8ByteSequenceLength(text[0]);
+ return decodeWTF8RuneT(text.ptr[0..4], len, CodePoint, 0);
+}
+
/// 0 == invalid
pub inline fn wtf8ByteSequenceLengthWithInvalid(first_byte: u8) u3 {
return switch (first_byte) {
@@ -1219,11 +1335,15 @@ pub inline fn decodeWTF8RuneTMultibyte(p: *const [4]u8, len: u3, comptime T: typ
unreachable;
}
-const ascii_vector_size = 16;
-const ascii_u16_vector_size = 8;
+const ascii_vector_size = if (Environment.isWasm) 8 else 16;
+const ascii_u16_vector_size = if (Environment.isWasm) 4 else 8;
+const AsciiVectorInt = std.meta.Int(.unsigned, ascii_vector_size);
+const AsciiVectorIntU16 = std.meta.Int(.unsigned, ascii_u16_vector_size);
const max_16_ascii = @splat(ascii_vector_size, @as(u8, 127));
+const min_16_ascii = @splat(ascii_vector_size, @as(u8, 0x20));
const max_u16_ascii = @splat(ascii_u16_vector_size, @as(u16, 127));
const AsciiVector = std.meta.Vector(ascii_vector_size, u8);
+const AsciiVectorU1 = std.meta.Vector(ascii_vector_size, u1);
const AsciiU16Vector = std.meta.Vector(ascii_u16_vector_size, u16);
const max_4_ascii = @splat(4, @as(u8, 127));
pub fn isAllASCII(slice: []const u8) bool {
@@ -1294,26 +1414,46 @@ pub fn firstNonASCII(slice: []const u8) ?u32 {
var remaining = slice;
if (comptime Environment.isAarch64 or Environment.isX64) {
- while (remaining.len >= 128) {
- comptime var count: usize = 0;
- inline while (count < 8) : (count += 1) {
- const vec: AsciiVector = remaining[(comptime count * ascii_vector_size)..][0..ascii_vector_size].*;
- const cmp = vec > max_16_ascii;
- const bitmask = @ptrCast(*const u16, &cmp).*;
- const first = @ctz(u16, bitmask);
- if (first < 16) {
- return @intCast(u32, (comptime count * ascii_vector_size) + @as(u32, first) + @intCast(u32, slice.len - remaining.len));
- }
+ while (remaining.len >= ascii_vector_size) {
+ const vec: AsciiVector = remaining[0..ascii_vector_size].*;
+ const cmp = vec > max_16_ascii;
+ const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*;
+ const first = @ctz(AsciiVectorInt, bitmask);
+ if (first < ascii_vector_size) {
+ return @as(u32, first) + @intCast(u32, slice.len - remaining.len);
}
- remaining = remaining[comptime ascii_vector_size * count..];
+
+ remaining = remaining[ascii_vector_size..];
+ }
+ }
+
+ for (remaining) |char, i| {
+ if (char > 127) {
+ return @truncate(u32, i + (slice.len - remaining.len));
}
+ }
+
+ return null;
+}
+
+pub fn indexOfNeedsEscape(slice: []const u8) ?u32 {
+ var remaining = slice;
+ if (remaining.len == 0)
+ return null;
+
+ if (remaining[0] > 127 or remaining[0] < 0x20 or remaining[0] == '\\' or remaining[0] == '"') {
+ return 0;
+ }
+ if (comptime Environment.isAarch64 or Environment.isX64) {
while (remaining.len >= ascii_vector_size) {
const vec: AsciiVector = remaining[0..ascii_vector_size].*;
- const cmp = vec > max_16_ascii;
- const bitmask = @ptrCast(*const u16, &cmp).*;
- const first = @ctz(u16, bitmask);
- if (first < 16) {
+ const cmp = @bitCast(AsciiVectorU1, (vec > max_16_ascii)) | @bitCast(AsciiVectorU1, (vec < min_16_ascii)) |
+ @bitCast(AsciiVectorU1, vec == @splat(ascii_vector_size, @as(u8, '\\'))) |
+ @bitCast(AsciiVectorU1, vec == @splat(ascii_vector_size, @as(u8, '"')));
+ const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*;
+ const first = @ctz(AsciiVectorInt, bitmask);
+ if (first < ascii_vector_size) {
return @as(u32, first) + @intCast(u32, slice.len - remaining.len);
}
@@ -1322,7 +1462,45 @@ pub fn firstNonASCII(slice: []const u8) ?u32 {
}
for (remaining) |char, i| {
- if (char > 127) {
+ if (char > 127 or char < 0x20 or char == '\\' or char == '"') {
+ return @truncate(u32, i + (slice.len - remaining.len));
+ }
+ }
+
+ return null;
+}
+
+test "indexOfNeedsEscape" {
+ const out = indexOfNeedsEscape(
+ \\la la la la la la la la la la la la la la la la "oh!" okay "well"
+ ,
+ );
+ try std.testing.expectEqual(out.?, 48);
+}
+
+pub fn indexOfChar(slice: []const u8, char: u8) ?u32 {
+ var remaining = slice;
+ if (remaining.len == 0)
+ return null;
+
+ if (remaining[0] == char)
+ return 0;
+
+ if (comptime Environment.isAarch64 or Environment.isX64) {
+ while (remaining.len >= ascii_vector_size) {
+ const vec: AsciiVector = remaining[0..ascii_vector_size].*;
+ const cmp = vec == @splat(ascii_vector_size, char);
+ const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*;
+ const first = @ctz(AsciiVectorInt, bitmask);
+ if (first < 16) {
+ return @intCast(u32, @as(u32, first) + @intCast(u32, slice.len - remaining.len));
+ }
+ remaining = remaining[ascii_vector_size..];
+ }
+ }
+
+ for (remaining) |c, i| {
+ if (c == char) {
return @truncate(u32, i + (slice.len - remaining.len));
}
}
@@ -1330,6 +1508,102 @@ pub fn firstNonASCII(slice: []const u8) ?u32 {
return null;
}
+test "indexOfChar" {
+ const pairs = .{
+ .{
+ "fooooooboooooofoooooofoooooofoooooofoooooozball",
+ 'b',
+ },
+ .{
+ "foooooofoooooofoooooofoooooofoooooofoooooozball",
+ 'z',
+ },
+ .{
+ "foooooofoooooofoooooofoooooofoooooofoooooozball",
+ 'a',
+ },
+ .{
+ "foooooofoooooofoooooofoooooofoooooofoooooozball",
+ 'l',
+ },
+ .{
+ "baconaopsdkaposdkpaosdkpaosdkaposdkpoasdkpoaskdpoaskdpoaskdpo;",
+ ';',
+ },
+ .{
+ ";baconaopsdkaposdkpaosdkpaosdkaposdkpoasdkpoaskdpoaskdpoaskdpo;",
+ ';',
+ },
+ };
+ inline for (pairs) |pair| {
+ try std.testing.expectEqual(
+ indexOfChar(pair.@"0", pair.@"1").?,
+ @truncate(u32, std.mem.indexOfScalar(u8, pair.@"0", pair.@"1").?),
+ );
+ }
+}
+
+pub fn indexOfNotChar(slice: []const u8, char: u8) ?u32 {
+ var remaining = slice;
+ if (remaining.len == 0)
+ return null;
+
+ if (remaining[0] != char)
+ return 0;
+
+ if (comptime Environment.isAarch64 or Environment.isX64) {
+ while (remaining.len >= ascii_vector_size) {
+ const vec: AsciiVector = remaining[0..ascii_vector_size].*;
+ const cmp = vec != @splat(ascii_vector_size, char);
+ const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*;
+ const first = @ctz(AsciiVectorInt, bitmask);
+ if (first < ascii_vector_size) {
+ return @as(u32, first) + @intCast(u32, slice.len - remaining.len);
+ }
+
+ remaining = remaining[ascii_vector_size..];
+ }
+ }
+
+ while (remaining.len > 0) {
+ if (remaining[0] != char) {
+ return @truncate(u32, (slice.len - remaining.len));
+ }
+ remaining = remaining[1..];
+ }
+
+ return null;
+}
+
+pub fn trimLeadingChar(slice: []const u8, char: u8) []const u8 {
+ if (indexOfNotChar(slice, char)) |i| {
+ return slice[i..];
+ }
+
+ return "";
+}
+
+pub fn containsAnyBesidesChar(bytes: []const u8, char: u8) bool {
+ var remain = bytes;
+ while (remain.len >= ascii_vector_size) {
+ const vec: AsciiVector = remain[0..ascii_vector_size].*;
+ const comparator = @splat(ascii_vector_size, char);
+ remain = remain[ascii_vector_size..];
+ if ((@reduce(.Or, vec != comparator))) {
+ return true;
+ }
+ }
+
+ while (remain.len > 0) {
+ if (remain[0] != char) {
+ return true;
+ }
+ remain = remain[1..];
+ }
+
+ return bytes.len > 0;
+}
+
pub fn firstNonASCII16(comptime Slice: type, slice: Slice) ?u32 {
var remaining = slice;
@@ -1372,6 +1646,24 @@ pub fn firstNonASCII16(comptime Slice: type, slice: Slice) ?u32 {
return null;
}
+test "indexOfNotChar" {
+ {
+ const yes = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ try std.testing.expectEqual(indexOfNotChar(yes, 'a').?, 36);
+ }
+ {
+ const yes = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ try std.testing.expectEqual(indexOfNotChar(yes, 'a').?, 108);
+ }
+}
+
+test "trimLeadingChar" {
+ {
+ const yes = " fooo bar";
+ try std.testing.expectEqualStrings(trimLeadingChar(yes, ' '), "fooo bar");
+ }
+}
+
test "isAllASCII" {
const yes = "aspdokasdpokasdpokasd aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasd aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasd aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasd aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123aspdokasdpokasdpokasdaspdokasdpokasdpokasdaspdokasdpokasdpokasd123123";
try std.testing.expectEqual(true, isAllASCII(yes));
diff --git a/src/string_joiner.zig b/src/string_joiner.zig
new file mode 100644
index 000000000..d49c32867
--- /dev/null
+++ b/src/string_joiner.zig
@@ -0,0 +1,80 @@
+/// Rope-like data structure for joining many small strings into one big string.
+const Joiner = @This();
+
+const string = @import("string_types.zig").string;
+const Allocator = @import("std").mem.Allocator;
+const assert = @import("std").debug.assert;
+const copy = @import("std").mem.copy;
+const Env = @import("./env.zig");
+const ObjectPool = @import("./pool.zig").ObjectPool;
+
+const default_allocator = @import("./global.zig").default_allocator;
+
+/// One link of the rope: a byte slice (borrowed or owned) plus the offset at
+/// which its useful content starts.
+const Joinable = struct {
+    // index of the first byte of `slice` that belongs to the joined output
+    offset: u31 = 0,
+    // when true, `allocator` owns `slice` and done() frees it after copying
+    needs_deinit: bool = false,
+    allocator: std.mem.Allocator = undefined,
+    slice: []const u8 = "",
+
+    // pooled linked-list nodes so append() avoids a heap allocation per part
+    pub const Pool = ObjectPool(Joinable, null, true, 4);
+};
+
+// NOTE(review): last_byte is never written in this file (lastByte() derives
+// the value from `tail` instead) — confirm whether external code sets it.
+last_byte: u8 = 0,
+// running total of bytes appended (see append()); the size done() must allocate
+len: usize = 0,
+
+// singly-linked list of pooled parts, in append order
+head: ?*Joinable.Pool.Node = null,
+tail: ?*Joinable.Pool.Node = null,
+
+/// Concatenate every appended part into one freshly-allocated buffer owned by
+/// the caller. Consumes the rope: each node is released back to the pool, and
+/// owned slices (needs_deinit) are freed with their recorded allocator.
+pub fn done(this: *Joiner, allocator: std.mem.Allocator) ![]u8 {
+    // FIX: was `this.cap`, which is not a field of Joiner — `len` is the
+    // running byte total that append() maintains.
+    var slice = try allocator.alloc(u8, this.len);
+    var remaining = slice;
+    var el_ = this.head;
+    while (el_) |join| {
+        // FIX: was `join.offset`; the offset lives on the node's data
+        // (Joinable), not on the pool node itself.
+        const to_join = join.data.slice[join.data.offset..];
+        @memcpy(remaining.ptr, to_join.ptr, to_join.len);
+
+        remaining = remaining[to_join.len..];
+
+        var prev = join;
+        el_ = join.next;
+        if (prev.data.needs_deinit) {
+            // free the whole original slice (ownership covers all of it,
+            // not just the part past `offset`)
+            prev.data.allocator.free(prev.data.slice);
+            prev.data = Joinable{};
+        }
+        prev.release();
+    }
+
+    // remaining.len is 0 when `len` was accurate; slice otherwise trimmed
+    return slice[0 .. slice.len - remaining.len];
+}
+
+/// Final byte of the most recently appended part, or 0 when the joiner is
+/// empty (or the last part is empty past its offset).
+pub fn lastByte(this: *const Joiner) u8 {
+    const tail = this.tail orelse return 0;
+    const part = tail.data.slice[tail.data.offset..];
+    if (part.len == 0) return 0;
+    return part[part.len - 1];
+}
+
+/// Append `slice[offset..]` as the next part of the rope. When `allocator` is
+/// non-null it is recorded as the owner of `slice`, and done() will free the
+/// slice; otherwise the caller keeps ownership and must keep `slice` alive
+/// until done() runs.
+pub fn append(this: *Joiner, slice: string, offset: u32, allocator: ?std.mem.Allocator) void {
+    const data = slice[offset..];
+    // FIX: `len` is usize — the previous `@truncate(u32, data.len)` silently
+    // under-counted any part larger than 4 GiB.
+    this.len += data.len;
+
+    var new_tail = Joinable.Pool.get(default_allocator);
+    new_tail.data = Joinable{
+        // Joinable.offset is u31; narrow explicitly (u32 does not coerce)
+        .offset = @truncate(u31, offset),
+        .allocator = allocator orelse undefined,
+        .needs_deinit = allocator != null,
+        .slice = slice,
+    };
+
+    var tail = this.tail orelse {
+        // first part: it is both head and tail
+        this.tail = new_tail;
+        this.head = new_tail;
+        return;
+    };
+    tail.next = new_tail;
+    this.tail = new_tail;
+}
+
+const std = @import("std");
diff --git a/src/string_mutable.zig b/src/string_mutable.zig
index e8a46af9b..7ef05fbe7 100644
--- a/src/string_mutable.zig
+++ b/src/string_mutable.zig
@@ -50,6 +50,12 @@ pub const MutableString = struct {
std.ArrayListUnmanaged(u8){} };
}
+    /// Create an empty MutableString; no heap allocation happens until the
+    /// first append/grow.
+    pub fn initEmpty(allocator: std.mem.Allocator) MutableString {
+        return MutableString{ .allocator = allocator, .list = .{} };
+    }
+
+    /// std.ArrayList-style alias for growIfNeeded.
+    pub const ensureUnusedCapacity = growIfNeeded;
+
pub fn initCopy(allocator: std.mem.Allocator, str: anytype) !MutableString {
var mutable = try MutableString.init(allocator, std.mem.len(str));
try mutable.copy(str);
@@ -146,6 +152,10 @@ pub const MutableString = struct {
try self.list.ensureUnusedCapacity(self.allocator, amount);
}
+    /// Append the bytes of `slice`, growing with this string's own allocator.
+    pub inline fn appendSlice(self: *MutableString, slice: []const u8) !void {
+        try self.list.appendSlice(self.allocator, slice);
+    }
+
+
pub inline fn reset(
self: *MutableString,
) void {
@@ -202,23 +212,23 @@ pub const MutableString = struct {
// self.list.swapRemove(i);
// }
-    pub fn containsChar(self: *MutableString, char: u8) bool {
+    /// True if `char` occurs anywhere in the string; read-only (takes *const).
+    pub fn containsChar(self: *const MutableString, char: u8) bool {
         return self.indexOfChar(char) != null;
     }

-    pub fn indexOfChar(self: *MutableString, char: u8) ?usize {
-        return std.mem.indexOfScalar(@TypeOf(char), self.list.items, char);
+    /// Index of the first occurrence of `char`, or null. Delegates to the
+    /// shared `strings` helper instead of std.mem.
+    pub fn indexOfChar(self: *const MutableString, char: u8) ?usize {
+        return strings.indexOfChar(self.list.items, char);
     }

-    pub fn lastIndexOfChar(self: *MutableString, char: u8) ?usize {
-        return std.mem.lastIndexOfScalar(@TypeOf(char), self.list.items, char);
+    /// Index of the last occurrence of `char`, or null.
+    pub fn lastIndexOfChar(self: *const MutableString, char: u8) ?usize {
+        return strings.lastIndexOfChar(self.list.items, char);
     }

-    pub fn lastIndexOf(self: *MutableString, str: u8) ?usize {
-        return std.mem.lastIndexOf(u8, self.list.items, str);
+    /// Same as lastIndexOfChar — `str` is a single byte despite the name.
+    pub fn lastIndexOf(self: *const MutableString, str: u8) ?usize {
+        return strings.lastIndexOfChar(self.list.items, str);
     }
-    pub fn indexOf(self: *MutableString, str: u8) ?usize {
-        return std.mem.indexOf(u8, self.list.items, str);
-    }
+    /// Index of the first occurrence of byte `str`, or null.
+    pub fn indexOf(self: *const MutableString, str: u8) ?usize {
+        // FIX: `str` is a single byte, but std.mem.indexOf requires a
+        // []const u8 needle — a u8 argument cannot type-check there. Use the
+        // scalar char search, consistent with indexOfChar/lastIndexOfChar.
+        return strings.indexOfChar(self.list.items, str);
+    }
@@ -226,6 +236,15 @@ pub const MutableString = struct {
return std.mem.eql(u8, self.list.items, other);
}
+    /// Build `count` socket buffers viewing sub-ranges of this string.
+    /// `ranges` is a comptime-indexable collection of {start, end} pairs.
+    /// The buffers alias self.list.items directly (no copies), so any
+    /// subsequent append/resize invalidates them.
+    pub fn toSocketBuffers(self: *MutableString, comptime count: usize, ranges: anytype) [count]std.x.os.Buffer {
+        var buffers: [count]std.x.os.Buffer = undefined;
+        // inline loop: `count` and the range indices are comptime-known
+        comptime var i: usize = 0;
+        inline while (i < count) : (i += 1) {
+            buffers[i] = std.x.os.Buffer.from(self.list.items[ranges[i][0]..ranges[i][1]]);
+        }
+        return buffers;
+    }
+
pub const BufferedWriter = struct {
context: *MutableString,
buffer: [max]u8 = undefined,
diff --git a/src/thread_pool.zig b/src/thread_pool.zig
index 014519680..64dd7d2de 100644
--- a/src/thread_pool.zig
+++ b/src/thread_pool.zig
@@ -182,7 +182,11 @@ noinline fn notifySlow(self: *ThreadPool, is_waking: bool) void {
// We signaled to spawn a new thread
if (can_wake and sync.spawned < self.max_threads) {
- const spawn_config = std.Thread.SpawnConfig{ .stack_size = self.stack_size };
+            const spawn_config = std.Thread.SpawnConfig{
+                // macOS fails to spawn a thread when the stack size is not a
+                // multiple of page_size. FIX: round UP — the previous
+                // round-to-nearest expression could round the requested size
+                // DOWN (and to zero for sizes below page_size / 2), handing
+                // the thread a smaller stack than asked for.
+                .stack_size = std.mem.alignForward(self.stack_size, std.mem.page_size),
+            };
const thread = std.Thread.spawn(spawn_config, Thread.run, .{self}) catch return self.unregister(null);
// if (self.name.len > 0) thread.setName(self.name) catch {};
return thread.detach();