author     2021-06-14 01:49:53 -0700
committer  2021-06-14 01:49:53 -0700
commit     122ef023dd642af830a5419b6172640ebf8af841 (patch)
tree       8c5a84c20758b174742084fa0a3150490320c9b8
parent     c51c65325faf6692d3eebf92927f56cf35f6b613 (diff)
extremely close!!!!!
Former-commit-id: 44fce3c5e800f3fb3fbc139a38f14eae9e0c0225
-rw-r--r--  demos/simple-react/src/components/button.tsx |   2
-rw-r--r--  src/api/schema.d.ts                          |   2
-rw-r--r--  src/api/schema.js                            |   8
-rw-r--r--  src/api/schema.peechy                        |   5
-rw-r--r--  src/api/schema.zig                           |  21
-rw-r--r--  src/bundler.zig                              |  26
-rw-r--r--  src/cli.zig                                  |   4
-rw-r--r--  src/http.zig                                 | 401
-rw-r--r--  src/http/websocket.zig                       |  55
-rw-r--r--  src/js_ast.zig                               |   6
-rw-r--r--  src/js_parser/imports.zig                    |   1
-rw-r--r--  src/js_parser/js_parser.zig                  | 491
-rw-r--r--  src/js_printer.zig                           |  50
-rw-r--r--  src/linker.zig                               |  21
-rw-r--r--  src/logger.zig                               |  45
-rw-r--r--  src/options.zig                              |  28
-rw-r--r--  src/runtime.zig                              |  21
-rw-r--r--  src/runtime/hmr.ts                           | 300
-rw-r--r--  src/string_mutable.zig                       |   5
-rw-r--r--  src/watcher.zig                              |  12
20 files changed, 1312 insertions(+), 192 deletions(-)
diff --git a/demos/simple-react/src/components/button.tsx b/demos/simple-react/src/components/button.tsx
index 3c62b1816..2b4ae9483 100644
--- a/demos/simple-react/src/components/button.tsx
+++ b/demos/simple-react/src/components/button.tsx
@@ -2,6 +2,6 @@ import React from "react";
 export const Button = ({ label, label2, onClick }) => (
   <div className="Button" onClick={onClick}>
-    <div className="Button-label">{label}111</div>
+    <div className="Button-label">{label}</div>
   </div>
 );
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index 43c8bc47f..902c376ff 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -314,7 +314,7 @@ type uint32 = number;
     loader: Loader;
     module_path: alphanumeric;
     log: Log;
-    bytes: Uint8Array;
+    blob_length: uint32;
   }

   export interface WebsocketMessageBuildFailure {
diff --git a/src/api/schema.js b/src/api/schema.js
index 71fde7436..9fdd5e73d 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -1393,7 +1393,7 @@ function decodeWebsocketMessageBuildSuccess(bb) {
   result["loader"] = Loader[bb.readByte()];
   result["module_path"] = bb.readAlphanumeric();
   result["log"] = decodeLog(bb);
-  result["bytes"] = bb.readByteArray();
+  result["blob_length"] = bb.readUint32();
   return result;
 }

@@ -1436,11 +1436,11 @@ bb.writeByte(encoded);
     throw new Error("Missing required field \"log\"");
   }

-  var value = message["bytes"];
+  var value = message["blob_length"];
   if (value != null) {
-    bb.writeByteArray(value);
+    bb.writeUint32(value);
   } else {
-    throw new Error("Missing required field \"bytes\"");
+    throw new Error("Missing required field \"blob_length\"");
   }
 }
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index 6fa6912ef..48387906b 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -290,9 +290,12 @@ struct WebsocketMessageBuildSuccess {
   alphanumeric module_path;
   Log log;
-  byte[] bytes;
+
+  // This is the length of the blob that immediately follows this message.
+  uint32 blob_length;
 }
+
 struct WebsocketMessageBuildFailure {
   uint32 id;
   uint32 from_timestamp;
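The schema change above is the heart of this commit's wire-format tweak: instead of embedding the compiled module source as a `byte[]` field, the build-success message now carries only a `blob_length`, and the source itself travels as a raw blob immediately after the message inside the same WebSocket frame. A client-side sketch of reading that framing (the `decode*` helper names and the `bb.index` cursor are assumptions based on the generated `src/api/schema.js`, not verbatim from this commit):

```ts
import { ByteBuffer } from "peechy/bb";
import * as API from "../api/schema"; // generated decoders, per this repo's layout

function onBinaryFrame(event: MessageEvent) {
  const data = new Uint8Array(event.data as ArrayBuffer);
  const bb = new ByteBuffer(data);
  const header = API.decodeWebsocketMessage(bb); // { timestamp, kind }
  if (header.kind !== "build_success") return;

  const msg = API.decodeWebsocketMessageBuildSuccess(bb);
  // The compiled source is not a field of the message: it is the next
  // msg.blob_length bytes of the same frame, sliced off without a copy.
  const blob = data.subarray(bb.index, bb.index + msg.blob_length);
  const code = new TextDecoder().decode(blob);
  console.debug("rebuilt", msg.module_path, code.length, "bytes");
}
```

This avoids copying the printed module into an intermediate message buffer on the server, at the cost of the client having to respect the length prefix.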
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 106815345..94c5bb643 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -206,13 +206,16 @@ pub fn Writer(comptime WritableStream: type) type {
         pub fn writeValue(this: *Self, slice: anytype) !void {
             switch (@TypeOf(slice)) {
-                []u8,
+                []u8, []const u8 => {
+                    try this.writeArray(u8, slice);
+                },
+
                 []u16,
                 []u32,
                 []i16,
                 []i32,
                 []i8,
-                []const u8,
+
                 []const u16,
                 []const u32,
                 []const i16,
@@ -276,7 +279,7 @@ pub fn Writer(comptime WritableStream: type) type {
     };
 }

-pub const ByteWriter = Writer(std.io.FixedBufferStream([]u8));
+pub const ByteWriter = Writer(*std.io.FixedBufferStream([]u8));
 pub const FileWriter = Writer(std.fs.File);

 pub const Api = struct {
@@ -1413,8 +1416,8 @@ pub const Api = struct {
         /// log
         log: Log,

-        /// bytes
-        bytes: []const u8,
+        /// blob_length
+        blob_length: u32 = 0,

         pub fn decode(reader: anytype) anyerror!WebsocketMessageBuildSuccess {
             var this = std.mem.zeroes(WebsocketMessageBuildSuccess);

@@ -1424,7 +1427,7 @@ pub const Api = struct {
             this.loader = try reader.readValue(Loader);
             this.module_path = try reader.readValue([]const u8);
             this.log = try reader.readValue(Log);
-            this.bytes = try reader.readArray(u8);
+            this.blob_length = try reader.readValue(u32);
             return this;
         }

@@ -1432,9 +1435,9 @@ pub const Api = struct {
             try writer.writeInt(this.id);
             try writer.writeInt(this.from_timestamp);
             try writer.writeEnum(this.loader);
-            try writer.writeValue(this.module_path);
+            try writer.writeArray(u8, this.module_path);
             try writer.writeValue(this.log);
-            try writer.writeArray(u8, this.bytes);
+            try writer.writeInt(this.blob_length);
         }
     };

@@ -1469,7 +1472,7 @@ pub const Api = struct {
             try writer.writeInt(this.id);
             try writer.writeInt(this.from_timestamp);
             try writer.writeEnum(this.loader);
-            try writer.writeValue(this.module_path);
+            try writer.writeArray(u8, this.module_path);
             try writer.writeValue(this.log);
         }
     };
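`writeValue` previously let `[]u8`/`[]const u8` fall through the generic slice path; routing them through `writeArray(u8, ...)` makes every byte slice length-prefixed on the wire, matching what `readArray(u8)` expects on the other end. A toy writer showing the convention (the exact prefix encoding is peechy's; a little-endian `uint32` is assumed here purely for illustration):

```ts
// Toy length-prefixed writer; mirrors the write-order contract only.
class ToyByteWriter {
  private out: number[] = [];

  writeUint32(value: number) {
    this.out.push(value & 0xff, (value >>> 8) & 0xff, (value >>> 16) & 0xff, (value >>> 24) & 0xff);
  }

  writeByteArray(bytes: Uint8Array) {
    this.writeUint32(bytes.length); // length first...
    for (const b of bytes) this.out.push(b); // ...then the raw bytes
  }

  finish(): Uint8Array {
    return Uint8Array.from(this.out);
  }
}
```

The other fix in this hunk, `Writer(*std.io.FixedBufferStream([]u8))`, makes `ByteWriter` hold a pointer to the stream so successive writes advance one shared cursor rather than a copy's.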
diff --git a/src/bundler.zig b/src/bundler.zig
index cbdb5586a..286964281 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -165,7 +165,12 @@ pub fn NewBundler(cache_files: bool) type {
             js_ast.Expr.Data.Store.reset();
             js_ast.Stmt.Data.Store.reset();
             var fs = try Fs.FileSystem.init1(allocator, opts.absolute_working_dir, opts.serve orelse false);
-            const bundle_options = try options.BundleOptions.fromApi(allocator, fs, log, opts);
+            const bundle_options = try options.BundleOptions.fromApi(
+                allocator,
+                fs,
+                log,
+                opts,
+            );

             // var pool = try allocator.create(ThreadPool);
             // try pool.init(ThreadPool.InitConfig{
@@ -842,6 +847,7 @@ pub fn NewBundler(cache_files: bool) type {
             writer: Writer,
             comptime import_path_format: options.BundleOptions.ImportPathFormat,
             file_descriptor: ?StoredFileDescriptorType,
+            filepath_hash: u32,
         ) !BuildResolveResultPair {
             if (resolve_result.is_external) {
                 return BuildResolveResultPair{
                     .written = 0,
@@ -858,7 +864,7 @@ pub fn NewBundler(cache_files: bool) type {
                 var old_bundler_allocator = bundler.allocator;
                 bundler.allocator = allocator;
                 defer bundler.allocator = old_bundler_allocator;
-                var result = bundler.parse(allocator, file_path, loader, resolve_result.dirname_fd, file_descriptor) orelse {
+                var result = bundler.parse(allocator, file_path, loader, resolve_result.dirname_fd, file_descriptor, filepath_hash) orelse {
                     bundler.resetStore();
                     return BuildResolveResultPair{
                         .written = 0,
@@ -900,7 +906,14 @@ pub fn NewBundler(cache_files: bool) type {
                 switch (loader) {
                     .jsx, .tsx, .js, .ts, .json => {
-                        var result = bundler.parse(bundler.allocator, file_path, loader, resolve_result.dirname_fd, null) orelse {
+                        var result = bundler.parse(
+                            bundler.allocator,
+                            file_path,
+                            loader,
+                            resolve_result.dirname_fd,
+                            null,
+                            null,
+                        ) orelse {
                             return null;
                         };
@@ -1056,8 +1069,10 @@ pub fn NewBundler(cache_files: bool) type {
             allocator: *std.mem.Allocator,
             path: Fs.Path,
             loader: options.Loader,
+            // only used when file_descriptor is null
             dirname_fd: StoredFileDescriptorType,
             file_descriptor: ?StoredFileDescriptorType,
+            file_hash: ?u32,
         ) ?ParseResult {
             if (FeatureFlags.tracing) {
                 bundler.timer.start();
@@ -1088,10 +1103,11 @@ pub fn NewBundler(cache_files: bool) type {
                     var jsx = bundler.options.jsx;
                     jsx.parse = loader.isJSX();
                     var opts = js_parser.Parser.Options.init(jsx, loader);
-                    opts.enable_bundling = bundler.options.node_modules_bundle != null;
+                    opts.enable_bundling = false;
                     opts.transform_require_to_import = true;
                     opts.can_import_from_bundle = bundler.options.node_modules_bundle != null;
                     opts.features.hot_module_reloading = bundler.options.hot_module_reloading;
+                    opts.filepath_hash_for_hmr = file_hash orelse 0;
                     const value = (bundler.resolver.caches.js.parse(allocator, opts, bundler.options.define, bundler.log, &source) catch null) orelse return null;
                     return ParseResult{
                         .ast = value,
@@ -1555,7 +1571,7 @@ pub const Transformer = struct {
         js_ast.Expr.Data.Store.create(allocator);
         js_ast.Stmt.Data.Store.create(allocator);

-        var define = try options.definesFromTransformOptions(allocator, log, opts.define);
+        var define = try options.definesFromTransformOptions(allocator, log, opts.define, false);
         const cwd = if (opts.absolute_working_dir) |workdir| try std.fs.realpathAlloc(allocator, workdir) else try std.process.getCwdAlloc(allocator);
diff --git a/src/cli.zig b/src/cli.zig
index c6b1c38bd..81f7b1c87 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -23,10 +23,6 @@ const bundler = @import("bundler.zig");
 const fs = @import("fs.zig");

-pub fn constStrToU8(s: string) []u8 {
-    return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
-}
-
 const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;

 pub const Cli = struct {
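`parse` now threads an optional `file_hash` through to `opts.filepath_hash_for_hmr`; that `u32` hash of the file path doubles as the HMR module id on both sides of the socket. The real hash lives in `watcher.zig` (`Watcher.getHash`) and is not shown in this diff; FNV-1a below is only a stand-in to illustrate the shape of the id:

```ts
// Stand-in: Watcher.getHash's actual algorithm is not part of this diff.
function pathHash(filePath: string): number {
  let hash = 0x811c9dc5; // FNV-1a 32-bit offset basis
  for (let i = 0; i < filePath.length; i++) {
    hash ^= filePath.charCodeAt(i);
    hash = Math.imul(hash, 0x01000193); // FNV prime
  }
  return hash >>> 0; // unsigned u32, like the Zig side
}
```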
diff --git a/src/http.zig b/src/http.zig
index 3eba74bfa..b87b1d67a 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -2,8 +2,19 @@
 const std = @import("std");
 usingnamespace @import("global.zig");
 const Api = @import("./api/schema.zig").Api;
+const ApiReader = @import("./api/schema.zig").Reader;
+const ApiWriter = @import("./api/schema.zig").Writer;
+const ByteApiWriter = @import("./api/schema.zig").ByteWriter;
+const NewApiWriter = @import("./api/schema.zig").Writer;
+const js_ast = @import("./js_ast.zig");
 const bundler = @import("bundler.zig");
 const logger = @import("logger.zig");
+const Fs = @import("./fs.zig");
+pub fn constStrToU8(s: string) []u8 {
+    return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
+}
+
+pub const MutableStringAPIWriter = NewApiWriter(*MutableString);

 const tcp = std.x.net.tcp;
 const ip = std.x.net.ip;
@@ -26,6 +37,7 @@ const SOCKET_FLAGS = os.SOCK_CLOEXEC;
 const watcher = @import("./watcher.zig");
 threadlocal var req_headers_buf: [100]picohttp.Header = undefined;
 threadlocal var res_headers_buf: [100]picohttp.Header = undefined;
+const sync = @import("./sync.zig");

 const Watcher = watcher.NewWatcher(*Server);

@@ -172,6 +184,7 @@ pub const RequestContext = struct {
     mime_type: MimeType = MimeType.other,
     controlled: bool = false,
     watcher: *Watcher,
+    timer: std.time.Timer,

     res_headers_count: usize = 0,

@@ -278,6 +291,7 @@ pub const RequestContext = struct {
         conn: *tcp.Connection,
         bundler_: *Bundler,
         watcher_: *Watcher,
+        timer: std.time.Timer,
     ) !RequestContext {
         var ctx = RequestContext{
             .request = req,
@@ -289,6 +303,7 @@ pub const RequestContext = struct {
             .allocator = undefined,
             .method = Method.which(req.method) orelse return error.InvalidMethod,
             .watcher = watcher_,
+            .timer = timer,
         };

         return ctx;
@@ -386,12 +401,203 @@ pub const RequestContext = struct {
         );
     }

+    pub const WatchBuilder = struct {
+        watcher: *Watcher,
+        bundler: *Bundler,
+        allocator: *std.mem.Allocator,
+        printer: js_printer.BufferPrinter,
+        timer: std.time.Timer,
+
+        pub const WatchBuildResult = struct {
+            value: Value,
+            id: u32,
+            timestamp: u32,
+            bytes: []const u8 = "",
+            pub const Value = union(Tag) {
+                success: Api.WebsocketMessageBuildSuccess,
+                fail: Api.WebsocketMessageBuildFailure,
+            };
+            pub const Tag = enum {
+                success,
+                fail,
+            };
+        };
+        pub fn build(this: *WatchBuilder, id: u32, from_timestamp: u32) !WatchBuildResult {
+            var log = logger.Log.init(this.allocator);
+            errdefer log.deinit();
+
+            const index = std.mem.indexOfScalar(u32, this.watcher.watchlist.items(.hash), id) orelse {
+                // log.addErrorFmt(null, logger.Loc.Empty, this, "File missing from watchlist: {d}. Please refresh :(", .{hash}) catch unreachable;
+                return WatchBuildResult{
+                    .value = .{ .fail = std.mem.zeroes(Api.WebsocketMessageBuildFailure) },
+                    .id = id,
+                    .timestamp = WebsocketHandler.toTimestamp(this.timer.read()),
+                };
+            };
+
+            const file_path_str = this.watcher.watchlist.items(.file_path)[index];
+            const fd = this.watcher.watchlist.items(.fd)[index];
+            const loader = this.watcher.watchlist.items(.loader)[index];
+
+            switch (loader) {
+                .json, .ts, .tsx, .js, .jsx => {
+                    // Since we already have:
+                    // - The file descriptor
+                    // - The path
+                    // - The loader
+                    // We can skip resolving. We will need special handling for renaming where basically we:
+                    // - Update the watch item.
+                    // - Clear directory cache
+                    const path = Fs.Path.init(file_path_str);
+                    var old_log = this.bundler.log;
+                    defer this.bundler.log = old_log;
+                    this.bundler.log = &log;
+                    this.bundler.resetStore();
+                    var parse_result = this.bundler.parse(
+                        this.bundler.allocator,
+                        path,
+                        loader,
+                        0,
+                        fd,
+                        id,
+                    ) orelse {
+                        return WatchBuildResult{
+                            .value = .{ .fail = std.mem.zeroes(Api.WebsocketMessageBuildFailure) },
+                            .id = id,
+                            .timestamp = WebsocketHandler.toTimestamp(this.timer.read()),
+                        };
+                    };
+
+                    this.printer.ctx.reset();
+
+                    var written = this.bundler.print(parse_result, @TypeOf(this.printer), this.printer) catch |err| {
+                        return WatchBuildResult{
+                            .value = .{ .fail = std.mem.zeroes(Api.WebsocketMessageBuildFailure) },
+                            .id = id,
+                            .timestamp = WebsocketHandler.toTimestamp(this.timer.read()),
+                        };
+                    };
+
+                    return WatchBuildResult{
+                        .value = .{
+                            .success = .{
+                                .id = id,
+                                .from_timestamp = from_timestamp,
+                                .loader = parse_result.loader.toAPI(),
+                                .module_path = file_path_str,
+                                .blob_length = @truncate(u32, written),
+                                .log = std.mem.zeroes(Api.Log),
+                            },
+                        },
+                        .id = id,
+                        .bytes = this.printer.ctx.written,
+                        .timestamp = WebsocketHandler.toTimestamp(this.timer.read()),
+                    };
+                },
+                else => {
+                    return WatchBuildResult{
+                        .value = .{ .fail = std.mem.zeroes(Api.WebsocketMessageBuildFailure) },
+                        .id = id,
+                        .timestamp = WebsocketHandler.toTimestamp(this.timer.read()),
+                    };
+                },
+            }
+        }
+    };
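`WatchBuildResult` is a tagged union: the success arm pairs the metadata message with the printed bytes (which travel out-of-band as the blob), while the fail arm carries only a message. In TypeScript terms, roughly:

```ts
// Rough mirror of WatchBuilder.WatchBuildResult; field names follow the Zig struct.
interface BuildSuccessMsg { id: number; from_timestamp: number; loader: string; module_path: string; blob_length: number }
interface BuildFailureMsg { id: number; from_timestamp: number; loader: string; module_path: string }

type WatchBuildResult =
  | { value: { success: BuildSuccessMsg }; id: number; timestamp: number; bytes: Uint8Array }
  | { value: { fail: BuildFailureMsg }; id: number; timestamp: number };
```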
+    pub const WebsocketHandler = struct {
         accept_key: [28]u8 = undefined,
         ctx: RequestContext,
+        websocket: Websocket.Websocket,
+        conn: tcp.Connection,
+        tombstone: bool = false,
+        builder: WatchBuilder,
+        message_buffer: MutableString,
+        pub var open_websockets: std.ArrayList(*WebsocketHandler) = undefined;
+        var open_websockets_lock = sync.RwLock.init();
+        pub fn addWebsocket(ctx: *RequestContext) !*WebsocketHandler {
+            open_websockets_lock.lock();
+            defer open_websockets_lock.unlock();
+            var clone = try ctx.allocator.create(WebsocketHandler);
+            clone.ctx = ctx.*;
+            clone.conn = ctx.conn.*;
+            clone.message_buffer = try MutableString.init(ctx.allocator, 0);
+            clone.ctx.conn = &clone.conn;
+            var printer_writer = try js_printer.BufferWriter.init(ctx.allocator);
+
+            clone.builder = WatchBuilder{
+                .allocator = ctx.allocator,
+                .bundler = ctx.bundler,
+                .printer = js_printer.BufferPrinter.init(printer_writer),
+                .timer = ctx.timer,
+                .watcher = ctx.watcher,
+            };
+
+            clone.websocket = Websocket.Websocket.create(ctx, SOCKET_FLAGS);
+            clone.tombstone = false;
+            try open_websockets.append(clone);
+            return clone;
+        }
+        pub var to_close_buf: [100]*WebsocketHandler = undefined;
+        pub var to_close: []*WebsocketHandler = &[_]*WebsocketHandler{};

-        pub fn handle(self: WebsocketHandler) void {
-            var this = self;
+        pub fn generateTimestamp(handler: *WebsocketHandler) u32 {
+            return @truncate(u32, handler.ctx.timer.read() / std.time.ns_per_ms);
+        }
+
+        pub fn toTimestamp(timestamp: u64) u32 {
+            return @truncate(u32, timestamp / std.time.ns_per_ms);
+        }
+
+        pub fn broadcast(message: []const u8) !void {
+            {
+                open_websockets_lock.lockShared();
+                defer open_websockets_lock.unlockShared();
+                var markForClosing = false;
+                for (open_websockets.items) |item| {
+                    var socket: *WebsocketHandler = item;
+                    const written = socket.websocket.writeBinary(message) catch |err| brk: {
+                        Output.prettyError("<r>WebSocket error: <b>{d}", .{@errorName(err)});
+                        markForClosing = true;
+                        break :brk 0;
+                    };
+
+                    if (written < message.len) {
+                        markForClosing = true;
+                    }
+
+                    if (markForClosing) {
+                        to_close_buf[to_close.len] = item;
+                        to_close = to_close_buf[0 .. to_close.len + 1];
+                    }
+                }
+            }
+
+            if (to_close.len > 0) {
+                open_websockets_lock.lock();
+                defer open_websockets_lock.unlock();
+                for (to_close) |item| {
+                    WebsocketHandler.removeBulkWebsocket(item);
+                }
+                to_close = &[_]*WebsocketHandler{};
+            }
+        }
+
+        pub fn removeWebsocket(socket: *WebsocketHandler) void {
+            open_websockets_lock.lock();
+            defer open_websockets_lock.unlock();
+            removeBulkWebsocket(socket);
+        }
+
+        pub fn removeBulkWebsocket(socket: *WebsocketHandler) void {
+            if (std.mem.indexOfScalar(*WebsocketHandler, open_websockets.items, socket)) |id| {
+                socket.tombstone = true;
+                _ = open_websockets.swapRemove(id);
+            }
+        }
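`broadcast` holds only the shared side of the `RwLock` while writing, collects sockets that error or short-write into `to_close`, and prunes them afterwards under the exclusive lock, so one dead client never blocks the others mid-broadcast. The same mark-then-sweep pattern in single-threaded TypeScript:

```ts
const openSockets = new Set<WebSocket>();

function broadcast(message: Uint8Array) {
  const toClose: WebSocket[] = [];
  for (const socket of openSockets) {
    try {
      socket.send(message);
    } catch {
      // Mark now, remove later: deleting while iterating is the
      // single-threaded analogue of upgrading the lock mid-loop.
      toClose.push(socket);
    }
  }
  for (const socket of toClose) openSockets.delete(socket);
}
```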
+        pub fn handle(self: *WebsocketHandler) void {
             var stdout = std.io.getStdOut();
             // var stdout = std.io.bufferedWriter(stdout_file.writer());
             var stderr = std.io.getStdErr();
@@ -401,12 +607,16 @@ pub const RequestContext = struct {
             // defer stderr.flush() catch {};
             Output.Source.set(&output_source);
             Output.enable_ansi_colors = stderr.isTty();
-
-            _handle(&this) catch {};
+            js_ast.Stmt.Data.Store.create(self.ctx.allocator);
+            js_ast.Expr.Data.Store.create(self.ctx.allocator);
+            _handle(self) catch {};
         }

         fn _handle(handler: *WebsocketHandler) !void {
             var ctx = &handler.ctx;
+            defer handler.message_buffer.deinit();
+            defer handler.tombstone = true;
+            defer removeWebsocket(handler);
             defer ctx.arena.deinit();
             defer ctx.conn.deinit();
             defer Output.flush();
@@ -441,21 +651,45 @@ pub const RequestContext = struct {
             ctx.appendHeader("Connection", "Upgrade");
             ctx.appendHeader("Upgrade", "websocket");
             ctx.appendHeader("Sec-WebSocket-Accept", key);
+            ctx.appendHeader("Sec-WebSocket-Protocol", "speedy-hmr");
             try ctx.writeStatus(101);
             try ctx.flushHeaders();
             Output.println("101 - Websocket connected.", .{});
             Output.flush();

-            var websocket = Websocket.Websocket.create(ctx, SOCKET_FLAGS);
-            _ = try websocket.writeText("Hello!");
+            var cmd: Api.WebsocketCommand = undefined;
+            var msg: Api.WebsocketMessage = .{
+                .timestamp = handler.generateTimestamp(),
+                .kind = .welcome,
+            };
+            var cmd_reader: ApiReader = undefined;
+            var byte_buf: [32]u8 = undefined;
+            var fbs = std.io.fixedBufferStream(&byte_buf);
+            var writer = ByteApiWriter.init(&fbs);
+
+            try msg.encode(&writer);
+            const welcome_message = Api.WebsocketMessageWelcome{
+                .epoch = WebsocketHandler.toTimestamp(handler.ctx.timer.start_time),
+            };
+            try welcome_message.encode(&writer);
+            if ((try handler.websocket.writeBinary(fbs.getWritten())) == 0) {
+                handler.tombstone = true;
+                Output.prettyErrorln("<r><red>ERR:<r> <b>Websocket failed to write.<r>", .{});
+            }
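After the 101 upgrade (note the new `Sec-WebSocket-Protocol: speedy-hmr` header), the server speaks first: one binary frame containing `WebsocketMessage{kind: .welcome}` followed by `WebsocketMessageWelcome{epoch}`, where the epoch is the server timer's start time. That lets the client anchor every later `u32` millisecond timestamp to a single origin. Client side, the handshake is just (URL illustrative):

```ts
let serverEpoch = 0; // filled in from the welcome message
const socket = new WebSocket("ws://localhost:3000/_api", ["speedy-hmr"]);
socket.binaryType = "arraybuffer";
// The first frame to arrive is the welcome message; decoding it yields the
// epoch that all subsequent build/file-change timestamps are relative to.
```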
-            while (true) {
+            while (!handler.tombstone) {
                 defer Output.flush();
-                var frame = websocket.read() catch |err| {
+                handler.conn.client.getError() catch |err| {
+                    Output.prettyErrorln("<r><red>ERR:<r> <b>{s}<r>", .{err});
+                    handler.tombstone = true;
+                };
+
+                var frame = handler.websocket.read() catch |err| {
                     switch (err) {
                         error.ConnectionClosed => {
                             Output.prettyln("Websocket closed.", .{});
-                            return;
+                            handler.tombstone = true;
+                            continue;
                         },
                         else => {
                             Output.prettyErrorln("<r><red>ERR:<r> <b>{s}<r>", .{err});
@@ -469,12 +703,74 @@ pub const RequestContext = struct {
                         return;
                     },
                     .Text => {
-                        _ = try websocket.writeText(frame.data);
+                        _ = try handler.websocket.writeText(frame.data);
+                    },
+                    .Binary => {
+                        var cnst_frame = constStrToU8(frame.data);
+                        cmd_reader = ApiReader.init(cnst_frame, ctx.allocator);
+                        cmd = try Api.WebsocketCommand.decode(&cmd_reader);
+                        switch (cmd.kind) {
+                            .build => {
+                                var request = try Api.WebsocketCommandBuild.decode(&cmd_reader);
+                                var build_result = try handler.builder.build(request.id, cmd.timestamp);
+                                const file_path = switch (build_result.value) {
+                                    .fail => |fail| fail.module_path,
+                                    .success => |fail| fail.module_path,
+                                };
+
+                                Output.prettyln(
+                                    "<r>[{s}] Built <b>{s}<r><b>{d}ms",
+                                    .{
+                                        @tagName(std.meta.activeTag(build_result.value)),
+                                        file_path,
+                                        build_result.timestamp - cmd.timestamp,
+                                    },
+                                );
+
+                                defer Output.flush();
+                                msg.timestamp = build_result.timestamp;
+                                msg.kind = switch (build_result.value) {
+                                    .success => .build_success,
+                                    else => .build_fail,
+                                };
+                                handler.message_buffer.reset();
+                                var buffer_writer = MutableStringAPIWriter.init(&handler.message_buffer);
+                                try msg.encode(&buffer_writer);
+                                var head = Websocket.WebsocketHeader{
+                                    .final = true,
+                                    .opcode = .Binary,
+                                    .mask = false,
+                                    .len = 0,
+                                };
+
+                                switch (build_result.value) {
+                                    .success => |success| {
+                                        try success.encode(&buffer_writer);
+                                        const total = handler.message_buffer.list.items.len + build_result.bytes.len;
+                                        head.len = Websocket.WebsocketHeader.packLength(total);
+                                        try handler.websocket.writeHeader(head, total);
+                                        _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+                                        if (build_result.bytes.len > 0) {
+                                            _ = try handler.conn.client.write(build_result.bytes, SOCKET_FLAGS);
+                                        }
+                                    },
+                                    .fail => |fail| {
+                                        try fail.encode(&buffer_writer);
+                                        head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len);
+                                        try handler.websocket.writeHeader(head, handler.message_buffer.list.items.len);
+                                        _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+                                    },
+                                }
+                            },
+                            else => {
+                                Output.prettyErrorln("<r>[Websocket]: Unknown cmd: <b>{d}<r>. This might be a version mismatch. Try updating your node_modules.jsb", .{@enumToInt(cmd.kind)});
+                            },
+                        }
                     },
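For a successful build the payload goes out in two writes under one frame header: first the encoded metadata (ending in `blob_length`), then `build_result.bytes`, the printed module source. The header's length must therefore be the sum of both, which is exactly what `packLength(total)` sets up. The bookkeeping, sketched against a hypothetical raw socket (`encodeFrameHeader` is spelled out after the `websocket.zig` diff below):

```ts
declare function encodeFrameHeader(h: { final: boolean; opcode: number; mask: boolean; len: number }): Uint8Array;

// One WebSocket frame, two payload writes: metadata, then the raw source blob.
function sendBuildSuccess(sock: { write(b: Uint8Array): void }, meta: Uint8Array, blob: Uint8Array) {
  const total = meta.length + blob.length;
  sock.write(encodeFrameHeader({ final: true, opcode: 0x2 /* Binary */, mask: false, len: total }));
  sock.write(meta); // WebsocketMessage + WebsocketMessageBuildSuccess
  if (blob.length > 0) sock.write(blob); // the compiled module source
}
```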
                     .Ping => {
                         var pong = frame;
                         pong.header.opcode = .Pong;
-                        _ = try websocket.writeDataFrame(pong);
+                        _ = try handler.websocket.writeDataFrame(pong);
                     },
                     else => {
                         Output.prettyErrorln("Websocket unknown opcode: {s}", .{@tagName(frame.header.opcode)});
@@ -536,7 +832,7 @@ pub const RequestContext = struct {
     pub fn handleWebsocket(ctx: *RequestContext) anyerror!void {
         ctx.controlled = true;
-        var handler = WebsocketHandler{ .ctx = ctx.* };
+        var handler = try WebsocketHandler.addWebsocket(ctx);
         _ = try std.Thread.spawn(WebsocketHandler.handle, handler);
     }

@@ -679,9 +975,16 @@ pub const RequestContext = struct {
             chunked_encoder,
             .absolute_url,
             input_fd,
+            hash,
         );

         if (written.input_fd) |written_fd| {
-            try ctx.watcher.addFile(written_fd, result.file.input.text, hash, true);
+            try ctx.watcher.addFile(
+                written_fd,
+                result.file.input.text,
+                hash,
+                loader,
+                true,
+            );
             if (ctx.watcher.watchloop_handle == null) {
                 try ctx.watcher.start();
             }
@@ -693,7 +996,13 @@ pub const RequestContext = struct {
         .copy, .move => |file| {
             // defer std.os.close(file.fd);
             defer {
-                if (ctx.watcher.addFile(file.fd, result.file.input.text, Watcher.getHash(result.file.input.text), true)) {
+                if (ctx.watcher.addFile(
+                    file.fd,
+                    result.file.input.text,
+                    Watcher.getHash(result.file.input.text),
+                    result.file.loader,
+                    true,
+                )) {
                     if (ctx.watcher.watchloop_handle == null) {
                         ctx.watcher.start() catch |err| {
                             Output.prettyErrorln("Failed to start watcher: {s}", .{@errorName(err)});
@@ -809,6 +1118,32 @@ pub const RequestContext = struct {
     }
 };

+// // u32 == File ID from Watcher
+// pub const WatcherBuildChannel = sync.Channel(u32, .Dynamic);
+// pub const WatcherBuildQueue = struct {
+//     channel: WatcherBuildChannel,
+//     bundler: *Bundler,
+//     watcher: *Watcher,
+//     allocator: *std.mem.Allocator,
+
+//     pub fn start(queue: *@This()) void {
+//         var stdout = std.io.getStdOut();
+//         var stderr = std.io.getStdErr();
+//         var output_source = Output.Source.init(stdout, stderr);
+
+//         Output.Source.set(&output_source);
+//         Output.enable_ansi_colors = stderr.isTty();
+//         defer Output.flush();
+//         queue.loop();
+//     }
+
+//     pub fn loop(queue: *@This()) !void {
+//         while (true) {
+
+//         }
+//     }
+// };
+
 // This is a tiny HTTP server.
 // It needs to support:
 // - Static files
@@ -826,6 +1161,7 @@ pub const Server = struct {
     allocator: *std.mem.Allocator,
     bundler: Bundler,
     watcher: *Watcher,
+    timer: std.time.Timer = undefined,

     pub fn adjustUlimit() !void {
         var limit = try std.os.getrlimit(.NOFILE);
@@ -845,10 +1181,38 @@ pub const Server = struct {
             server.handleConnection(&conn);
         }
     }
+    threadlocal var filechange_buf: [32]u8 = undefined;
+    pub fn onFileUpdate(ctx: *Server, events: []watcher.WatchEvent, watchlist: watcher.Watchlist) void {
+        var fbs = std.io.fixedBufferStream(&filechange_buf);
+        var writer = ByteApiWriter.init(&fbs);
+        const message_type = Api.WebsocketMessage{
+            .timestamp = RequestContext.WebsocketHandler.toTimestamp(ctx.timer.read()),
+            .kind = .file_change_notification,
+        };
+        message_type.encode(&writer) catch unreachable;
+        var header = fbs.getWritten();
+
         for (events) |event| {
-            const item = watchlist.items(.file_path)[event.index];
-            Output.prettyln("File changed: \"<b>{s}<r>\"", .{item});
+            const file_path = watchlist.items(.file_path)[event.index];
+            // so it's consistent with the rest
+            // if we use .extname we might run into an issue with whether or not the "." is included.
+            const path = Fs.PathName.init(file_path);
+            const id = watchlist.items(.hash)[event.index];
+            var content_fbs = std.io.fixedBufferStream(filechange_buf[header.len..]);
+            const change_message = Api.WebsocketMessageFileChangeNotification{
+                .id = id,
+                .loader = (ctx.bundler.options.loaders.get(path.ext) orelse .file).toAPI(),
+            };
+            var content_writer = ByteApiWriter.init(&content_fbs);
+            change_message.encode(&content_writer) catch unreachable;
+            const change_buf = content_fbs.getWritten();
+            const written_buf = filechange_buf[0 .. header.len + change_buf.len];
+            defer Output.flush();
+            RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| {
+                Output.prettyln("Error writing change notification: {s}", .{@errorName(err)});
+            };
+            Output.prettyln("Detected file change: {s}", .{file_path});
         }
     }

@@ -856,6 +1220,9 @@ pub const Server = struct {
         adjustUlimit() catch {};
         const listener = try tcp.Listener.init(.ip, .{ .close_on_exec = true });
         defer listener.deinit();
+        RequestContext.WebsocketHandler.open_websockets = @TypeOf(
+            RequestContext.WebsocketHandler.open_websockets,
+        ).init(server.allocator);
         listener.setReuseAddress(true) catch {};
         listener.setReusePort(true) catch {};
@@ -927,6 +1294,7 @@ pub const Server = struct {
             conn,
             &server.bundler,
             server.watcher,
+            server.timer,
         ) catch |err| {
             Output.printErrorln("<r>[<red>{s}<r>] - <b>{s}<r>: {s}", .{ @errorName(err), req.method, req.path });
             conn.client.deinit();
@@ -984,6 +1352,7 @@ pub const Server = struct {
             .log = log,
             .bundler = undefined,
             .watcher = undefined,
+            .timer = try std.time.Timer.start(),
         };
         server.bundler = try Bundler.init(allocator, &server.log, options);
         server.bundler.configureLinker();
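`onFileUpdate` encodes the `WebsocketMessage` header once per batch into the front of a 32-byte stack buffer, then re-encodes only the small `(id, loader)` body per event into the remainder and broadcasts the concatenation. Sketched (the `encode*` helper is an assumed name standing in for the generated schema encoder):

```ts
declare function encodeFileChangeNotification(ev: { id: number; loader: number }): Uint8Array;
declare function broadcast(frame: Uint8Array): void;

// Header encoded once per batch; only the tiny body varies per file.
function notifyAll(header: Uint8Array, events: Array<{ id: number; loader: number }>) {
  for (const ev of events) {
    const body = encodeFileChangeNotification(ev);
    const frame = new Uint8Array(header.length + body.length);
    frame.set(header, 0);
    frame.set(body, header.length);
    broadcast(frame);
  }
}
```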
diff --git a/src/http/websocket.zig b/src/http/websocket.zig
index 9fb38a92f..be1645d49 100644
--- a/src/http/websocket.zig
+++ b/src/http/websocket.zig
@@ -131,7 +131,7 @@ pub const Websocket = struct {
     // Stream API
     // ------------------------------------------------------------------------
     pub const TextFrameWriter = std.io.Writer(*Websocket, WriteError, Websocket.writeText);
-    pub const BinaryFrameWriter = std.io.Writer(*Websocket, WriteError, Websocket.writeBinary);
+    pub const BinaryFrameWriter = std.io.Writer(*Websocket, anyerror, Websocket.writeBinary);

     // A buffered writer that will buffer up to size bytes before writing out
     pub fn newWriter(self: *Websocket, comptime size: usize, comptime opcode: Opcode) Writer(size, opcode) {
@@ -164,17 +164,17 @@ pub const Websocket = struct {
         return self.writeMessage(.Text, data);
     }

-    pub fn writeBinary(self: *Websocket, data: []const u8) !usize {
+    pub fn writeBinary(self: *Websocket, data: []const u8) anyerror!usize {
         return self.writeMessage(.Binary, data);
     }

     // Write a final message packet with the given opcode
-    pub fn writeMessage(self: *Websocket, opcode: Opcode, message: []const u8) !usize {
+    pub fn writeMessage(self: *Websocket, opcode: Opcode, message: []const u8) anyerror!usize {
         return self.writeSplitMessage(opcode, true, message);
     }

     // Write a message packet with the given opcode and final flag
-    pub fn writeSplitMessage(self: *Websocket, opcode: Opcode, final: bool, message: []const u8) !usize {
+    pub fn writeSplitMessage(self: *Websocket, opcode: Opcode, final: bool, message: []const u8) anyerror!usize {
         return self.writeDataFrame(WebsocketDataFrame{
             .header = WebsocketHeader{
                 .final = final,
@@ -186,8 +186,53 @@ pub const Websocket = struct {
         });
     }

+    pub fn writeHeader(self: *Websocket, header: WebsocketHeader, n: usize) anyerror!void {
+        var stream = self.request.conn.client.writer(self.flags);
+
+        try stream.writeIntBig(u16, @bitCast(u16, header));
+
+        // Write extended length if needed
+        switch (n) {
+            0...126 => {}, // Included in header
+            127...0xFFFF => try stream.writeIntBig(u16, @truncate(u16, n)),
+            else => try stream.writeIntBig(u64, n),
+        }
+
+        // try self.io.flush();
+    }
+
+    pub fn writeIterator(self: *Websocket, header: WebsocketHeader, count: usize, comptime BodyIterator: type, body_iter: BodyIterator) anyerror!usize {
+        var stream = self.request.conn.client.writer(self.flags);
+
+        if (!dataframe.isValid()) return error.InvalidMessage;
+
+        try stream.writeIntBig(u16, @bitCast(u16, header));
+
+        // Write extended length if needed
+        const n = count;
+        switch (n) {
+            0...126 => {}, // Included in header
+            127...0xFFFF => try stream.writeIntBig(u16, @truncate(u16, n)),
+            else => try stream.writeIntBig(u64, n),
+        }
+
+        // TODO: Handle compression
+        if (dataframe.header.compressed) return error.InvalidMessage;
+
+        std.debug.assert(header.mask == false);
+
+        while (body_iter.next()) |chunk| {
+            try stream.writeAll(chunk);
+        }
+
+        // try self.io.flush();
+
+        return count;
+    }
+
     // Write a raw data frame
-    pub fn writeDataFrame(self: *Websocket, dataframe: WebsocketDataFrame) !usize {
+    pub fn writeDataFrame(self: *Websocket, dataframe: WebsocketDataFrame) anyerror!usize {
         var stream = self.request.conn.client.writer(self.flags);

         if (!dataframe.isValid()) return error.InvalidMessage;
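`writeHeader` follows RFC 6455's payload-length scheme: small lengths ride in the header's 7-bit field, larger ones in a 16-bit or 64-bit big-endian extension. (The Zig switch's `0...126` arm is inclusive of 126, which the RFC reserves as the 16-bit marker, so a 126-byte payload would apparently skip its extension word; worth a second look.) A reference encoder for an unmasked server frame:

```ts
function encodeFrameHeader(h: { final: boolean; opcode: number; mask: boolean; len: number }): Uint8Array {
  const bytes: number[] = [(h.final ? 0x80 : 0) | (h.opcode & 0x0f)];
  if (h.len <= 125) {
    bytes.push(h.len); // fits the 7-bit field
  } else if (h.len <= 0xffff) {
    bytes.push(126, (h.len >>> 8) & 0xff, h.len & 0xff); // 16-bit extension
  } else {
    bytes.push(127); // 64-bit extension, big-endian
    for (let shift = 56; shift >= 0; shift -= 8) bytes.push(Math.floor(h.len / 2 ** shift) & 0xff);
  }
  if (h.mask) bytes[1] |= 0x80; // mask bit is only set client->server
  return Uint8Array.from(bytes);
}
```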
diff --git a/src/js_ast.zig b/src/js_ast.zig
index ca33e661c..7cf301457 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -195,6 +195,10 @@ pub const Flags = struct {
         // Only applicable to function statements.
         is_export: bool = false,

+        // Used for Hot Module Reloading's wrapper function
+        // "iife" stands for "immediately invoked function expression"
+        print_as_iife: bool = false,
+
         const None = Flags.Function{};
     };
 };
@@ -3530,7 +3534,7 @@ pub const Ast = struct {
     export_star_import_records: []u32 = &([_]u32{}),

     pub const NamedImports = std.ArrayHashMap(Ref, NamedImport, RefHashCtx, true);
-    pub const NamedExports = StringHashMap(NamedExport);
+    pub const NamedExports = std.StringArrayHashMap(NamedExport);

     pub fn initTest(parts: []Part) Ast {
         return Ast{
diff --git a/src/js_parser/imports.zig b/src/js_parser/imports.zig
index e276d6260..b1ea18df2 100644
--- a/src/js_parser/imports.zig
+++ b/src/js_parser/imports.zig
@@ -10,6 +10,7 @@ pub const renamer = @import("../renamer.zig");
 const _runtime = @import("../runtime.zig");
 pub const RuntimeImports = _runtime.Runtime.Imports;
 pub const RuntimeFeatures = _runtime.Runtime.Features;
+pub const RuntimeNames = _runtime.Runtime.Names;
 pub const fs = @import("../fs.zig");
 const _hash_map = @import("../hash_map.zig");
 pub usingnamespace @import("../global.zig");
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 4033cdd8d..5497a664d 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -37,18 +37,19 @@ pub fn locAfterOp(e: E.Binary) logger.Loc {
         return e.left.loc;
     }
 }
-
+const ExportsStringName = "exports";
 pub const ImportScanner = struct {
     stmts: []Stmt = &([_]Stmt{}),
+    kept_import_equals: bool = false,
     removed_import_equals: bool = false,

-    pub fn scan(p: anytype, stmts: []Stmt) !ImportScanner {
+    pub fn scan(comptime P: type, p: P, stmts: []Stmt, comptime convert_exports: bool) !ImportScanner {
         var scanner = ImportScanner{};
         var stmts_end: usize = 0;

-        for (stmts) |_stmt| {
+        for (stmts) |_stmt, _stmt_i| {
             // zls needs the hint, it seems.
-            const stmt: Stmt = _stmt;
+            var stmt: Stmt = _stmt;
             switch (stmt.data) {
                 .s_import => |st| {
                     var record: ImportRecord = p.import_records.items[st.import_record_index];
@@ -364,7 +365,12 @@ pub const ImportScanner = struct {
                 .s_function => |st| {
                     if (st.func.flags.is_export) {
                         if (st.func.name) |name| {
-                            try p.recordExport(name.loc, p.symbols.items[name.ref.?.inner_index].original_name, name.ref.?);
+                            const original_name = p.symbols.items[name.ref.?.inner_index].original_name;
+                            try p.recordExport(name.loc, original_name, name.ref.?);
+
+                            if (p.options.features.hot_module_reloading) {
+                                st.func.flags.is_export = false;
+                            }
                         } else {
                             try p.log.addRangeError(p.source, logger.Range{ .loc = st.func.open_parens_loc, .len = 2 }, "Exported functions must have a name");
                         }
@@ -374,6 +380,10 @@ pub const ImportScanner = struct {
                     if (st.is_export) {
                         if (st.class.class_name) |name| {
                             try p.recordExport(name.loc, p.symbols.items[name.ref.?.inner_index].original_name, name.ref.?);
+
+                            if (p.options.features.hot_module_reloading) {
+                                st.is_export = false;
+                            }
                         } else {
                             try p.log.addRangeError(p.source, logger.Range{ .loc = st.class.body_loc, .len = 0 }, "Exported classes must have a name");
                         }
@@ -418,14 +428,61 @@ pub const ImportScanner = struct {
                             }
                         }
                     }
+
+                    // We must do this at the end to not mess up import =
+                    if (p.options.features.hot_module_reloading and st.is_export) {
+                        st.is_export = false;
+                    }
                 },
                 .s_export_default => |st| {
                     try p.recordExport(st.default_name.loc, "default", st.default_name.ref.?);
+                    // Rewrite this export to be:
+                    // exports.default =
+                    if (p.options.features.hot_module_reloading) {
+                        var exports_default_ident = p.e(E.Dot{ .target = p.e(E.Identifier{ .ref = p.hmr_module_ref }, stmt.loc), .name = "default", .name_loc = st.default_name.loc }, stmt.loc);
+
+                        // export default can be:
+                        // - an expression
+                        // - a function
+                        // - a class
+                        switch (st.value) {
+                            .expr => |ex| {
+                                stmt = Expr.assignStmt(exports_default_ident, ex, p.allocator);
+                            },
+                            .stmt => |class_or_func| {
+                                switch (class_or_func.data) {
+                                    .s_function => |func| {
+                                        // convert this to an E.Function
+                                        stmt = Expr.assignStmt(exports_default_ident, p.e(E.Function{ .func = func.func }, stmt.loc), p.allocator);
+                                    },
+                                    .s_class => |class| {
+                                        stmt = Expr.assignStmt(exports_default_ident, p.e(
+                                            E.Class{
+                                                .class_keyword = class.class.class_keyword,
+                                                .ts_decorators = class.class.ts_decorators,
+                                                .class_name = class.class.class_name,
+                                                .extends = class.class.extends,
+                                                .body_loc = class.class.body_loc,
+                                                .properties = class.class.properties,
+                                            },
+                                            stmt.loc,
+                                        ), p.allocator);
+                                    },
+                                    else => unreachable,
+                                }
+                            },
+                        }
+                    }
                 },
                 .s_export_clause => |st| {
                     for (st.items) |item| {
                         try p.recordExport(item.alias_loc, item.alias, item.name.ref.?);
                     }
+
+                    // export clauses simply disappear when we have HMR on, we use NamedExports to regenerate it at the end
+                    if (p.options.features.hot_module_reloading) {
+                        continue;
+                    }
                 },
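Under HMR every static export is dismantled: named exports are recorded then un-exported, export clauses are dropped (to be regenerated from `NamedExports` at the end), and `export default` becomes a plain assignment so re-running the module body can replace the value. Note the hunk's comment says `exports.default =`, but the built `E.Dot` targets `hmr_module_ref`, i.e. the emitted code assigns `__hmrModule.default`. Roughly:

```ts
// Source:
export default function render() {}

// After the rewrite (sketch; `__hmrModule` is the injected module instance):
declare var __hmrModule: { default: unknown };
__hmrModule.default = function render() {};
```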
                 .s_export_star => |st| {
                     try p.import_records_for_current_part.append(st.import_record_index);
@@ -1551,7 +1608,7 @@ pub const Parser = struct {
         preserve_unused_imports_ts: bool = false,
         use_define_for_class_fields: bool = false,
         suppress_warnings_about_weird_code: bool = true,
-
+        filepath_hash_for_hmr: u32 = 0,
         features: RuntimeFeatures = RuntimeFeatures{},

         // Used when bundling node_modules
@@ -1906,7 +1963,7 @@ pub const Parser = struct {
         const uses_module_ref = p.symbols.items[p.module_ref.inner_index].use_count_estimate > 0;
         const uses_require_ref = p.symbols.items[p.require_ref.inner_index].use_count_estimate > 0;

-        var to_module_expr: ?Expr = null;
+        var wrapper_expr: ?Expr = null;

         if (p.es6_export_keyword.len > 0 or p.top_level_await_keyword.len > 0) {
             exports_kind = .esm;
@@ -1914,7 +1971,7 @@ pub const Parser = struct {
             exports_kind = .cjs;
             if (p.options.transform_require_to_import) {
                 var args = p.allocator.alloc(Expr, 2) catch unreachable;
-                to_module_expr = p.callRuntime(logger.Loc.Empty, "__commonJS", args);
+                wrapper_expr = p.callRuntime(logger.Loc.Empty, "__commonJS", args);
             }
         } else {
             exports_kind = .esm;
@@ -1922,7 +1979,7 @@ pub const Parser = struct {
         var runtime_imports_iter = p.runtime_imports.iter();

         // don't import runtime if we're bundling, it's already included
-        if (!p.options.transform_require_to_import) {
+        if (!p.options.enable_bundling) {
             while (runtime_imports_iter.next()) |entry| {
                 const imports = [_]u16{entry.key};
                 p.generateImportStmt(
@@ -1998,7 +2055,7 @@ pub const Parser = struct {
         // Pop the module scope to apply the "ContainsDirectEval" rules
         // p.popScope();
         debugl("<result.Ast>");
-        result.ast = try p.toAST(parts_slice, exports_kind, to_module_expr);
+        result.ast = try p.toAST(parts_slice, exports_kind, wrapper_expr);
         result.ok = true;
         debugl("</result.Ast>");
@@ -2063,6 +2120,26 @@ var s_missing = S.Empty{};
 var nullExprData = Expr.Data{ .e_missing = e_missing_data };
 var nullStmtData = Stmt.Data{ .s_empty = s_missing };
 pub const Prefill = struct {
+    pub const HotModuleReloading = struct {
+        pub var DebugEnabledArgs = [_]Expr{
+            Expr{ .data = .{ .e_boolean = E.Boolean{ .value = true } }, .loc = logger.Loc.Empty },
+        };
+        pub var DebugDisabled = [_]Expr{
+            Expr{ .data = .{ .e_boolean = E.Boolean{ .value = false } }, .loc = logger.Loc.Empty },
+        };
+        pub var ActivateString = E.String{
+            .utf8 = "activate",
+        };
+        pub var ActivateIndex = E.Index{
+            .index = .{
+                .data = .{
+                    .e_string = &ActivateString,
+                },
+                .loc = logger.Loc.Empty,
+            },
+            .target = undefined,
+        };
+    };
     pub const StringLiteral = struct {
         pub var Key = [3]u16{ 'k', 'e', 'y' };
         pub var Children = [_]u16{ 'c', 'h', 'i', 'l', 'd', 'r', 'e', 'n' };
@@ -2161,6 +2238,12 @@ pub fn NewParser(
     scopes_in_order_visitor_index: usize = 0,
     has_classic_runtime_warned: bool = false,

+    hmr_module_ref: js_ast.Ref = js_ast.Ref.None,
+    hmr_activate_ref: js_ast.Ref = js_ast.Ref.None,
+    hmr_client_ref: js_ast.Ref = js_ast.Ref.None,
+    hmr_module_class_ref: js_ast.Ref = js_ast.Ref.None,
+    hmr_exports_list: std.ArrayList(js_ast.ClauseItem),
+
     cjs_import_stmts: std.ArrayList(Stmt),

     bundle_export_ref: ?Ref = null,
@@ -2973,7 +3056,8 @@ pub fn NewParser(

         p.hoistSymbols(p.module_scope);

-        p.require_ref = try p.declareCommonJSSymbol(.unbound, "require");
+        p.exports_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "exports");
+        p.module_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "module");

         if (p.options.enable_bundling) {
             p.bundle_export_ref = try p.declareSymbol(.unbound, logger.Loc.Empty, "IF_YOU_SEE_THIS_ITS_A_BUNDLER_BUG_PLEASE_FILE_AN_ISSUE_THX");
@@ -2981,17 +3065,23 @@ pub fn NewParser(
             p.runtime_imports.register = try p.declareSymbol(.unbound, logger.Loc.Empty, "$$m");
             p.runtime_imports.lazy_export = try p.declareSymbol(.unbound, logger.Loc.Empty, "$$lzy");

-            p.exports_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "exports");
-            p.module_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "module");
             p.runtime_imports.__export = p.exports_ref;
+        } else {}
+
+        p.require_ref = try p.declareCommonJSSymbol(.unbound, "require");
+
+        if (p.options.features.hot_module_reloading) {
+            p.hmr_module_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "__hmrModule");
+            p.runtime_imports.__HMRModule = try p.declareSymbol(.hoisted, logger.Loc.Empty, "__HMRModule");
+            p.runtime_imports.__HMRClient = try p.declareSymbol(.hoisted, logger.Loc.Empty, "__HMRClient");
+            p.recordUsage(p.hmr_module_ref);
+            p.recordUsage(p.runtime_imports.__HMRModule.?);
+            p.recordUsage(p.runtime_imports.__HMRClient.?);
         } else {
-            p.exports_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "exports");
-            p.module_ref = try p.declareSymbol(.hoisted, logger.Loc.Empty, "module");
             p.runtime_imports.__export = p.exports_ref;
+            p.runtime_imports.__require = p.require_ref;
         }

-        p.runtime_imports.__require = p.require_ref;
-
         if (is_jsx_enabled) {
             if (p.options.jsx.development) {
                 p.jsx_filename_ref = p.newSymbol(.hoisted, Prefill.Runtime.JSXFilename) catch unreachable;
@@ -4647,12 +4737,12 @@ pub fn NewParser(
             },
             .s_function => |func_container| {
-                if (stmt.getFunction().func.name) |name| {
+                if (func_container.func.name) |name| {
                     break :default_name_getter LocRef{ .loc = defaultLoc, .ref = name.ref };
                 } else {}
             },
             .s_class => |class| {
-                if (stmt.getClass().class.class_name) |name| {
+                if (class.class.class_name) |name| {
                     break :default_name_getter LocRef{ .loc = defaultLoc, .ref = name.ref };
                 } else {}
             },
@@ -4691,12 +4781,12 @@ pub fn NewParser(
             },
             .s_function => |func_container| {
-                if (stmt.getFunction().func.name) |_name| {
+                if (func_container.func.name) |_name| {
                     break :default_name_getter LocRef{ .loc = defaultLoc, .ref = _name.ref };
                 } else {}
             },
             .s_class => |class| {
-                if (stmt.getClass().class.class_name) |_name| {
+                if (class.class.class_name) |_name| {
                     break :default_name_getter LocRef{ .loc = defaultLoc, .ref = _name.ref };
                 } else {}
             },
@@ -4882,7 +4972,9 @@ pub fn NewParser(
                     try p.requireInitializers(decls);
                 }

-                return p.s(S.Local{ .kind = .k_const, .decls = decls, .is_export = opts.is_export }, loc);
+                // When HMR is enabled, replace all const/let exports with var
+                const kind = if (p.options.features.hot_module_reloading and opts.is_export) S.Local.Kind.k_var else S.Local.Kind.k_const;
+                return p.s(S.Local{ .kind = kind, .decls = decls, .is_export = opts.is_export }, loc);
             },
             .t_if => {
                 try p.lexer.next();
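Exported `const` declarations (and `export let`, in `parseExprOrLetStmt` in the next hunk) are downgraded to `var` when HMR is on: the generated `_update` hook has to reassign the module-local bindings when fresh exports arrive, and `const` would make that a runtime `TypeError`. In effect:

```ts
// Source:  export const count = 1;
// Emitted: the binding stays writable...
var count = 1;
// ...because the update hook reassigns it on every hot replacement:
function _update(exports: { count: number }) {
  count = exports.count; // would throw if `count` were const
}
```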
@@ -6009,7 +6101,7 @@ pub fn NewParser(
     pub fn parseExprOrLetStmt(p: *P, opts: *ParseStatementOptions) !ExprOrLetStmt {
         var let_range = p.lexer.range();
         var raw = p.lexer.raw();
-        if (p.lexer.token != .t_identifier or !strings.eql(raw, "let")) {
+        if (p.lexer.token != .t_identifier or !strings.eqlComptime(raw, "let")) {
             // Output.print("HI", .{});
             return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .expr = try p.parseExpr(.lowest) } };
         }
@@ -6027,7 +6119,8 @@ pub fn NewParser(
         return ExprOrLetStmt{
             .stmt_or_expr = js_ast.StmtOrExpr{
                 .stmt = p.s(S.Local{
-                    .kind = .k_let,
+                    // Replace all "export let" with "export var" when HMR is enabled
+                    .kind = if (opts.is_export and p.options.features.hot_module_reloading) .k_var else .k_let,
                     .decls = decls,
                     .is_export = opts.is_export,
                 }, let_range.loc),
@@ -11391,6 +11484,8 @@ pub fn NewParser(
                 }
             }
         }
+
+        if (p.options.features.hot_module_reloading) {}
     }

     return null;
@@ -13202,7 +13297,17 @@ pub fn NewParser(
         p.import_records_for_current_part.shrinkRetainingCapacity(0);
         p.declared_symbols.shrinkRetainingCapacity(0);

-        var result = try ImportScanner.scan(p, part.stmts);
+        var result = if (p.options.features.hot_module_reloading) try ImportScanner.scan(
+            *P,
+            p,
+            part.stmts,
+            true,
+        ) else try ImportScanner.scan(
+            *P,
+            p,
+            part.stmts,
+            false,
+        );
         kept_import_equals = kept_import_equals or result.kept_import_equals;
         removed_import_equals = removed_import_equals or result.removed_import_equals;
         part.import_record_indices = part.import_record_indices;
@@ -13282,6 +13387,341 @@ pub fn NewParser(
             },
             logger.Loc.Empty,
         );
+        } else if (p.options.features.hot_module_reloading) {
+            var named_exports_count: usize = p.named_exports.count();
+
+            // To transform to something HMR'able, we must:
+            // 1. Wrap the top level code in an IIFE
+            // 2. Move imports to the top of the file (preserving the order)
+            // 3. Remove export clauses (done during ImportScanner)
+            // 4. Move export * from and export from to the bottom of the file (or the top, it doesn't matter I don't think)
+            // 5. Export everything as getters in our HMR module
+            // 6. Call the HMRModule's exportAll function like so:
+            // __hmrModule.exportAll({
+            //   exportAlias: () => identifier,
+            //   exportAlias: () => identifier,
+            // });
+            // This has the unfortunate property of making property accesses of exports slower at runtime.
+            // But, I'm not sure there's a way to use regular properties without breaking stuff.
+            var imports_count: usize = 0;
+            // We have to also move export from, since we will preserve those
+            var exports_from_count: usize = 0;
+            // Two passes. First pass just counts.
+            for (parts[parts.len - 1].stmts) |stmt, i| {
+                imports_count += switch (stmt.data) {
+                    .s_import => @as(usize, 1),
+                    else => @as(usize, 0),
+                };
+                exports_from_count += switch (stmt.data) {
+                    .s_export_star, .s_export_from => @as(usize, 1),
+                    else => @as(usize, 0),
+                };
+            }
+            var part = &parts[parts.len - 1];
+
+            const end_iife_stmts_count = part.stmts.len - imports_count - exports_from_count + 1;
+            // Why 7?
+            // 1. HMRClient.activate(${isDebug});
+            // 2. var __hmrModule = new HMMRModule(id, file_path), __exports = __hmrModule.exports;
+            // 3. (__hmrModule.load = function() {
+            //      ${end_iffe_stmts_count - 1}
+            //      ${end_iffe_stmts_count}
+            //      __hmrModule.exportAll({exportAlias: () => identifier}) <-- ${named_exports_count}
+            //    ();
+            // 4. var __hmrExport_exportName = __hmrModule.exports.exportName,
+            // 5. export { __hmrExport_exportName as blah, ... }
+            // 6. __hmrModule.onSetExports = (newExports) => {
+            //      $named_exports_count __hmrExport_exportName = newExports.exportName; <-- ${named_exports_count}
+            //    }
+            var _stmts = p.allocator.alloc(
+                Stmt,
+                end_iife_stmts_count + 6 + (named_exports_count * 2) + imports_count + exports_from_count,
+            ) catch unreachable;
+            // Normally, we'd have to grow that inner function's stmts list by one
+            // But we can avoid that by just making them all use this same array.
+            var curr_stmts = _stmts;
+            // Second pass: move any imports from the part's stmts array to the new stmts
+            var imports_list = curr_stmts[0..imports_count];
+            curr_stmts = curr_stmts[imports_list.len..];
+            var toplevel_stmts = curr_stmts[0..6];
+            curr_stmts = curr_stmts[toplevel_stmts.len..];
+            var exports_from = curr_stmts[0..exports_from_count];
+            curr_stmts = curr_stmts[exports_from.len..];
+            var stmts_for_top_part = _stmts[0 .. imports_list.len + toplevel_stmts.len + exports_from.len];
+            // This is used for onSetExports
+            var update_function_stmts = curr_stmts[0..named_exports_count];
+            curr_stmts = curr_stmts[update_function_stmts.len..];
+            var export_all_function_body_stmts = curr_stmts[0..named_exports_count];
+            curr_stmts = curr_stmts[export_all_function_body_stmts.len..];
+            // This is the original part statements + 1
+            var part_stmts = curr_stmts;
+            std.debug.assert(part_stmts.len == end_iife_stmts_count);
+
+            if (imports_list.len > 0 or exports_from.len > 0) {
+                var import_list_i: usize = 0;
+                var part_stmts_i: usize = 0;
+                var export_list_i: usize = 0;
+                for (part.stmts) |stmt, i| {
+                    switch (stmt.data) {
+                        .s_import => {
+                            imports_list[import_list_i] = stmt;
+                            import_list_i += 1;
+                        },
+                        .s_export_star, .s_export_from => {
+                            exports_from[export_list_i] = stmt;
+                            export_list_i += 1;
+                        },
+                        else => {
+                            part_stmts[part_stmts_i] = stmt;
+                            part_stmts_i += 1;
+                        },
+                    }
+                }
+            }
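All of the generated statements live in one flat allocation: `_stmts` is carved into disjoint slices for the hoisted imports, the six top-level wrapper statements, the preserved re-exports, the `_update` body, the `exportAll` getter bodies, and the wrapped originals (with one extra slot for the trailing `exportAll` call). The partition arithmetic in miniature:

```ts
// Mirrors the slice bookkeeping above: each region is a window into one buffer.
function partition(totalStmts: number, imports: number, exportsFrom: number, namedExports: number) {
  const iife = totalStmts - imports - exportsFrom + 1; // +1 for the exportAll() call
  let offset = 0;
  const slice = (len: number) => [offset, (offset += len)] as const;
  return {
    size: iife + 6 + namedExports * 2 + imports + exportsFrom,
    imports: slice(imports),
    toplevel: slice(6),
    exportsFrom: slice(exportsFrom),
    updateFn: slice(namedExports),         // bodies for __hmrModule._update
    exportAllGetters: slice(namedExports), // one `() => ident` body each
    iife: slice(iife),                     // original statements + exportAll call
  };
}
```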
+            var args_list: []Expr = if (isDebug) &Prefill.HotModuleReloading.DebugEnabledArgs else &Prefill.HotModuleReloading.DebugDisabled;
+            var call_args = try p.allocator.alloc(Expr, 3);
+            var new_call_args = call_args[0..2];
+            var hmr_module_ident = p.e(E.Identifier{ .ref = p.hmr_module_ref }, logger.Loc.Empty);
+
+            new_call_args[0] = p.e(E.Number{ .value = @intToFloat(f64, p.options.filepath_hash_for_hmr) }, logger.Loc.Empty);
+            // This helps us provide better error messages
+            new_call_args[1] = p.e(E.String{ .utf8 = p.source.path.pretty }, logger.Loc.Empty);
+            var exports_dot = p.e(E.Dot{
+                .target = hmr_module_ident,
+                .name = ExportsStringName,
+                .name_loc = logger.Loc.Empty,
+            }, logger.Loc.Empty);
+            var hmr_module_class_ident = p.e(E.Identifier{ .ref = p.runtime_imports.__HMRClient.? }, logger.Loc.Empty);
+            // HMRClient.activate(true)
+            toplevel_stmts[0] = p.s(
+                S.SExpr{
+                    .value = p.e(E.Call{
+                        .target = p.e(E.Dot{
+                            .target = hmr_module_class_ident,
+                            .name = "activate",
+                            .name_loc = logger.Loc.Empty,
+                        }, logger.Loc.Empty),
+
+                        .args = args_list,
+                    }, logger.Loc.Empty),
+                },
+                logger.Loc.Empty,
+            );
+            var decls = try p.allocator.alloc(G.Decl, 2 + named_exports_count);
+            var first_decl = decls[0..2];
+            // We cannot rely on import.meta.url because if we import it within a blob: url, it will be nonsensical
+            // var __hmrModule = new HMRModule(123123124, "/index.js"), __exports = __hmrModule.exports;
+            first_decl[0] = G.Decl{
+                .binding = p.b(B.Identifier{ .ref = p.hmr_module_ref }, logger.Loc.Empty),
+                .value = p.e(E.New{
+                    .args = new_call_args,
+                    .target = p.e(E.Identifier{ .ref = p.runtime_imports.__HMRModule.? }, logger.Loc.Empty),
+                }, logger.Loc.Empty),
+            };
+            first_decl[1] = G.Decl{
+                .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty),
+                .value = p.e(E.Dot{
+                    .target = p.e(E.Identifier{ .ref = p.hmr_module_ref }, logger.Loc.Empty),
+                    .name = "exports",
+                    .name_loc = logger.Loc.Empty,
+                }, logger.Loc.Empty),
+            };
+
+            var export_clauses = try p.allocator.alloc(js_ast.ClauseItem, named_exports_count);
+            var named_export_i: usize = 0;
+            var decl_i: usize = 1;
+            var named_exports_iter = p.named_exports.iterator();
+            var export_properties = try p.allocator.alloc(G.Property, named_exports_count);
+
+            var export_name_string_length: usize = 0;
+            while (named_exports_iter.next()) |named_export| {
+                export_name_string_length += named_export.key_ptr.len + "$$hmr_".len;
+            }
+
+            var export_name_string_all = try p.allocator.alloc(u8, export_name_string_length);
+            var export_name_string_remainder = export_name_string_all;
+            var hmr_module_exports_dot = p.e(
+                E.Dot{
+                    .target = hmr_module_ident,
+                    .name = "exports",
+                    .name_loc = logger.Loc.Empty,
+                },
+                logger.Loc.Empty,
+            );
+            var exports_decls = decls[first_decl.len..];
+            named_exports_iter = p.named_exports.iterator();
+            var update_function_args = try p.allocator.alloc(G.Arg, 1);
+            var exports_ident = p.e(E.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty);
+            update_function_args[0] = G.Arg{ .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty) };
+            while (named_exports_iter.next()) |named_export| {
+                var export_name_string = export_name_string_remainder[0 .. named_export.key_ptr.len + "$$hmr_".len];
+                export_name_string_remainder = export_name_string_remainder[export_name_string.len..];
+                std.mem.copy(u8, export_name_string, "$$hmr_");
+                std.mem.copy(u8, export_name_string["$$hmr_".len..], named_export.key_ptr.*);
+
+                var name_ref = try p.declareSymbol(.other, logger.Loc.Empty, export_name_string);
+
+                var body_stmts = export_all_function_body_stmts[named_export_i .. named_export_i + 1];
+                body_stmts[0] = p.s(
+                    S.Return{ .value = p.e(E.Identifier{
+                        .ref = named_export.value_ptr.ref,
+                    }, logger.Loc.Empty) },
+                    logger.Loc.Empty,
+                );
+                export_clauses[named_export_i] = js_ast.ClauseItem{
+                    .original_name = "",
+                    .alias = named_export.key_ptr.*,
+                    .alias_loc = named_export.value_ptr.alias_loc,
+                    .name = .{ .ref = name_ref, .loc = logger.Loc.Empty },
+                };
+
+                var decl_value = p.e(
+                    E.Dot{ .target = hmr_module_exports_dot, .name = named_export.key_ptr.*, .name_loc = logger.Loc.Empty },
+                    logger.Loc.Empty,
+                );
+                exports_decls[named_export_i] = G.Decl{
+                    .binding = p.b(B.Identifier{ .ref = name_ref }, logger.Loc.Empty),
+                    .value = decl_value,
+                };
+
+                update_function_stmts[named_export_i] = Expr.assignStmt(
+                    p.e(
+                        E.Identifier{ .ref = name_ref },
+                        logger.Loc.Empty,
+                    ),
+                    p.e(E.Dot{
+                        .target = exports_ident,
+                        .name = named_export.key_ptr.*,
+                        .name_loc = logger.Loc.Empty,
+                    }, logger.Loc.Empty),
+                    p.allocator,
+                );
+
+                export_properties[named_export_i] = G.Property{
+                    .key = p.e(E.String{ .utf8 = named_export.key_ptr.* }, logger.Loc.Empty),
+                    .value = p.e(
+                        E.Arrow{
+                            .args = &[_]G.Arg{},
+                            .body = .{
+                                .stmts = body_stmts,
+                                .loc = logger.Loc.Empty,
+                            },
+                            .prefer_expr = true,
+                        },
+                        logger.Loc.Empty,
+                    ),
+                };
+                named_export_i += 1;
+            }
+            var export_all_args = call_args[new_call_args.len..];
+            export_all_args[0] = p.e(
+                E.Object{ .properties = export_properties },
+                logger.Loc.Empty,
+            );
+
+            part_stmts[part_stmts.len - 1] = p.s(
+                S.SExpr{
+                    .value = p.e(
+                        E.Call{
+                            .target = p.e(
+                                E.Dot{
+                                    .target = hmr_module_ident,
+                                    .name = "exportAll",
+                                    .name_loc = logger.Loc.Empty,
+                                },
+                                logger.Loc.Empty,
+                            ),
+                            .args = export_all_args,
+                        },
+                        logger.Loc.Empty,
+                    ),
+                },
+                logger.Loc.Empty,
+            );
+
+            toplevel_stmts[1] = p.s(
+                S.Local{
+                    .decls = first_decl,
+                },
+                logger.Loc.Empty,
+            );
+
+            var func = p.e(
+                E.Function{
+                    .func = .{
+                        .body = .{ .loc = logger.Loc.Empty, .stmts = part_stmts },
+                        .name = null,
+                        .open_parens_loc = logger.Loc.Empty,
+                        .flags = .{
+                            .print_as_iife = true,
+                        },
+                    },
+                },
+                logger.Loc.Empty,
+            );
+
+            // (__hmrModule._load = function())()
+            toplevel_stmts[2] = p.s(
+                S.SExpr{ .value = p.e(
+                    E.Call{
+                        .target = Expr.assign(p.e(
+                            E.Dot{
+                                .name = "_load",
+                                .target = hmr_module_ident,
+                                .name_loc = logger.Loc.Empty,
+                            },
+                            logger.Loc.Empty,
+                        ), func, p.allocator),
+                    },
+                    logger.Loc.Empty,
+                ) },
+                logger.Loc.Empty,
+            );
+
+            toplevel_stmts[3] = p.s(
+                S.Local{
+                    .decls = exports_decls,
+                },
+                logger.Loc.Empty,
+            );
+            toplevel_stmts[4] = p.s(
+                S.SExpr{
+                    .value = Expr.assign(
+                        p.e(
+                            E.Dot{
+                                .name = "_update",
+                                .target = hmr_module_ident,
+                                .name_loc = logger.Loc.Empty,
+                            },
+                            logger.Loc.Empty,
+                        ),
+                        p.e(
+                            E.Function{
+                                .func = .{
+                                    .body = .{ .loc = logger.Loc.Empty, .stmts = update_function_stmts },
+                                    .name = null,
+                                    .args = update_function_args,
+                                    .open_parens_loc = logger.Loc.Empty,
+                                },
+                            },
+                            logger.Loc.Empty,
+                        ),
+                        p.allocator,
+                    ),
+                },
+                logger.Loc.Empty,
+            );
+            toplevel_stmts[5] = p.s(
+                S.ExportClause{
+                    .items = export_clauses,
+                },
+                logger.Loc.Empty,
+            );
+
+            part.stmts = stmts_for_top_part;
         }

         {
@@ -13437,6 +13877,7 @@ pub fn NewParser(
             .needs_jsx_import = if (only_scan_imports_and_do_not_visit) false else NeedsJSXType{},
             .lexer = lexer,
+            .hmr_exports_list = @TypeOf(this.hmr_exports_list).init(allocator),
         };

         if (!only_scan_imports_and_do_not_visit) {
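Putting the whole transform together for a one-export module, the emitted shape looks roughly like this (a sketch assembled from the statements built above, not actual printer output; `__HMRModule`/`__HMRClient` come from the injected runtime import, and the id/path literals are the illustrative ones from the comment in this diff):

```ts
// Source:
//   import { helper } from "./helper";
//   export const label = helper();
import { helper } from "./helper";
declare var __HMRModule: any, __HMRClient: any;

__HMRClient.activate(false); // DebugDisabled args
var __hmrModule = new __HMRModule(123123124, "/index.js"),
  exports = __hmrModule.exports;
(__hmrModule._load = function () {
  var label = helper(); // `export const` downgraded to plain var
  __hmrModule.exportAll({
    label: () => label, // getter keeps the live binding visible
  });
})();
var $$hmr_label = __hmrModule.exports.label;
__hmrModule._update = function (exports) {
  $$hmr_label = exports.label; // rebind on hot replacement
};
export { $$hmr_label as label };
```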
diff --git a/src/js_printer.zig b/src/js_printer.zig
index f45feea91..983e46b6b 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -3701,6 +3701,56 @@ const FileWriterInternal = struct {
     ) anyerror!void {}
 };

+pub const BufferWriter = struct {
+    buffer: MutableString = undefined,
+    written: []const u8 = "",
+
+    pub fn init(allocator: *std.mem.Allocator) !BufferWriter {
+        return BufferWriter{
+            .buffer = MutableString.init(
+                allocator,
+                0,
+            ) catch unreachable,
+        };
+    }
+    pub fn writeByte(ctx: *BufferWriter, byte: u8) anyerror!usize {
+        try ctx.buffer.appendChar(byte);
+        return 1;
+    }
+    pub fn writeAll(ctx: *BufferWriter, bytes: anytype) anyerror!usize {
+        try ctx.buffer.append(bytes);
+        return bytes.len;
+    }
+
+    pub fn getLastByte(ctx: *const BufferWriter) u8 {
+        return if (ctx.buffer.list.items.len > 0) ctx.buffer.list.items[ctx.buffer.list.items.len - 1] else 0;
+    }
+
+    pub fn getLastLastByte(ctx: *const BufferWriter) u8 {
+        return if (ctx.buffer.list.items.len > 1) ctx.buffer.list.items[ctx.buffer.list.items.len - 2] else 0;
+    }
+
+    pub fn reset(ctx: *BufferWriter) void {
+        ctx.buffer.reset();
+    }
+
+    pub fn done(
+        ctx: *BufferWriter,
+    ) anyerror!void {
+        ctx.written = ctx.buffer.toOwnedSliceLeaky();
+    }
+
+    pub fn flush(
+        ctx: *BufferWriter,
+    ) anyerror!void {}
+};
+pub const BufferPrinter = NewWriter(
+    BufferWriter,
+    BufferWriter.writeByte,
+    BufferWriter.writeAll,
+    BufferWriter.getLastByte,
+    BufferWriter.getLastLastByte,
+);
 pub const FileWriter = NewWriter(FileWriterInternal, FileWriterInternal.writeByte, FileWriterInternal.writeAll, FileWriterInternal.getLastByte, FileWriterInternal.getLastLastByte);
 pub fn NewFileWriter(file: std.fs.File) FileWriter {
     var internal = FileWriterInternal.init(file);
diff --git a/src/linker.zig b/src/linker.zig
index 71f954774..0d6b78e08 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -96,14 +96,19 @@ pub fn NewLinker(comptime BundlerType: type) type {
             for (result.ast.import_records) |*import_record, _record_index| {
                 const record_index = @truncate(u32, _record_index);
                 if (strings.eqlComptime(import_record.path.text, Runtime.Imports.Name)) {
-                    import_record.path = try linker.generateImportPath(
-                        source_dir,
-                        linker.runtime_source_path,
-                        Runtime.version(),
-                        import_path_format,
-                    );
-                    result.ast.runtime_import_record_id = record_index;
-                    result.ast.needs_runtime = true;
+                    // runtime is included in the bundle, so we don't need to dynamically import it
+                    if (linker.options.node_modules_bundle) |node_modules_bundle| {
+                        import_record.path.text = node_modules_bundle.bundle.import_from_name;
+                    } else {
+                        import_record.path = try linker.generateImportPath(
+                            source_dir,
+                            linker.runtime_source_path,
+                            Runtime.version(),
+                            import_path_format,
+                        );
+                        result.ast.runtime_import_record_id = record_index;
+                        result.ast.needs_runtime = true;
+                    }
                     continue;
                 }
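`BufferPrinter` is the printer writer `WatchBuilder` uses: instead of a file, it prints into a growable in-memory `MutableString`, and `done()` exposes the finished bytes as `written` for the WebSocket path. The essence, in TypeScript:

```ts
// Minimal in-memory sink in the spirit of js_printer.BufferWriter.
class MemoryWriter {
  private buf: number[] = [];
  written: Uint8Array = new Uint8Array(0);

  writeByte(byte: number) { this.buf.push(byte & 0xff); }
  writeAll(bytes: Uint8Array) { for (const b of bytes) this.buf.push(b); }
  // The printer peeks at the last bytes for newline/semicolon decisions.
  getLastByte(): number { return this.buf[this.buf.length - 1] ?? 0; }
  getLastLastByte(): number { return this.buf[this.buf.length - 2] ?? 0; }
  reset() { this.buf.length = 0; }
  done() { this.written = Uint8Array.from(this.buf); }
}
```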
allocator.alloc(Api.MessageData, notes.len); + for (notes) |note, i| { + msg.notes[i] = note.toAPI(); + } + } + } + + return msg; + } + pub fn toAPIFromList(comptime ListType: type, list: ListType, allocator: *std.mem.Allocator) ![]Api.Message { + var out_list = try allocator.alloc(Api.Msg, list.items.len); + for (list.items) |item, i| { + out_list[i] = try item.toAPI(allocator); + } + + return out_list; + } + pub fn deinit(msg: *Msg, allocator: *std.mem.Allocator) void { msg.data.deinit(allocator); if (msg.notes) |notes| { @@ -231,6 +268,14 @@ pub const Log = struct { msgs: ArrayList(Msg), level: Level = Level.debug, + pub fn toAPI(this: *const Log, allocator: *std.mem.Allocator) !Api.Log { + return Api.Log{ + .warnings = this.warnings, + .errors = this.errors, + .msgs = try Msg.toAPIFromList(@TypeOf(this.msgs), this.msgs, allocator), + }; + } + pub const Level = enum { verbose, debug, diff --git a/src/options.zig b/src/options.zig index 4b45acdf2..799bb9a08 100644 --- a/src/options.zig +++ b/src/options.zig @@ -340,6 +340,18 @@ pub const Loader = enum { file, json, + pub fn toAPI(loader: Loader) Api.Loader { + return switch (loader) { + .jsx => .jsx, + .js => .js, + .ts => .ts, + .tsx => .tsx, + .css => .css, + .json => .json, + else => .file, + }; + } + pub fn isJSX(loader: Loader) bool { return loader == .jsx or loader == .tsx; } @@ -472,6 +484,14 @@ pub const Timings = struct { }; pub const DefaultUserDefines = struct { + pub const HotModuleReloading = struct { + pub const Key = "process.env.SPEEDY_HMR_ENABLED"; + pub const Value = "true"; + }; + pub const HotModuleReloadingVerbose = struct { + pub const Key = "process.env.SPEEDY_HMR_VERBOSE"; + pub const Value = "true"; + }; // This must be globally scoped so it doesn't disappear pub const NodeEnv = struct { pub const Key = "process.env.NODE_ENV"; @@ -479,7 +499,7 @@ pub const DefaultUserDefines = struct { }; }; -pub fn definesFromTransformOptions(allocator: *std.mem.Allocator, log: *logger.Log, _input_define: ?Api.StringMap) !*defines.Define { +pub fn definesFromTransformOptions(allocator: *std.mem.Allocator, log: *logger.Log, _input_define: ?Api.StringMap, hmr: bool) !*defines.Define { var input_user_define = _input_define orelse std.mem.zeroes(Api.StringMap); var user_defines = try stringHashMapFromArrays( @@ -492,6 +512,10 @@ pub fn definesFromTransformOptions(allocator: *std.mem.Allocator, log: *logger.L try user_defines.put(DefaultUserDefines.NodeEnv.Key, DefaultUserDefines.NodeEnv.Value); } + if (hmr) { + try user_defines.put(DefaultUserDefines.HotModuleReloading.Key, DefaultUserDefines.HotModuleReloading.Value); + } + var resolved_defines = try defines.DefineData.from_input(user_defines, log, allocator); return try defines.Define.init( allocator, @@ -595,7 +619,7 @@ pub const BundleOptions = struct { var opts: BundleOptions = BundleOptions{ .log = log, .resolve_mode = transform.resolve orelse .dev, - .define = try definesFromTransformOptions(allocator, log, transform.define), + .define = try definesFromTransformOptions(allocator, log, transform.define, transform.serve orelse false), .loaders = try loadersFromTransformOptions(allocator, transform.loaders), .output_dir = try fs.absAlloc(allocator, &output_dir_parts), .platform = Platform.from(transform.platform), diff --git a/src/runtime.zig b/src/runtime.zig index f6a15da98..4fd1354c9 100644 --- a/src/runtime.zig +++ b/src/runtime.zig @@ -23,9 +23,14 @@ pub const Runtime = struct { pub const Features = struct { react_fast_refresh: bool = false, hot_module_reloading: bool = 
false, + hot_module_reloading_entry: bool = false, keep_names_for_arrow_functions: bool = true, }; + pub const Names = struct { + pub const ActivateFunction = "activate"; + }; + pub const Imports = struct { __name: ?Ref = null, __toModule: ?Ref = null, @@ -37,6 +42,8 @@ pub const Runtime = struct { load_from_bundle: ?Ref = null, register: ?Ref = null, lazy_export: ?Ref = null, + __HMRModule: ?Ref = null, + __HMRClient: ?Ref = null, pub const all = [_][]const u8{ "__name", @@ -51,6 +58,8 @@ pub const Runtime = struct { // "register", "lazy_export", + "__HMRModule", + "__HMRClient", }; pub const Name = "<RUNTIME"; @@ -119,6 +128,16 @@ pub const Runtime = struct { return Entry{ .key = 9, .value = val }; } }, + 10 => { + if (@field(this.runtime_imports, all[10])) |val| { + return Entry{ .key = 10, .value = val }; + } + }, + 11 => { + if (@field(this.runtime_imports, all[11])) |val| { + return Entry{ .key = 11, .value = val }; + } + }, else => { return null; @@ -174,6 +193,8 @@ pub const Runtime = struct { 7 => @field(imports, all[7]), 8 => @field(imports, all[8]), 9 => @field(imports, all[9]), + 10 => @field(imports, all[10]), + 11 => @field(imports, all[11]), else => null, }; } diff --git a/src/runtime/hmr.ts b/src/runtime/hmr.ts index f6d540b5a..10afeb8ea 100644 --- a/src/runtime/hmr.ts +++ b/src/runtime/hmr.ts @@ -1,14 +1,15 @@ import { ByteBuffer } from "peechy/bb"; -import * as Schema from "../api/schema"; +import * as API from "../api/schema"; var runOnce = false; var clientStartTime = 0; function formatDuration(duration: number) { - return Math.round(duration * 100000) / 100; + return Math.round(duration * 1000) / 1000; } -export class Client { +class HMRClient { + static client: HMRClient; socket: WebSocket; hasWelcomed: boolean = false; reconnect: number = 0; @@ -18,8 +19,8 @@ export class Client { start() { if (runOnce) { - console.warn( - "[speedy] Attempted to start HMR client multiple times. This may be a bug." + __hmrlog.warn( + "Attempted to start HMR client multiple times. This may be a bug." ); return; } @@ -30,10 +31,12 @@ export class Client { connect() { clientStartTime = performance.now(); - - this.socket = new WebSocket("/_api", ["speedy-hmr"]); + const baseURL = new URL(location.origin + "/_api"); + baseURL.protocol = location.protocol === "https" ? "wss" : "ws"; + this.socket = new WebSocket(baseURL.toString(), ["speedy-hmr"]); this.socket.binaryType = "arraybuffer"; this.socket.onclose = this.handleClose; + this.socket.onerror = this.handleError; this.socket.onopen = this.handleOpen; this.socket.onmessage = this.handleMessage; } @@ -43,7 +46,18 @@ export class Client { builds = new Map<number, number>(); indexOfModuleId(id: number): number { - return Module.dependencies.graph.indexOf(id); + return HMRModule.dependencies.graph.indexOf(id); + } + + static activate(verbose: boolean = false) { + if (this.client) { + return; + } + + this.client = new HMRClient(); + this.client.verbose = verbose; + this.client.start(); + globalThis["SPEEDY_HMR"] = this.client; } handleBuildFailure(buffer: ByteBuffer, timestamp: number) { @@ -65,12 +79,21 @@ export class Client { if (!minTimestamp) { return; } - const fail = Schema.decodeWebsocketMessageBuildFailure(buffer); + const fail = API.decodeWebsocketMessageBuildFailure(buffer); // TODO: finish this. 
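// An illustrative way the TODO above could be finished (not something this
// commit ships, and it assumes each entry in fail.log.msgs exposes its text
// as msg.data.text):
//
//   for (const msg of fail.log.msgs) {
//     __hmrlog.error(msg.data.text);
//   }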
- console.error("[speedy] Build failed", fail.module_path); + __hmrlog.error("Build failed", fail.module_path); } - verbose = process.env.SPEEDY_HMR_VERBOSE; + verbose = false; + + handleError = (error: ErrorEvent) => { + __hmrlog.error("Websocket error", error.error); + if (this.reconnect !== 0) { + return; + } + + this.reconnect = setInterval(this.connect, 500) as any as number; + }; handleBuildSuccess(buffer: ByteBuffer, timestamp: number) { // 0: ID @@ -84,8 +107,8 @@ export class Client { // Ignore builds of modules that are not loaded if (index === -1) { if (this.verbose) { - console.debug( - `[speedy] Skipping reload for unknown module id:`, + __hmrlog.debug( + `Skipping reload for unknown module id:`, header_data[0] ); } @@ -97,8 +120,8 @@ export class Client { const currentVersion = this.builds.get(header_data[0]) || -Infinity; if (currentVersion > header_data[1]) { if (this.verbose) { - console.debug( - `[speedy] Ignoring module update for "${Module.dependencies.modules[index].url.pathname}" due to timestamp mismatch.\n Expected: >=`, + __hmrlog.debug( + `Ignoring outdated update for "${HMRModule.dependencies.modules[index].file_path}".\n Expected: >=`, currentVersion, `\n Received:`, header_data[1] @@ -108,24 +131,32 @@ export class Client { } if (this.verbose) { - console.debug( - "[speedy] Preparing to reload", - Module.dependencies.modules[index].url.pathname + __hmrlog.debug( + "Preparing to reload", + HMRModule.dependencies.modules[index].file_path ); } - const build = Schema.decodeWebsocketMessageBuildSuccess(buffer); - var reload = new HotReload(header_data[0], index, build); + const build = API.decodeWebsocketMessageBuildSuccess(buffer); + var reload = new HotReload( + header_data[0], + index, + build, + // These are the bytes!! + buffer.data.length > buffer._index + ? 
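// Everything after the decoded header in this frame is the compiled module
// source itself, so slicing at buffer._index hands HotReload the bytes
// without an extra copy (or an empty view when the frame carries nothing).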
buffer.data.subarray(buffer._index) + : new Uint8Array(0) + ); reload.timings.notify = timestamp - build.from_timestamp; reload.run().then( ([module, timings]) => { - console.log( - `[speedy] Reloaded in ${formatDuration(timings.total)}ms :`, - module.url.pathname + __hmrlog.log( + `Reloaded in ${formatDuration(timings.total)}ms :`, + module.file_path ); }, (err) => { - console.error("[speedy] Hot Module Reload failed!", err); + __hmrlog.error("Hot Module Reload failed!", err); debugger; } ); @@ -133,39 +164,40 @@ export class Client { handleFileChangeNotification(buffer: ByteBuffer, timestamp: number) { const notification = - Schema.decodeWebsocketMessageFileChangeNotification(buffer); - const index = Module.dependencies.graph.indexOf(notification.id); + API.decodeWebsocketMessageFileChangeNotification(buffer); + const index = HMRModule.dependencies.graph.indexOf(notification.id); if (index === -1) { if (this.verbose) { - console.debug("[speedy] Unknown module changed, skipping"); + __hmrlog.debug("Unknown module changed, skipping"); } return; } if ((this.builds.get(notification.id) || -Infinity) > timestamp) { - console.debug( - `[speedy] Received update for ${Module.dependencies.modules[index].url.pathname}` + __hmrlog.debug( + `Received update for ${HMRModule.dependencies.modules[index].file_path}` ); return; } if (this.verbose) { - console.debug( - `[speedy] Requesting update for ${Module.dependencies.modules[index].url.pathname}` + __hmrlog.debug( + `Requesting update for ${HMRModule.dependencies.modules[index].file_path}` ); } this.builds.set(notification.id, timestamp); - this.buildCommandBuf[0] = Schema.WebsocketCommandKind.build; + this.buildCommandBuf[0] = API.WebsocketCommandKind.build; this.buildCommandUArray[0] = timestamp; - this.buildCommandBuf.set(new Uint8Array(this.buildCommandUArray), 1); + this.buildCommandBuf.set(this.buildCommandUArrayEight, 1); this.buildCommandUArray[0] = notification.id; - this.buildCommandBuf.set(new Uint8Array(this.buildCommandUArray), 5); + this.buildCommandBuf.set(this.buildCommandUArrayEight, 5); this.socket.send(this.buildCommandBuf); } buildCommandBuf = new Uint8Array(9); buildCommandUArray = new Uint32Array(1); + buildCommandUArrayEight = new Uint8Array(this.buildCommandUArray.buffer); handleOpen = (event: Event) => { globalThis.clearInterval(this.reconnect); @@ -175,37 +207,37 @@ export class Client { handleMessage = (event: MessageEvent) => { const data = new Uint8Array(event.data); const message_header_byte_buffer = new ByteBuffer(data); - const header = Schema.decodeWebsocketMessage(message_header_byte_buffer); + const header = API.decodeWebsocketMessage(message_header_byte_buffer); const buffer = new ByteBuffer( data.subarray(message_header_byte_buffer._index) ); switch (header.kind) { - case Schema.WebsocketMessageKind.build_fail: { + case API.WebsocketMessageKind.build_fail: { this.handleBuildFailure(buffer, header.timestamp); break; } - case Schema.WebsocketMessageKind.build_success: { + case API.WebsocketMessageKind.build_success: { this.handleBuildSuccess(buffer, header.timestamp); break; } - case Schema.WebsocketMessageKind.file_change_notification: { + case API.WebsocketMessageKind.file_change_notification: { this.handleFileChangeNotification(buffer, header.timestamp); break; } - case Schema.WebsocketMessageKind.welcome: { + case API.WebsocketMessageKind.welcome: { const now = performance.now(); - console.log( - "[speedy] HMR connected in", + __hmrlog.log( + "HMR connected in", formatDuration(now - clientStartTime), "ms" ); 
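// (On the wire, the "build" request assembled in handleFileChangeNotification
// above is a fixed 9-byte frame: byte 0 is the WebsocketCommandKind, bytes
// 1-4 the u32 timestamp, bytes 5-8 the u32 module id. Both integers pass
// through buildCommandUArrayEight, a Uint8Array view over the scratch
// Uint32Array, so nothing is allocated per message; byte order is therefore
// the platform's native order, which the server is assumed to share.)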
clientStartTime = now; this.hasWelcomed = true; - const welcome = Schema.decodeWebsocketMessageWelcome(buffer); + const welcome = API.decodeWebsocketMessageWelcome(buffer); this.epoch = welcome.epoch; if (!this.epoch) { - console.warn("[speedy] Internal HMR error"); + __hmrlog.warn("Internal HMR error"); } break; } @@ -218,14 +250,16 @@ export class Client { } this.reconnect = setInterval(this.connect, 500) as any as number; - console.warn("[speedy] HMR disconnected. Attempting to reconnect."); + __hmrlog.warn("HMR disconnected. Attempting to reconnect."); }; } +export { HMRClient as __HMRClient }; + class HotReload { module_id: number = 0; module_index: number = 0; - build: Schema.WebsocketMessageBuildSuccess; + build: API.WebsocketMessageBuildSuccess; timings = { notify: 0, decode: 0, @@ -234,39 +268,51 @@ class HotReload { total: 0, start: 0, }; + static VERBOSE = false; + bytes: Uint8Array; constructor( module_id: HotReload["module_id"], module_index: HotReload["module_index"], - build: HotReload["build"] + build: HotReload["build"], + bytes: Uint8Array ) { this.module_id = module_id; this.module_index = module_index; this.build = build; + this.bytes = bytes; } - async run(): Promise<[Module, HotReload["timings"]]> { + async run(): Promise<[HMRModule, HotReload["timings"]]> { const importStart = performance.now(); - let orig_deps = Module.dependencies; - Module.dependencies = orig_deps.fork(this.module_index); + let orig_deps = HMRModule.dependencies; + // we must preserve the updater since that holds references to the real exports. + // this is a fundamental limitation of using esmodules for HMR. + // we cannot export new modules. we can only mutate existing ones. + + HMRModule.dependencies = orig_deps.fork(this.module_index); var blobURL = null; try { - const blob = new Blob([this.build.bytes], { type: "text/javascript" }); + const blob = new Blob([this.bytes], { type: "text/javascript" }); blobURL = URL.createObjectURL(blob); await import(blobURL); this.timings.import = performance.now() - importStart; } catch (exception) { - Module.dependencies = orig_deps; + HMRModule.dependencies = orig_deps; URL.revokeObjectURL(blobURL); + // Ensure we don't keep the bytes around longer than necessary + this.bytes = null; throw exception; } URL.revokeObjectURL(blobURL); + // Ensure we don't keep the bytes around longer than necessary + this.bytes = null; - if (process.env.SPEEDY_HMR_VERBOSE) { - console.debug( - "[speedy] Re-imported", - Module.dependencies.modules[this.module_index].url.pathname, + if (HotReload.VERBOSE) { + __hmrlog.debug( + "Re-imported", + HMRModule.dependencies.modules[this.module_index].file_path, "in", formatDuration(this.timings.import), ". Running callbacks" @@ -277,37 +323,41 @@ class HotReload { try { // ES Modules delay execution until all imports are parsed // They execute depth-first - // If you load N modules and append each module ID to the array, 0 is the *last* module imported. + // If you load N modules and append each module ID to the array, 0 is the *last* unique module imported. // modules.length - 1 is the first. // Therefore, to reload all the modules in the correct order, we traverse the graph backwards // This only works when the graph is up to date. // If the import order changes, we need to regenerate the entire graph // Which sounds expensive, until you realize that we are mostly talking about an array that will be typically less than 1024 elements - // Computers can do that in < 1ms easy! 
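// Concretely (an illustrative trace): if entry.tsx imports button.tsx, which
// imports theme.ts, depth-first execution registers theme first and entry
// last, so dependents always sit at higher graph indices than the modules
// they import. Walking from the top of the graph back down to the edited
// module therefore revisits every module registered after it (a superset of
// its true dependents), preferring dispose()/accept() hooks and falling back
// to _load().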
- for (let i = Module.dependencies.graph_used; i > this.module_index; i--) { - let handled = !Module.dependencies.modules[i].exports.__hmrDisable; - if (typeof Module.dependencies.modules[i].dispose === "function") { - Module.dependencies.modules[i].dispose(); + // Computers can create an array of < 1024 pointer-sized elements in < 1ms easy! + for ( + let i = HMRModule.dependencies.graph_used; + i > this.module_index; + i-- + ) { + let handled = !HMRModule.dependencies.modules[i].exports.__hmrDisable; + if (typeof HMRModule.dependencies.modules[i].dispose === "function") { + HMRModule.dependencies.modules[i].dispose(); handled = true; } - if (typeof Module.dependencies.modules[i].accept === "function") { - Module.dependencies.modules[i].accept(); + if (typeof HMRModule.dependencies.modules[i].accept === "function") { + HMRModule.dependencies.modules[i].accept(); handled = true; } if (!handled) { - Module.dependencies.modules[i]._load(); + HMRModule.dependencies.modules[i]._load(); } } } catch (exception) { - Module.dependencies = orig_deps; + HMRModule.dependencies = orig_deps; throw exception; } this.timings.callbacks = performance.now() - callbacksStart; - if (process.env.SPEEDY_HMR_VERBOSE) { - console.debug( - "[speedy] Ran callbacks", - Module.dependencies.modules[this.module_index].url.pathname, + if (HotReload.VERBOSE) { + __hmrlog.debug( + "Ran callbacks", + HMRModule.dependencies.modules[this.module_index].file_path, "in", formatDuration(this.timings.callbacks), "ms" @@ -318,74 +368,110 @@ class HotReload { this.timings.total = this.timings.import + this.timings.callbacks + this.build.from_timestamp; return Promise.resolve([ - Module.dependencies.modules[this.module_index], + HMRModule.dependencies.modules[this.module_index], this.timings, ]); } } -var client: Client; -if ("SPEEDY_HMR_CLIENT" in globalThis) { - console.warn( - "[speedy] Attempted to load multiple copies of HMR. This may be a bug." - ); -} else if (process.env.SPEEDY_HMR_ENABLED) { - client = new Client(); - client.start(); - globalThis.SPEEDY_HMR_CLIENT = client; -} -export class Module { - constructor(id: number, url: URL) { - // Ensure V8 knows this is a U32 - this.id = id | 0; - this.url = url; +class HMRModule { + constructor(id: number, file_path: string) { + this.id = id; + this.file_path = file_path; - if (!Module._dependencies) { - Module.dependencies = Module._dependencies; + if (!HMRModule.dependencies) { + HMRModule.dependencies = HMRModule._dependencies; } - this.graph_index = Module.dependencies.graph_used++; + this.graph_index = HMRModule.dependencies.graph_used++; // Grow the dependencies graph - if (Module.dependencies.graph.length <= this.graph_index) { - const new_graph = new Uint32Array(Module.dependencies.graph.length * 4); - new_graph.set(Module.dependencies.graph); - Module.dependencies.graph = new_graph; + if (HMRModule.dependencies.graph.length <= this.graph_index) { + const new_graph = new Uint32Array( + HMRModule.dependencies.graph.length * 4 + ); + new_graph.set(HMRModule.dependencies.graph); + HMRModule.dependencies.graph = new_graph; // In-place grow. 
This creates a holey array, which is bad, but less bad than pushing potentially 1000 times - Module.dependencies.modules.length = new_graph.length; + HMRModule.dependencies.modules.length = new_graph.length; + } + + if ( + typeof HMRModule.dependencies.modules[this.graph_index] === "object" && + HMRModule.dependencies.modules[this.graph_index] instanceof HMRModule && + HMRModule.dependencies.modules[this.graph_index].id === id && + typeof HMRModule.dependencies.modules[this.graph_index]._update === + "function" + ) { + this.additional_updaters.push( + HMRModule.dependencies.modules[this.graph_index]._update + ); } - Module.dependencies.modules[this.graph_index] = this; - Module.dependencies.graph[this.graph_index] = this.id | 0; + HMRModule.dependencies.modules[this.graph_index] = this; + HMRModule.dependencies.graph[this.graph_index] = this.id; } + additional_files = []; + additional_updaters = []; + _update: (exports: Object) => void; + update() { + for (let update of this.additional_updaters) { + update(this.exports); + } + + this._update(this.exports); + } - // When a module updates, we need to re-initialize each dependent, recursively - // To do so: - // 1. Track which modules are imported by which *at runtime* - // 2. When A updates, loop through each dependent of A in insertion order - // 3. For each old dependent, call .dispose() if exists - // 3. For each new dependent, call .accept() if exists - // 4. static _dependencies = { - modules: new Array<Module>(32), + modules: new Array<HMRModule>(32), graph: new Uint32Array(32), graph_used: 0, fork(offset: number) { return { - modules: Module._dependencies.modules.slice(), - graph: Module._dependencies.graph.slice(), + modules: HMRModule._dependencies.modules.slice(), + graph: HMRModule._dependencies.graph.slice(), graph_used: offset - 1, }; }, }; - static dependencies: Module["_dependencies"]; - url: URL; + + exportAll(object: Object) { + // object[alias] must be a function + for (let alias in object) { + this._exports[alias] = object[alias]; + Object.defineProperty(this.exports, alias, { + get: this._exports[alias], + configurable: true, + enumerable: true, + }); + } + } + + static dependencies: HMRModule["_dependencies"]; + file_path: string; _load = function () {}; id = 0; graph_index = 0; _exports = {}; exports = {}; } + +var __hmrlog = { + debug(...args) { + console.debug("[speedy]", ...args); + }, + error(...args) { + console.error("[speedy]", ...args); + }, + log(...args) { + console.log("[speedy]", ...args); + }, + warn(...args) { + console.warn("[speedy]", ...args); + }, +}; + +export { HMRModule as __HMRModule }; diff --git a/src/string_mutable.zig b/src/string_mutable.zig index 5dbc687e1..1b63bb9e8 100644 --- a/src/string_mutable.zig +++ b/src/string_mutable.zig @@ -23,6 +23,11 @@ pub const MutableString = struct { try self.list.ensureUnusedCapacity(self.allocator, amount); } + pub fn write(self: *MutableString, bytes: anytype) !usize { + try self.list.appendSlice(self.allocator, bytes); + return bytes.len; + } + pub fn writeAll(self: *MutableString, bytes: string) !usize { try self.list.appendSlice(self.allocator, bytes); return self.list.items.len; diff --git a/src/watcher.zig b/src/watcher.zig index 406703602..6a430a32c 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -2,6 +2,7 @@ const Fs = @import("./fs.zig"); const std = @import("std"); usingnamespace @import("global.zig"); const sync = @import("sync.zig"); +const options = @import("./options.zig"); const os = std.os; const KEvent = std.os.Kevent; @@ -11,6 +12,7 
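For reference, the exportAll method above is what makes those re-exported locals behave like live bindings: each alias is published through a defineProperty getter, so readers always observe the current value of the module-local variable. A minimal consumer-side sketch, with the import path, id, and file path as illustrative placeholders:

import { __HMRModule } from "./hmr";

const mod = new __HMRModule(1, "example.ts");
let hidden = 1;
mod.exportAll({ hidden: () => hidden });
console.log(mod.exports.hidden); // 1
hidden = 2;
console.log(mod.exports.hidden); // 2: the getter re-reads the local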
@@ pub const WatchItem = struct {
// filepath hash for quick comparison
hash: u32,
eventlist_index: u32,
+ loader: options.Loader,
fd: StoredFileDescriptorType,
};
@@ -68,6 +70,10 @@ pub fn NewWatcher(comptime ContextType: type) type {
allocator: *std.mem.Allocator,
watchloop_handle: ?u64 = null,
+ pub fn getHash(filepath: string) u32 {
+ return @truncate(u32, std.hash.Wyhash.hash(0, filepath));
+ }
+
pub fn init(ctx: ContextType, fs: *Fs.FileSystem, allocator: *std.mem.Allocator) !*Watcher {
var watcher = try allocator.create(Watcher);
watcher.* = Watcher{
@@ -155,9 +161,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
this.ctx.onFileUpdate(watchevents, this.watchlist);
}
}
- pub fn getHash(filepath: string) u32 {
- return @truncate(u32, std.hash.Wyhash.hash(0, filepath));
- }
+
pub fn indexOf(this: *Watcher, hash: u32) ?usize {
for (this.watchlist.items(.hash)) |other, i| {
if (hash == other) {
@@ -172,6 +176,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
fd: StoredFileDescriptorType,
file_path: string,
hash: u32,
+ loader: options.Loader,
comptime copy_file_path: bool,
) !void {
if (this.indexOf(hash) != null) {
@@ -222,6 +227,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
.fd = fd,
.hash = hash,
.eventlist_index = @truncate(u32, index),
+ .loader = loader,
});
if (FeatureFlags.verbose_watcher) {
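Finally, the watcher: each WatchItem now carries its loader next to the truncated Wyhash of its file path, presumably so that whoever consumes a change event already knows how the file should be re-parsed. A hedged TypeScript analogue of the hash-and-scan lookup (the hash below is an illustrative stand-in, not Wyhash):

// Map a file path to the u32 key used for watchlist comparisons.
function getHash(filepath: string): number {
  let h = 0;
  for (let i = 0; i < filepath.length; i++) {
    // Math.imul keeps the multiply in 32 bits; >>> 0 keeps h unsigned.
    h = (Math.imul(h, 31) + filepath.charCodeAt(i)) >>> 0;
  }
  return h;
}

// Linear scan, mirroring Watcher.indexOf over watchlist.items(.hash).
function indexOf(hashes: Uint32Array, hash: number): number {
  for (let i = 0; i < hashes.length; i++) {
    if (hashes[i] === hash) return i;
  }
  return -1;
}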