author     Jarred Sumner <jarred@jarredsumner.com>    2022-01-01 01:53:50 -0800
committer  Jarred Sumner <jarred@jarredsumner.com>    2022-01-01 01:53:50 -0800
commit     a17088363f16fc72562994ed01555841c7c1b1ac
tree       27cb1685eb1ce676476797f7076dd46e2bac5d9c /src
parent     83004f0a0a51658a50499256a874558f26e09a24
[bun dev] Improve filesystem watcher & HMR reliability (Linux + a little macOS)
Text editors like Replit save through atomic file updates. To an inotify-based filesystem watcher (Linux), that appears as a delete followed by the file being moved into the directory. Now, when known files are moved into a watched directory, the watcher sends a file change notification to the connected browser(s). From there, the browser looks at its own files to determine whether it is affected and needs to request a rebuild.

Additionally, if an existing HMR connection does not know about a file ID passed to it, it asks the browser to reply with the file path and then starts watching that file. This improves HMR reliability when Bun has been restarted but the page has not been reloaded.
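The "atomic update" save pattern the message describes works roughly as follows (a minimal Node-style TypeScript sketch for illustration, not code from this commit; the temp-file naming is hypothetical): the editor writes the new contents to a separate file and renames it over the original, so an inotify watcher sees the change as a directory-level move (IN_MOVED_TO) rather than a write on the file descriptor it was already watching.

```ts
import { writeFileSync, renameSync } from "fs";

// Sketch of an editor-style atomic save. The watched file's inode never
// receives a write; instead the containing directory gets a "moved into"
// event (IN_MOVED_TO on Linux), which is what this commit starts handling.
function atomicSave(path: string, contents: string) {
  const tmp = path + ".tmp";    // hypothetical temp-file naming
  writeFileSync(tmp, contents); // write to a brand-new file
  renameSync(tmp, path);        // atomically replace the original
}
```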
Diffstat (limited to 'src')
-rw-r--r--  src/analytics/analytics_thread.zig |  15
-rw-r--r--  src/api/schema.d.ts                |  78
-rw-r--r--  src/api/schema.js                  | 196
-rw-r--r--  src/api/schema.peechy              |  38
-rw-r--r--  src/api/schema.zig                 | 133
-rw-r--r--  src/http.zig                       | 261
-rw-r--r--  src/http/websocket.zig             |   2
-rw-r--r--  src/http_client_async.zig          |  37
-rw-r--r--  src/runtime/hmr.ts                 | 124
-rw-r--r--  src/watcher.zig                    |  70
10 files changed, 472 insertions, 482 deletions
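For orientation before the schema and hmr.ts changes below: when the server replies with the new `resolve_file` message, the client answers with a `build_with_file_path` command. A minimal sketch of that command's byte layout, assuming the same packing hmr.ts uses (1-byte command kind, then three uint32 fields, then the UTF-8 path); the standalone function and the little-endian DataView writes are for illustration only, since hmr.ts reuses a preallocated buffer and a native-endian Uint32Array:

```ts
// Sketch of the build_with_file_path wire layout used by src/runtime/hmr.ts:
// [0]      command kind (WebsocketCommandKind.build_with_file_path = 3)
// [1..5)   timestamp (uint32)
// [5..9)   file id / watcher hash (uint32)
// [9..13)  file path length (uint32; hmr.ts writes the JS string length here)
// [13..]   UTF-8 encoded file path
function encodeBuildWithFilePath(timestamp: number, id: number, filePath: string): Uint8Array {
  const pathBytes = new TextEncoder().encode(filePath);
  const buf = new Uint8Array(13 + pathBytes.length);
  const view = new DataView(buf.buffer);
  buf[0] = 3;
  view.setUint32(1, timestamp, true);
  view.setUint32(5, id, true);
  view.setUint32(9, filePath.length, true);
  buf.set(pathBytes, 13);
  return buf;
}
```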
diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig
index c77ae4075..02ae90893 100644
--- a/src/analytics/analytics_thread.zig
+++ b/src/analytics/analytics_thread.zig
@@ -321,7 +321,7 @@ var counter: std.atomic.Atomic(u32) = undefined;
fn start() bool {
@setCold(true);
- defer has_loaded = true;
+ has_loaded = true;
counter = std.atomic.Atomic(u32).init(0);
event_queue = EventQueue.init(std.heap.c_allocator);
@@ -352,23 +352,26 @@ const header_entry = Headers.Kv{
},
};
+var out_buffer: MutableString = undefined;
+var event_list: EventList = undefined;
fn readloop() anyerror!void {
defer disabled = true;
Output.Source.configureNamedThread(thread, "Analytics");
defer Output.flush();
- var event_list = try default_allocator.create(EventList);
- event_list.* = EventList.init();
+
+ event_list = EventList.init();
var headers_entries: Headers.Entries = Headers.Entries{};
headers_entries.append(default_allocator, header_entry) catch unreachable;
+ out_buffer = try MutableString.init(default_allocator, 64);
event_list.async_http = HTTP.AsyncHTTP.init(
default_allocator,
.POST,
URL.parse(Environment.analytics_url),
headers_entries,
headers_buf,
- &event_list.out_buffer,
+ &out_buffer,
&event_list.in_buffer,
std.time.ns_per_ms * 10000,
) catch return;
@@ -395,7 +398,6 @@ pub const EventList = struct {
events: std.ArrayList(Event),
async_http: HTTP.AsyncHTTP,
- out_buffer: MutableString,
in_buffer: MutableString,
pub fn init() EventList {
@@ -405,7 +407,6 @@ pub const EventList = struct {
.events = std.ArrayList(Event).init(default_allocator),
.in_buffer = MutableString.init(default_allocator, 1024) catch unreachable,
.async_http = undefined,
- .out_buffer = MutableString.init(default_allocator, 0) catch unreachable,
};
}
@@ -510,7 +511,7 @@ pub const EventList = struct {
disabled = disabled or stuck_count > 4;
this.in_buffer.reset();
- this.out_buffer.reset();
+out_buffer.reset();
if (comptime FeatureFlags.verbose_analytics) {
Output.prettyErrorln("[Analytics] Sent {d} events", .{count});
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index d00dcfa6e..273f8d9d3 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -231,6 +231,8 @@ export enum WebsocketMessageKind {
build_fail = 4,
manifest_success = 5,
manifest_fail = 6,
+ resolve_file = 7,
+ file_change_notification_with_hint = 8,
}
export const WebsocketMessageKindKeys = {
1: "welcome",
@@ -245,16 +247,23 @@ export const WebsocketMessageKindKeys = {
manifest_success: "manifest_success",
6: "manifest_fail",
manifest_fail: "manifest_fail",
+ 7: "resolve_file",
+ resolve_file: "resolve_file",
+ 8: "file_change_notification_with_hint",
+ file_change_notification_with_hint: "file_change_notification_with_hint",
};
export enum WebsocketCommandKind {
build = 1,
manifest = 2,
+ build_with_file_path = 3,
}
export const WebsocketCommandKindKeys = {
1: "build",
build: "build",
2: "manifest",
manifest: "manifest",
+ 3: "build_with_file_path",
+ build_with_file_path: "build_with_file_path",
};
export interface StackFrame {
function_name: string;
@@ -575,37 +584,13 @@ export interface WebsocketMessageBuildFailure {
log: Log;
}
-export interface DependencyManifest {
- ids: Uint32Array;
-}
-
-export interface FileList {
- ptrs: StringPointer[];
- files: string;
-}
-
-export interface WebsocketMessageResolveIDs {
- id: Uint32Array;
- list: FileList;
-}
-
-export interface WebsocketCommandResolveIDs {
- ptrs: StringPointer[];
- files: string;
-}
-
-export interface WebsocketMessageManifestSuccess {
+export interface WebsocketCommandBuildWithFilePath {
id: uint32;
- module_path: string;
- loader: Loader;
- manifest: DependencyManifest;
+ file_path: string;
}
-export interface WebsocketMessageManifestFailure {
+export interface WebsocketMessageResolveID {
id: uint32;
- from_timestamp: uint32;
- loader: Loader;
- log: Log;
}
export declare function encodeStackFrame(
@@ -860,40 +845,17 @@ export declare function encodeWebsocketMessageBuildFailure(
export declare function decodeWebsocketMessageBuildFailure(
buffer: ByteBuffer
): WebsocketMessageBuildFailure;
-export declare function encodeDependencyManifest(
- message: DependencyManifest,
- bb: ByteBuffer
-): void;
-export declare function decodeDependencyManifest(
- buffer: ByteBuffer
-): DependencyManifest;
-export declare function encodeFileList(message: FileList, bb: ByteBuffer): void;
-export declare function decodeFileList(buffer: ByteBuffer): FileList;
-export declare function encodeWebsocketMessageResolveIDs(
- message: WebsocketMessageResolveIDs,
- bb: ByteBuffer
-): void;
-export declare function decodeWebsocketMessageResolveIDs(
- buffer: ByteBuffer
-): WebsocketMessageResolveIDs;
-export declare function encodeWebsocketCommandResolveIDs(
- message: WebsocketCommandResolveIDs,
- bb: ByteBuffer
-): void;
-export declare function decodeWebsocketCommandResolveIDs(
- buffer: ByteBuffer
-): WebsocketCommandResolveIDs;
-export declare function encodeWebsocketMessageManifestSuccess(
- message: WebsocketMessageManifestSuccess,
+export declare function encodeWebsocketCommandBuildWithFilePath(
+ message: WebsocketCommandBuildWithFilePath,
bb: ByteBuffer
): void;
-export declare function decodeWebsocketMessageManifestSuccess(
+export declare function decodeWebsocketCommandBuildWithFilePath(
buffer: ByteBuffer
-): WebsocketMessageManifestSuccess;
-export declare function encodeWebsocketMessageManifestFailure(
- message: WebsocketMessageManifestFailure,
+): WebsocketCommandBuildWithFilePath;
+export declare function encodeWebsocketMessageResolveID(
+ message: WebsocketMessageResolveID,
bb: ByteBuffer
): void;
-export declare function decodeWebsocketMessageManifestFailure(
+export declare function decodeWebsocketMessageResolveID(
buffer: ByteBuffer
-): WebsocketMessageManifestFailure;
+): WebsocketMessageResolveID;
diff --git a/src/api/schema.js b/src/api/schema.js
index dc89e2b11..8b2043e9c 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -2369,12 +2369,16 @@ const WebsocketMessageKind = {
4: 4,
5: 5,
6: 6,
+ 7: 7,
+ 8: 8,
welcome: 1,
file_change_notification: 2,
build_success: 3,
build_fail: 4,
manifest_success: 5,
manifest_fail: 6,
+ resolve_file: 7,
+ file_change_notification_with_hint: 8,
};
const WebsocketMessageKindKeys = {
1: "welcome",
@@ -2383,24 +2387,32 @@ const WebsocketMessageKindKeys = {
4: "build_fail",
5: "manifest_success",
6: "manifest_fail",
+ 7: "resolve_file",
+ 8: "file_change_notification_with_hint",
welcome: "welcome",
file_change_notification: "file_change_notification",
build_success: "build_success",
build_fail: "build_fail",
manifest_success: "manifest_success",
manifest_fail: "manifest_fail",
+ resolve_file: "resolve_file",
+ file_change_notification_with_hint: "file_change_notification_with_hint",
};
const WebsocketCommandKind = {
1: 1,
2: 2,
+ 3: 3,
build: 1,
manifest: 2,
+ build_with_file_path: 3,
};
const WebsocketCommandKindKeys = {
1: "build",
2: "manifest",
+ 3: "build_with_file_path",
build: "build",
manifest: "manifest",
+ build_with_file_path: "build_with_file_path",
};
function decodeWebsocketMessage(bb) {
@@ -2669,121 +2681,15 @@ function encodeWebsocketMessageBuildFailure(message, bb) {
}
}
-function decodeDependencyManifest(bb) {
- var result = {};
-
- result["ids"] = bb.readUint32ByteArray();
- return result;
-}
-
-function encodeDependencyManifest(message, bb) {
- var value = message["ids"];
- if (value != null) {
- bb.writeUint32ByteArray(value);
- } else {
- throw new Error('Missing required field "ids"');
- }
-}
-
-function decodeFileList(bb) {
- var result = {};
-
- var length = bb.readVarUint();
- var values = (result["ptrs"] = Array(length));
- for (var i = 0; i < length; i++) values[i] = decodeStringPointer(bb);
- result["files"] = bb.readString();
- return result;
-}
-
-function encodeFileList(message, bb) {
- var value = message["ptrs"];
- if (value != null) {
- var values = value,
- n = values.length;
- bb.writeVarUint(n);
- for (var i = 0; i < n; i++) {
- value = values[i];
- encodeStringPointer(value, bb);
- }
- } else {
- throw new Error('Missing required field "ptrs"');
- }
-
- var value = message["files"];
- if (value != null) {
- bb.writeString(value);
- } else {
- throw new Error('Missing required field "files"');
- }
-}
-
-function decodeWebsocketMessageResolveIDs(bb) {
- var result = {};
-
- result["id"] = bb.readUint32ByteArray();
- result["list"] = decodeFileList(bb);
- return result;
-}
-
-function encodeWebsocketMessageResolveIDs(message, bb) {
- var value = message["id"];
- if (value != null) {
- bb.writeUint32ByteArray(value);
- } else {
- throw new Error('Missing required field "id"');
- }
-
- var value = message["list"];
- if (value != null) {
- encodeFileList(value, bb);
- } else {
- throw new Error('Missing required field "list"');
- }
-}
-
-function decodeWebsocketCommandResolveIDs(bb) {
- var result = {};
-
- var length = bb.readVarUint();
- var values = (result["ptrs"] = Array(length));
- for (var i = 0; i < length; i++) values[i] = decodeStringPointer(bb);
- result["files"] = bb.readString();
- return result;
-}
-
-function encodeWebsocketCommandResolveIDs(message, bb) {
- var value = message["ptrs"];
- if (value != null) {
- var values = value,
- n = values.length;
- bb.writeVarUint(n);
- for (var i = 0; i < n; i++) {
- value = values[i];
- encodeStringPointer(value, bb);
- }
- } else {
- throw new Error('Missing required field "ptrs"');
- }
-
- var value = message["files"];
- if (value != null) {
- bb.writeString(value);
- } else {
- throw new Error('Missing required field "files"');
- }
-}
-
-function decodeWebsocketMessageManifestSuccess(bb) {
+function decodeWebsocketCommandBuildWithFilePath(bb) {
var result = {};
result["id"] = bb.readUint32();
- result["module_path"] = bb.readString();
- result["loader"] = Loader[bb.readByte()];
- result["manifest"] = decodeDependencyManifest(bb);
+ result["file_path"] = bb.readString();
return result;
}
-function encodeWebsocketMessageManifestSuccess(message, bb) {
+function encodeWebsocketCommandBuildWithFilePath(message, bb) {
var value = message["id"];
if (value != null) {
bb.writeUint32(value);
@@ -2791,76 +2697,28 @@ function encodeWebsocketMessageManifestSuccess(message, bb) {
throw new Error('Missing required field "id"');
}
- var value = message["module_path"];
+ var value = message["file_path"];
if (value != null) {
bb.writeString(value);
} else {
- throw new Error('Missing required field "module_path"');
- }
-
- var value = message["loader"];
- if (value != null) {
- var encoded = Loader[value];
- if (encoded === void 0)
- throw new Error(
- "Invalid value " + JSON.stringify(value) + ' for enum "Loader"'
- );
- bb.writeByte(encoded);
- } else {
- throw new Error('Missing required field "loader"');
- }
-
- var value = message["manifest"];
- if (value != null) {
- encodeDependencyManifest(value, bb);
- } else {
- throw new Error('Missing required field "manifest"');
+ throw new Error('Missing required field "file_path"');
}
}
-function decodeWebsocketMessageManifestFailure(bb) {
+function decodeWebsocketMessageResolveID(bb) {
var result = {};
result["id"] = bb.readUint32();
- result["from_timestamp"] = bb.readUint32();
- result["loader"] = Loader[bb.readByte()];
- result["log"] = decodeLog(bb);
return result;
}
-function encodeWebsocketMessageManifestFailure(message, bb) {
+function encodeWebsocketMessageResolveID(message, bb) {
var value = message["id"];
if (value != null) {
bb.writeUint32(value);
} else {
throw new Error('Missing required field "id"');
}
-
- var value = message["from_timestamp"];
- if (value != null) {
- bb.writeUint32(value);
- } else {
- throw new Error('Missing required field "from_timestamp"');
- }
-
- var value = message["loader"];
- if (value != null) {
- var encoded = Loader[value];
- if (encoded === void 0)
- throw new Error(
- "Invalid value " + JSON.stringify(value) + ' for enum "Loader"'
- );
- bb.writeByte(encoded);
- } else {
- throw new Error('Missing required field "loader"');
- }
-
- var value = message["log"];
- if (value != null) {
- encodeLog(value, bb);
- } else {
- throw new Error('Missing required field "log"');
- }
}
export { Loader };
@@ -2985,15 +2843,7 @@ export { decodeWebsocketMessageBuildSuccess };
export { encodeWebsocketMessageBuildSuccess };
export { decodeWebsocketMessageBuildFailure };
export { encodeWebsocketMessageBuildFailure };
-export { decodeDependencyManifest };
-export { encodeDependencyManifest };
-export { decodeFileList };
-export { encodeFileList };
-export { decodeWebsocketMessageResolveIDs };
-export { encodeWebsocketMessageResolveIDs };
-export { decodeWebsocketCommandResolveIDs };
-export { encodeWebsocketCommandResolveIDs };
-export { decodeWebsocketMessageManifestSuccess };
-export { encodeWebsocketMessageManifestSuccess };
-export { decodeWebsocketMessageManifestFailure };
-export { encodeWebsocketMessageManifestFailure };
+export { decodeWebsocketCommandBuildWithFilePath };
+export { encodeWebsocketCommandBuildWithFilePath };
+export { decodeWebsocketMessageResolveID };
+export { encodeWebsocketMessageResolveID };
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index 8afb0f30a..7c5b482a2 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -440,11 +440,14 @@ smol WebsocketMessageKind {
build_fail = 4;
manifest_success = 5;
manifest_fail = 6;
+ resolve_file = 7;
+ file_change_notification_with_hint = 8;
}
smol WebsocketCommandKind {
build = 1;
manifest = 2;
+ build_with_file_path = 3;
}
// Each websocket message has two messages in it!
@@ -492,7 +495,6 @@ struct WebsocketMessageBuildSuccess {
uint32 blob_length;
}
-
struct WebsocketMessageBuildFailure {
uint32 id;
uint32 from_timestamp;
@@ -502,37 +504,11 @@ struct WebsocketMessageBuildFailure {
Log log;
}
-// CSS @import only for now!
-struct DependencyManifest {
- uint32[] ids;
-}
-
-struct FileList {
- StringPointer[] ptrs;
- string files;
-}
-
-struct WebsocketMessageResolveIDs {
- uint32[] id;
- FileList list;
-}
-
-struct WebsocketCommandResolveIDs {
- StringPointer[] ptrs;
- string files;
-}
-
-struct WebsocketMessageManifestSuccess {
+struct WebsocketCommandBuildWithFilePath {
uint32 id;
- string module_path;
- Loader loader;
-
- DependencyManifest manifest;
+ string file_path;
}
-struct WebsocketMessageManifestFailure {
+struct WebsocketMessageResolveID {
uint32 id;
- uint32 from_timestamp;
- Loader loader;
- Log log;
-}
+}
\ No newline at end of file
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 2238ea3f0..8a850c8f1 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -2338,6 +2338,12 @@ pub const Api = struct {
/// manifest_fail
manifest_fail,
+ /// resolve_file
+ resolve_file,
+
+ /// file_change_notification_with_hint
+ file_change_notification_with_hint,
+
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
@@ -2353,6 +2359,9 @@ pub const Api = struct {
/// manifest
manifest,
+ /// build_with_file_path
+ build_with_file_path,
+
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
@@ -2553,144 +2562,40 @@ pub const Api = struct {
}
};
- pub const DependencyManifest = struct {
- /// ids
- ids: []const u32,
-
- pub fn decode(reader: anytype) anyerror!DependencyManifest {
- var this = std.mem.zeroes(DependencyManifest);
-
- this.ids = try reader.readArray(u32);
- return this;
- }
-
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeArray(u32, this.ids);
- }
- };
-
- pub const FileList = struct {
- /// ptrs
- ptrs: []const StringPointer,
-
- /// files
- files: []const u8,
-
- pub fn decode(reader: anytype) anyerror!FileList {
- var this = std.mem.zeroes(FileList);
-
- this.ptrs = try reader.readArray(StringPointer);
- this.files = try reader.readValue([]const u8);
- return this;
- }
-
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeArray(StringPointer, this.ptrs);
- try writer.writeValue(@TypeOf(this.files), this.files);
- }
- };
-
- pub const WebsocketMessageResolveIDs = struct {
- /// id
- id: []const u32,
-
- /// list
- list: FileList,
-
- pub fn decode(reader: anytype) anyerror!WebsocketMessageResolveIDs {
- var this = std.mem.zeroes(WebsocketMessageResolveIDs);
-
- this.id = try reader.readArray(u32);
- this.list = try reader.readValue(FileList);
- return this;
- }
-
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeArray(u32, this.id);
- try writer.writeValue(@TypeOf(this.list), this.list);
- }
- };
-
- pub const WebsocketCommandResolveIDs = struct {
- /// ptrs
- ptrs: []const StringPointer,
-
- /// files
- files: []const u8,
-
- pub fn decode(reader: anytype) anyerror!WebsocketCommandResolveIDs {
- var this = std.mem.zeroes(WebsocketCommandResolveIDs);
-
- this.ptrs = try reader.readArray(StringPointer);
- this.files = try reader.readValue([]const u8);
- return this;
- }
-
- pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
- try writer.writeArray(StringPointer, this.ptrs);
- try writer.writeValue(@TypeOf(this.files), this.files);
- }
- };
-
- pub const WebsocketMessageManifestSuccess = struct {
+ pub const WebsocketCommandBuildWithFilePath = struct {
/// id
id: u32 = 0,
- /// module_path
- module_path: []const u8,
+ /// file_path
+ file_path: []const u8,
- /// loader
- loader: Loader,
-
- /// manifest
- manifest: DependencyManifest,
-
- pub fn decode(reader: anytype) anyerror!WebsocketMessageManifestSuccess {
- var this = std.mem.zeroes(WebsocketMessageManifestSuccess);
+ pub fn decode(reader: anytype) anyerror!WebsocketCommandBuildWithFilePath {
+ var this = std.mem.zeroes(WebsocketCommandBuildWithFilePath);
this.id = try reader.readValue(u32);
- this.module_path = try reader.readValue([]const u8);
- this.loader = try reader.readValue(Loader);
- this.manifest = try reader.readValue(DependencyManifest);
+ this.file_path = try reader.readValue([]const u8);
return this;
}
pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
try writer.writeInt(this.id);
- try writer.writeValue(@TypeOf(this.module_path), this.module_path);
- try writer.writeEnum(this.loader);
- try writer.writeValue(@TypeOf(this.manifest), this.manifest);
+ try writer.writeValue(@TypeOf(this.file_path), this.file_path);
}
};
- pub const WebsocketMessageManifestFailure = struct {
+ pub const WebsocketMessageResolveId = packed struct {
/// id
id: u32 = 0,
- /// from_timestamp
- from_timestamp: u32 = 0,
-
- /// loader
- loader: Loader,
-
- /// log
- log: Log,
-
- pub fn decode(reader: anytype) anyerror!WebsocketMessageManifestFailure {
- var this = std.mem.zeroes(WebsocketMessageManifestFailure);
+ pub fn decode(reader: anytype) anyerror!WebsocketMessageResolveId {
+ var this = std.mem.zeroes(WebsocketMessageResolveId);
this.id = try reader.readValue(u32);
- this.from_timestamp = try reader.readValue(u32);
- this.loader = try reader.readValue(Loader);
- this.log = try reader.readValue(Log);
return this;
}
pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
try writer.writeInt(this.id);
- try writer.writeInt(this.from_timestamp);
- try writer.writeEnum(this.loader);
- try writer.writeValue(@TypeOf(this.log), this.log);
}
};
};
diff --git a/src/http.zig b/src/http.zig
index c624c9bd6..09c9840e2 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -709,16 +709,7 @@ pub const RequestContext = struct {
var watchlist_slice = this.watcher.watchlist.slice();
- const index = std.mem.indexOfScalar(u32, watchlist_slice.items(.hash), id) orelse {
-
- // log.addErrorFmt(null, logger.Loc.Empty, this, "File missing from watchlist: {d}. Please refresh :(", .{hash}) catch unreachable;
- return WatchBuildResult{
- .value = .{ .fail = std.mem.zeroes(Api.WebsocketMessageBuildFailure) },
- .id = id,
- .log = log,
- .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()),
- };
- };
+ const index = std.mem.indexOfScalar(u32, watchlist_slice.items(.hash), id) orelse return error.MissingWatchID;
const file_path_str = watchlist_slice.items(.file_path)[index];
const fd = watchlist_slice.items(.fd)[index];
@@ -1544,6 +1535,7 @@ pub const RequestContext = struct {
// Output.prettyErrorln("<r><green>101<r><d> Hot Module Reloading connected.<r>", .{});
// Output.flush();
Analytics.Features.hot_module_reloading = true;
+ var build_file_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
var cmd: Api.WebsocketCommand = undefined;
var msg: Api.WebsocketMessage = .{
@@ -1580,8 +1572,9 @@ pub const RequestContext = struct {
Output.prettyErrorln("<r><red>ERR:<r> <b>Websocket failed to write.<r>", .{});
}
}
-
while (!handler.tombstone) {
+ Output.flush();
+
defer Output.flush();
handler.conn.client.getError() catch |err| {
Output.prettyErrorln("<r><red>Websocket ERR:<r> <b>{s}<r>", .{err});
@@ -1617,13 +1610,74 @@ pub const RequestContext = struct {
cmd_reader = ApiReader.init(cnst_frame, ctx.allocator);
cmd = try Api.WebsocketCommand.decode(&cmd_reader);
switch (cmd.kind) {
- .build => {
- var request = try Api.WebsocketCommandBuild.decode(&cmd_reader);
+ .build, .build_with_file_path => {
+ const request_id = if (cmd.kind == .build)
+ (try Api.WebsocketCommandBuild.decode(&cmd_reader)).id
+ else brk: {
+ const full_build = try Api.WebsocketCommandBuildWithFilePath.decode(&cmd_reader);
+ if (ctx.watcher.indexOf(full_build.id) != null) break :brk full_build.id;
+ const file_path = if (std.fs.path.isAbsolute(full_build.file_path))
+ full_build.file_path
+ else
+ ctx.bundler.fs.absBuf(
+ &[_]string{ ctx.bundler.fs.top_level_dir, full_build.file_path },
+ &build_file_path_buf,
+ );
+
+ if (Watcher.getHash(file_path) != full_build.id) {
+ Output.prettyErrorln("<r><red>ERR:<r> <b>File path hash mismatch for {s}.<r>", .{full_build.file_path});
+ continue;
+ }
+ // safe because WebSocket's buffer is 8096
+ // max file path is 4096
+ var path_buf = _global.constStrToU8(file_path);
+ path_buf.ptr[path_buf.len] = 0;
+ var file_path_z: [:0]u8 = path_buf.ptr[0..path_buf.len :0];
+ const file = std.fs.openFileAbsoluteZ(file_path_z, .{ .read = true }) catch |err| {
+ Output.prettyErrorln("<r><red>ERR:<r>{s} opening file <b>{s}<r> <r>", .{ @errorName(err), full_build.file_path });
+ continue;
+ };
+ Fs.FileSystem.setMaxFd(file.handle);
+ try ctx.watcher.appendFile(
+ file.handle,
+ file_path,
+ full_build.id,
+ ctx.bundler.options.loader(Fs.PathName.init(file_path).ext),
+ 0,
+ null,
+ true,
+ );
+ break :brk full_build.id;
+ };
var arena = std.heap.ArenaAllocator.init(default_allocator);
defer arena.deinit();
- var build_result = try handler.builder.build(request.id, cmd.timestamp, arena.allocator());
+ var head = Websocket.WebsocketHeader{
+ .final = true,
+ .opcode = .Binary,
+ .mask = false,
+ .len = 0,
+ };
+
+ const build_result = handler.builder.build(request_id, cmd.timestamp, arena.allocator()) catch |err| {
+ if (err == error.MissingWatchID) {
+ msg.timestamp = cmd.timestamp;
+ msg.kind = Api.WebsocketMessageKind.resolve_file;
+ handler.message_buffer.reset();
+ var buffer_writer = MutableStringAPIWriter.init(&handler.message_buffer);
+ try msg.encode(&buffer_writer);
+ _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+ const resolve_id = Api.WebsocketMessageResolveId{ .id = request_id };
+ try resolve_id.encode(&buffer_writer);
+ head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len);
+ try handler.websocket.writeHeader(head, handler.message_buffer.list.items.len);
+ _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+ continue;
+ }
+
+ return err;
+ };
const file_path = switch (build_result.value) {
.fail => |fail| fail.module_path,
.success => |fail| fail.module_path,
@@ -1652,39 +1706,35 @@ pub const RequestContext = struct {
},
}
- defer Output.flush();
- msg.timestamp = build_result.timestamp;
- msg.kind = switch (build_result.value) {
- .success => .build_success,
- else => .build_fail,
- };
- handler.message_buffer.reset();
- var buffer_writer = MutableStringAPIWriter.init(&handler.message_buffer);
- try msg.encode(&buffer_writer);
- var head = Websocket.WebsocketHeader{
- .final = true,
- .opcode = .Binary,
- .mask = false,
- .len = 0,
- };
-
- switch (build_result.value) {
- .success => |success| {
- try success.encode(&buffer_writer);
- const total = handler.message_buffer.list.items.len + build_result.bytes.len;
- head.len = Websocket.WebsocketHeader.packLength(total);
- try handler.websocket.writeHeader(head, total);
- _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
- if (build_result.bytes.len > 0) {
- _ = try handler.conn.client.write(build_result.bytes, SOCKET_FLAGS);
- }
- },
- .fail => |fail| {
- try fail.encode(&buffer_writer);
- head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len);
- try handler.websocket.writeHeader(head, handler.message_buffer.list.items.len);
- _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
- },
+ {
+ defer Output.flush();
+ msg.timestamp = build_result.timestamp;
+ msg.kind = switch (build_result.value) {
+ .success => .build_success,
+ else => .build_fail,
+ };
+ handler.message_buffer.reset();
+ var buffer_writer = MutableStringAPIWriter.init(&handler.message_buffer);
+ try msg.encode(&buffer_writer);
+
+ switch (build_result.value) {
+ .success => |success| {
+ try success.encode(&buffer_writer);
+ const total = handler.message_buffer.list.items.len + build_result.bytes.len;
+ head.len = Websocket.WebsocketHeader.packLength(total);
+ try handler.websocket.writeHeader(head, total);
+ _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+ if (build_result.bytes.len > 0) {
+ _ = try handler.conn.client.write(build_result.bytes, SOCKET_FLAGS);
+ }
+ },
+ .fail => |fail| {
+ try fail.encode(&buffer_writer);
+ head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len);
+ try handler.websocket.writeHeader(head, handler.message_buffer.list.items.len);
+ _ = try handler.conn.client.write(handler.message_buffer.list.items, SOCKET_FLAGS);
+ },
+ }
}
},
else => {
@@ -2176,8 +2226,12 @@ pub const RequestContext = struct {
const path = ctx.url.path["bun:".len..];
if (strings.eqlComptime(path, "_api.hmr")) {
- try ctx.handleWebsocket(server);
- return;
+ if (ctx.header("Upgrade")) |upgrade| {
+ if (strings.eqlCaseInsensitiveASCII(upgrade.value, "websocket", true)) {
+ try ctx.handleWebsocket(server);
+ return;
+ }
+ }
}
if (strings.eqlComptime(path, "error.js")) {
@@ -2482,23 +2536,25 @@ pub const Server = struct {
fallback_only: bool = false,
threadlocal var filechange_buf: [32]u8 = undefined;
+ threadlocal var filechange_buf_hinted: [32]u8 = undefined;
pub fn onFileUpdate(
ctx: *Server,
events: []watcher.WatchEvent,
+ changed_files: []?[:0]u8,
watchlist: watcher.Watchlist,
) void {
if (ctx.javascript_enabled) {
if (Output.isEmojiEnabled()) {
- _onFileUpdate(ctx, events, watchlist, true, true);
+ _onFileUpdate(ctx, events, changed_files, watchlist, true, true);
} else {
- _onFileUpdate(ctx, events, watchlist, true, false);
+ _onFileUpdate(ctx, events, changed_files, watchlist, true, false);
}
} else {
if (Output.isEmojiEnabled()) {
- _onFileUpdate(ctx, events, watchlist, false, true);
+ _onFileUpdate(ctx, events, changed_files, watchlist, false, true);
} else {
- _onFileUpdate(ctx, events, watchlist, false, false);
+ _onFileUpdate(ctx, events, changed_files, watchlist, false, false);
}
}
}
@@ -2506,21 +2562,38 @@ pub const Server = struct {
fn _onFileUpdate(
ctx: *Server,
events: []watcher.WatchEvent,
+ changed_files: []?[:0]u8,
watchlist: watcher.Watchlist,
comptime is_javascript_enabled: bool,
comptime is_emoji_enabled: bool,
) void {
var fbs = std.io.fixedBufferStream(&filechange_buf);
- var writer = ByteApiWriter.init(&fbs);
- const message_type = Api.WebsocketMessage{
- .timestamp = RequestContext.WebsocketHandler.toTimestamp(ctx.timer.read()),
- .kind = .file_change_notification,
- };
- message_type.encode(&writer) catch unreachable;
+ var hinted_fbs = std.io.fixedBufferStream(&filechange_buf_hinted);
+ {
+ var writer = ByteApiWriter.init(&fbs);
+ const message_type = Api.WebsocketMessage{
+ .timestamp = RequestContext.WebsocketHandler.toTimestamp(ctx.timer.read()),
+ .kind = .file_change_notification,
+ };
+
+ message_type.encode(&writer) catch unreachable;
+ }
+
+ {
+ var writer = ByteApiWriter.init(&hinted_fbs);
+ const message_type = Api.WebsocketMessage{
+ .timestamp = RequestContext.WebsocketHandler.toTimestamp(ctx.timer.read()),
+ .kind = Api.WebsocketMessageKind.file_change_notification_with_hint,
+ };
+
+ message_type.encode(&writer) catch unreachable;
+ }
+
var slice = watchlist.slice();
const file_paths = slice.items(.file_path);
var counts = slice.items(.count);
const kinds = slice.items(.kind);
+ const hashes = slice.items(.hash);
var header = fbs.getWritten();
defer ctx.watcher.flushEvictions();
defer Output.flush();
@@ -2538,8 +2611,9 @@ pub const Server = struct {
// so it's consistent with the rest
// if we use .extname we might run into an issue with whether or not the "." is included.
const path = Fs.PathName.init(file_path);
- const id = watchlist.items(.hash)[event.index];
+ const id = hashes[event.index];
var content_fbs = std.io.fixedBufferStream(filechange_buf[header.len..]);
+ var hinted_content_fbs = std.io.fixedBufferStream(filechange_buf_hinted[header.len..]);
defer {
if (comptime is_javascript_enabled) {
@@ -2548,6 +2622,10 @@ pub const Server = struct {
}
}
+ if (comptime Environment.isDebug) {
+ Output.prettyErrorln("[watcher] {s}: -- {}", .{ @tagName(kind), event.op });
+ }
+
switch (kind) {
.file => {
if (event.op.delete or event.op.rename) {
@@ -2562,11 +2640,6 @@ pub const Server = struct {
Output.prettyErrorln("<r><d>File changed: {s}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
}
} else {
- if (event.op.move) {
- var fds = ctx.watcher.watchlist.items(.fd);
- fds[event.index] = 0;
- }
-
const change_message = Api.WebsocketMessageFileChangeNotification{
.id = id,
.loader = (ctx.bundler.options.loaders.get(path.ext) orelse .file).toAPI(),
@@ -2587,12 +2660,68 @@ pub const Server = struct {
}
},
.directory => {
+ const affected = event.names(changed_files);
+
+ if (affected.len > 0) {
+ if (rfs.entries.get(file_path)) |dir_ent| {
+ var last_file_hash: Watcher.HashType = std.math.maxInt(Watcher.HashType);
+ var already_had_all_affected = true;
+ for (affected) |changed_name_ptr| {
+ const changed_name: []const u8 = std.mem.span((changed_name_ptr orelse continue));
+ const loader = (ctx.bundler.options.loaders.get(Fs.PathName.init(changed_name).ext) orelse .file);
+ if (loader.isJavaScriptLikeOrJSON() or loader == .css) {
+ if (dir_ent.entries.get(changed_name)) |file_ent| {
+ const abs_path = file_ent.entry.abs_path.slice();
+ const file_hash = Watcher.getHash(abs_path);
+
+ // skip consecutive duplicates
+ if (last_file_hash == file_hash) continue;
+ last_file_hash = file_hash;
+
+ // reset the file descriptor
+ file_ent.entry.cache.fd = 0;
+ file_ent.entry.need_stat = true;
+
+ const change_message = Api.WebsocketMessageFileChangeNotification{
+ .id = file_hash,
+ .loader = loader.toAPI(),
+ };
+
+ var content_writer = ByteApiWriter.init(&hinted_content_fbs);
+ change_message.encode(&content_writer) catch unreachable;
+ const change_buf = hinted_content_fbs.getWritten();
+ const written_buf = filechange_buf_hinted[0 .. header.len + change_buf.len];
+ RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| {
+ Output.prettyErrorln("Error writing change notification: {s}<r>", .{@errorName(err)});
+ };
+ if (comptime is_emoji_enabled) {
+ Output.prettyErrorln("<r>📜 <d>File change: {s}<r>", .{ctx.bundler.fs.relativeTo(abs_path)});
+ } else {
+ Output.prettyErrorln("<r> <d>File change: {s}<r>", .{ctx.bundler.fs.relativeTo(abs_path)});
+ }
+ } else {
+ already_had_all_affected = false;
+ }
+ }
+ }
+
+ // When the only operation in a directory was moving new files into it, and we were already watching the existing files
+ // We don't need to invalidate the directory entries
+ // We only need to invalidate the file descriptor
+ if (already_had_all_affected and event.op.move_to and !event.op.delete and
+ !event.op.rename and
+ !event.op.write)
+ {
+ continue;
+ }
+ }
+ }
+
rfs.bustEntriesCache(file_path);
ctx.bundler.resolver.dir_cache.remove(file_path);
// if (event.op.delete or event.op.rename)
// ctx.watcher.removeAtIndex(event.index, hashes[event.index], parent_hashes, .directory);
-
if (comptime is_emoji_enabled) {
Output.prettyErrorln("<r>📁 <d>Dir change: {s}<r>", .{ctx.bundler.fs.relativeTo(file_path)});
} else {
diff --git a/src/http/websocket.zig b/src/http/websocket.zig
index caa511670..01ddb8bc8 100644
--- a/src/http/websocket.zig
+++ b/src/http/websocket.zig
@@ -112,7 +112,7 @@ pub const Websocket = struct {
conn: *tcp.Connection,
err: ?anyerror = null,
- buf: [4096]u8 = undefined,
+ buf: [8096]u8 = undefined,
read_stream: ReadStream,
reader: ReadStream.Reader,
flags: u32 = 0,
diff --git a/src/http_client_async.zig b/src/http_client_async.zig
index 6c3d9585a..947e8a944 100644
--- a/src/http_client_async.zig
+++ b/src/http_client_async.zig
@@ -22,7 +22,7 @@ const AsyncIO = @import("io");
const ThreadPool = @import("thread_pool");
const boring = @import("boringssl");
pub const NetworkThread = @import("./network_thread.zig");
-
+const ObjectPool = @import("./pool.zig").ObjectPool;
const SOCK = os.SOCK;
pub const Headers = struct {
@@ -222,11 +222,21 @@ pub const HeaderBuilder = struct {
};
pub const HTTPChannel = @import("./sync.zig").Channel(*AsyncHTTP, .{ .Static = 1000 });
-
// 32 pointers much cheaper than 1000 pointers
-const SingleHTTPChannel = @import("./sync.zig").Channel(*AsyncHTTP, .{ .Static = 32 });
-var send_sync_channel: SingleHTTPChannel = undefined;
-var send_sync_channel_loaded: bool = false;
+const SingleHTTPChannel = struct {
+ const SingleHTTPCHannel_ = @import("./sync.zig").Channel(*AsyncHTTP, .{ .Static = 8 });
+ channel: SingleHTTPCHannel_,
+ pub fn reset(_: *@This()) void {
+
+ }
+ pub fn init(_: std.mem.Allocator) anyerror!SingleHTTPChannel {
+ return SingleHTTPChannel{
+ .channel = SingleHTTPCHannel_.init()
+ };
+ }
+};
+
+const SingleHTTPChannelPool = ObjectPool(SingleHTTPChannel, SingleHTTPChannel.init, false);
pub const HTTPChannelContext = struct {
http: AsyncHTTP = undefined,
@@ -270,6 +280,7 @@ pub const AsyncHTTP = struct {
/// Callback runs when request finishes
/// Executes on the network thread
callback: ?CompletionCallback = null,
+ callback_ctx: ?*anyopaque = null,
pub const CompletionCallback = fn (this: *AsyncHTTP, sender: *HTTPSender) void;
pub var active_requests_count = std.atomic.Atomic(u32).init(0);
@@ -317,22 +328,26 @@ pub const AsyncHTTP = struct {
}
fn sendSyncCallback(this: *AsyncHTTP, sender: *HTTPSender) void {
- send_sync_channel.writeItem(this) catch unreachable;
+ var pooled_node = @ptrCast(*SingleHTTPChannelPool.Node, @alignCast(@alignOf(*SingleHTTPChannelPool.Node), this.callback_ctx.?));
+ pooled_node.data.channel.writeItem(this) catch unreachable;
sender.release();
}
pub fn sendSync(this: *AsyncHTTP) anyerror!picohttp.Response {
- if (!send_sync_channel_loaded) {
- send_sync_channel_loaded = true;
- send_sync_channel = SingleHTTPChannel.init();
+ this.callback_ctx = SingleHTTPChannelPool.get(default_allocator);
+ defer {
+ var pooled_node = @ptrCast(*SingleHTTPChannelPool.Node, @alignCast(@alignOf(*SingleHTTPChannelPool.Node), this.callback_ctx.?));
+ SingleHTTPChannelPool.release(pooled_node);
+ this.callback_ctx = null;
}
-
this.callback = sendSyncCallback;
+
var batch = NetworkThread.Batch{};
this.schedule(this.allocator, &batch);
NetworkThread.global.pool.schedule(batch);
while (true) {
- var async_http: *AsyncHTTP = (send_sync_channel.tryReadItem() catch unreachable) orelse {
+ var pooled = @ptrCast(*SingleHTTPChannelPool.Node, @alignCast(@alignOf(*SingleHTTPChannelPool.Node), this.callback_ctx.?));
+ var async_http: *AsyncHTTP = (pooled.data.channel.tryReadItem() catch unreachable) orelse {
std.atomic.spinLoopHint();
std.time.sleep(std.time.ns_per_us * 100);
continue;
diff --git a/src/runtime/hmr.ts b/src/runtime/hmr.ts
index 3803dff2e..965339f80 100644
--- a/src/runtime/hmr.ts
+++ b/src/runtime/hmr.ts
@@ -3,6 +3,7 @@ import * as API from "../api/schema";
var __HMRModule, __FastRefreshModule, __HMRClient, __injectFastRefresh;
if (typeof window !== "undefined") {
+ var textEncoder: TextEncoder;
// We add a scope here to minimize chances of namespace collisions
var runOnce = false;
var clientStartTime = 0;
@@ -185,7 +186,7 @@ if (typeof window !== "undefined") {
return CSSLoader.cssLoadId.bundle_id;
}
- private findCSSLinkTag(id: number): CSSHMRInsertionPoint | null {
+ findCSSLinkTag(id: number): CSSHMRInsertionPoint | null {
let count = 0;
let match: CSSHMRInsertionPoint = null;
@@ -327,6 +328,7 @@ if (typeof window !== "undefined") {
}
let filepath = update.file;
+ // We cannot safely do this because the hash would change on the server
if (filepath.startsWith(this.hmr.cwd)) {
filepath = filepath.substring(this.hmr.cwd.length);
}
@@ -846,10 +848,10 @@ if (typeof window !== "undefined") {
}
}
- handleFileChangeNotification(buffer: ByteBuffer, timestamp: number) {
+ handleFileChangeNotification(buffer: ByteBuffer, timestamp: number, copy_file_path: boolean) {
const notification =
API.decodeWebsocketMessageFileChangeNotification(buffer);
- let file_path = "";
+ let file_path = "";
switch (notification.loader) {
case API.Loader.css: {
file_path = this.loaders.css.filePath(notification);
@@ -866,6 +868,10 @@ if (typeof window !== "undefined") {
}
}
+ return this.handleFileChangeNotificationBase(timestamp, notification, file_path, copy_file_path);
+ }
+
+ private handleFileChangeNotificationBase(timestamp: number, notification: API.WebsocketMessageFileChangeNotification, file_path: string, copy_file_path: boolean) {
const accept = file_path && file_path.length > 0;
if (!accept) {
@@ -923,12 +929,34 @@ if (typeof window !== "undefined") {
switch (reloadBehavior) {
// This is the same command/logic for both JS and CSS hot reloading.
case ReloadBehavior.hotReload: {
- this.buildCommandBuf[0] = API.WebsocketCommandKind.build;
+ if (copy_file_path && !this.buildCommandBufWithFilePath) {
+ // on Linux, max file path length is 4096 bytes
+ // on macOS & Windows, max file path length is 1024 bytes
+ // 256 is extra breathing room
+ this.buildCommandBufWithFilePath = new Uint8Array(4096 + 256);
+ }
+
+ const writeBuffer = !copy_file_path ? this.buildCommandBuf : this.buildCommandBufWithFilePath;
+ writeBuffer[0] = !copy_file_path ? API.WebsocketCommandKind.build : API.WebsocketCommandKind.build_with_file_path;
this.buildCommandUArray[0] = timestamp;
- this.buildCommandBuf.set(this.buildCommandUArrayEight, 1);
+ writeBuffer.set(this.buildCommandUArrayEight, 1);
this.buildCommandUArray[0] = notification.id;
- this.buildCommandBuf.set(this.buildCommandUArrayEight, 5);
- this.socket.send(this.buildCommandBuf);
+ writeBuffer.set(this.buildCommandUArrayEight, 5);
+
+ if (copy_file_path) {
+ if (!textEncoder) {
+ textEncoder = new TextEncoder();
+ }
+
+ this.buildCommandUArray[0] = file_path.length;
+ writeBuffer.set(this.buildCommandUArrayEight, 9);
+
+ const out = textEncoder.encodeInto(file_path, writeBuffer.subarray(13));
+ this.socket.send(this.buildCommandBufWithFilePath.subarray(0, 13 + out.written));
+ } else {
+ this.socket.send(this.buildCommandBuf);
+ }
+
if (this.verbose) {
__hmrlog.debug(`Requesting update for ${file_path}`);
}
@@ -941,10 +969,14 @@ if (typeof window !== "undefined") {
}
}
}
+
buildCommandBuf = new Uint8Array(9);
buildCommandUArray = new Uint32Array(1);
buildCommandUArrayEight = new Uint8Array(this.buildCommandUArray.buffer);
+ // lazily allocate because it's going to be much larger than 9 bytes
+ buildCommandBufWithFilePath: Uint8Array;
+
// On open, reset the delay for reconnecting
handleOpen = (event: Event) => {
globalThis.clearTimeout(this.nextReconnectAttempt);
@@ -974,8 +1006,84 @@ if (typeof window !== "undefined") {
break;
}
+ case API.WebsocketMessageKind.resolve_file: {
+ const {id} = API.decodeWebsocketMessageResolveID(buffer);
+ const timestamp = this.builds.get(id) || 0;
+
+ if (timestamp == 0 && HotReload.VERBOSE) {
+ __hmrlog.debug(`Unknown module? ${id}`);
+ return;
+ }
+
+ const index = HMRModule.dependencies.graph.indexOf(id);
+ var file_path: string = "";
+ var loader = API.Loader.js;
+ if (index > -1) {
+ file_path = HMRModule.dependencies.modules[index].file_path;
+ } else {
+ const tag = this.loaders.css.findCSSLinkTag(id);
+ if (tag && tag.file.length) {
+ file_path = tag.file;
+ }
+ }
+
+ if (!file_path || file_path.length === 0) {
+ if (HotReload.VERBOSE) {
+ __hmrlog.debug(`Unknown module? ${id}`);
+ }
+ return;
+ }
+
+ switch (file_path.substring(file_path.lastIndexOf("."))) {
+ case ".css": {
+ loader = API.Loader.css;
+ break;
+ }
+
+ case ".mjs":
+ case ".cjs":
+ case ".js": {
+ loader = API.Loader.js;
+ break;
+ }
+
+ case ".json": {
+ loader = API.Loader.json;
+ break;
+ }
+
+ case ".cts":
+ case ".mts":
+ case ".ts": {
+ loader = API.Loader.ts;
+ break;
+ }
+
+ case ".tsx": {
+ loader = API.Loader.tsx;
+ break;
+ }
+
+ case ".jsx": {
+ loader = API.Loader.jsx;
+ break;
+ }
+
+ default: {
+ loader = API.Loader.file;
+ break;
+ }
+ }
+
+ this.handleFileChangeNotificationBase(timestamp, {id, loader}, file_path, true);
+ break;
+ }
case API.WebsocketMessageKind.file_change_notification: {
- this.handleFileChangeNotification(buffer, header.timestamp);
+ this.handleFileChangeNotification(buffer, header.timestamp, false);
+ break;
+ }
+ case API.WebsocketMessageKind.file_change_notification_with_hint: {
+ this.handleFileChangeNotification(buffer, header.timestamp, true);
break;
}
case API.WebsocketMessageKind.welcome: {
diff --git a/src/watcher.zig b/src/watcher.zig
index 7aa5c2ccf..d97be28a1 100644
--- a/src/watcher.zig
+++ b/src/watcher.zig
@@ -20,7 +20,7 @@ const os = std.os;
const Mutex = @import("./lock.zig").Lock;
const Futex = @import("./futex.zig");
-const WatchItemIndex = u16;
+pub const WatchItemIndex = u16;
const NoWatchItem: WatchItemIndex = std.math.maxInt(WatchItemIndex);
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
@@ -65,6 +65,16 @@ pub const INotify = struct {
mask: u32,
cookie: u32,
name_len: u32,
+
+ pub fn name(this: *const INotifyEvent) [:0]u8 {
+ if (comptime Environment.allow_assert) std.debug.assert(this.name_len > 0);
+
+ // the name_len field is wrong
+ // it includes alignment / padding
+ // but it is a sentineled value
+ // so we can just trim it to the first null byte
+ return std.mem.sliceTo(@intToPtr([*]u8, @ptrToInt(this) + @sizeOf(INotifyEvent))[0..this.name_len:0], 0);
+ }
};
pub var inotify_fd: EventListIndex = 0;
pub var loaded_inotify = false;
@@ -208,6 +218,13 @@ pub const WatchItem = struct {
pub const WatchEvent = struct {
index: WatchItemIndex,
op: Op,
+ name_off: u8 = 0,
+ name_len: u8 = 0,
+
+ pub fn names(this: WatchEvent, buf: []?[:0]u8) []?[:0]u8 {
+ if (this.name_len == 0) return &[_]?[:0]u8{};
+ return buf[this.name_off..][0..this.name_len];
+ }
const KEvent = std.c.Kevent;
@@ -218,15 +235,17 @@ pub const WatchEvent = struct {
}
pub fn merge(this: *WatchEvent, other: WatchEvent) void {
+ this.name_len += other.name_len;
this.op = Op{
.delete = this.op.delete or other.op.delete,
.metadata = this.op.metadata or other.op.metadata,
.rename = this.op.rename or other.op.rename,
- .move = this.op.move or other.op.move,
.write = this.op.write or other.op.write,
};
}
+
+
pub fn fromKEvent(this: *WatchEvent, kevent: KEvent) void {
this.* =
WatchEvent{
@@ -234,20 +253,21 @@ pub const WatchEvent = struct {
.delete = (kevent.fflags & std.c.NOTE_DELETE) > 0,
.metadata = (kevent.fflags & std.c.NOTE_ATTRIB) > 0,
.rename = (kevent.fflags & std.c.NOTE_RENAME) > 0,
- .move = false, // unhandled
.write = (kevent.fflags & std.c.NOTE_WRITE) > 0,
},
.index = @truncate(WatchItemIndex, kevent.udata),
};
}
+
+
pub fn fromINotify(this: *WatchEvent, event: INotify.INotifyEvent, index: WatchItemIndex) void {
this.* = WatchEvent{
.op = Op{
.delete = (event.mask & INotify.IN_DELETE_SELF) > 0 or (event.mask & INotify.IN_DELETE) > 0,
.metadata = false,
.rename = (event.mask & INotify.IN_MOVE_SELF) > 0,
- .move = (event.mask & INotify.IN_MOVED_TO) > 0,
+ .move_to = (event.mask & INotify.IN_MOVED_TO) > 0,
.write = (event.mask & INotify.IN_MODIFY) > 0,
},
.index = index,
@@ -255,12 +275,16 @@ pub const WatchEvent = struct {
}
pub const Op = packed struct {
+ padding: u3 = 0,
+
delete: bool = false,
metadata: bool = false,
rename: bool = false,
write: bool = false,
- move: bool = false,
+ move_to: bool = false,
};
+
+
};
pub const Watchlist = std.MultiArrayList(WatchItem);
@@ -281,6 +305,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
// User-facing
watch_events: [128]WatchEvent = undefined,
+ changed_filepaths: [128]?[:0]u8 = std.mem.zeroes([128]?[:0]u8),
fs: *Fs.FileSystem,
// this is what kqueue knows about
@@ -424,7 +449,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
watchevents[i].fromKEvent(event);
}
- this.ctx.onFileUpdate(watchevents, this.watchlist);
+ this.ctx.onFileUpdate(watchevents, this.changed_filepaths[0..watchevents.len], this.watchlist);
}
} else if (Environment.isLinux) {
restart: while (true) {
@@ -434,9 +459,12 @@ pub fn NewWatcher(comptime ContextType: type) type {
// TODO: is this thread safe?
const eventlist_index = this.watchlist.items(.eventlist_index);
var remaining_events = events.len;
+ var name_off: u8 = 0;
+ var temp_name_list: [128]?[:0]u8 = undefined;
+ var temp_name_off: u8 = 0;
while (remaining_events > 0) {
- const slice = events[0..std.math.min(remaining_events, this.watch_events.len)];
+ const slice = events[0..@minimum(remaining_events, this.watch_events.len)];
var watchevents = this.watch_events[0..slice.len];
var watch_event_id: u32 = 0;
for (slice) |event| {
@@ -451,7 +479,14 @@ pub fn NewWatcher(comptime ContextType: type) type {
) orelse continue,
),
);
-
+ temp_name_list[temp_name_off] = if (event.name_len > 0)
+ event.name()
+ else
+ null;
+ watchevents[watch_event_id].name_off = temp_name_off;
+ watchevents[watch_event_id].name_len = @as(u8, @boolToInt((event.name_len > 0)));
+ temp_name_off += @as(u8, @boolToInt((event.name_len > 0)));
+
watch_event_id += 1;
}
@@ -460,16 +495,25 @@ pub fn NewWatcher(comptime ContextType: type) type {
var last_event_index: usize = 0;
var last_event_id: INotify.EventListIndex = std.math.maxInt(INotify.EventListIndex);
- for (all_events) |event, i| {
- if (event.index == last_event_id) {
- all_events[last_event_index].merge(event);
+
+
+
+ for (all_events) |_, i| {
+ if (all_events[i].name_len > 0) {
+ this.changed_filepaths[name_off] = temp_name_list[all_events[i].name_off];
+ all_events[i].name_off = name_off;
+ name_off += 1;
+ }
+
+ if (all_events[i].index == last_event_id) {
+ all_events[last_event_index].merge(all_events[i]);
continue;
}
last_event_index = i;
- last_event_id = event.index;
+ last_event_id = all_events[i].index;
}
if (all_events.len == 0) continue :restart;
- this.ctx.onFileUpdate(all_events[0 .. last_event_index + 1], this.watchlist);
+ this.ctx.onFileUpdate(all_events[0 .. last_event_index + 1], this.changed_filepaths[0..name_off + 1], this.watchlist);
remaining_events -= slice.len;
}
}