author    Jarred Sumner <jarred@jarredsumner.com>  2021-06-18 00:51:11 -0700
committer Jarred Sumner <jarred@jarredsumner.com>  2021-06-18 00:51:11 -0700
commit    6dce0c1e032edd9ff70f4a1c417a8662d1012e8b (patch)
tree      23e3af849a0bbbf8943227941f900dc91421a358 /src
parent    9ca283bb43ebee74bf36af50807474b962ac44a1 (diff)
100x!!
Former-commit-id: e0fa2e78da8083dc590c4b1f3d016ba545261b84
Diffstat (limited to 'src')
-rw-r--r--  src/api/schema.d.ts     |  47
-rw-r--r--  src/api/schema.js       | 154
-rw-r--r--  src/api/schema.peechy   |  38
-rw-r--r--  src/api/schema.zig      | 103
-rw-r--r--  src/bundler.zig         |  80
-rw-r--r--  src/css_scanner.zig     | 265
-rw-r--r--  src/feature_flags.zig   |   2
-rw-r--r--  src/http.zig            |  79
-rw-r--r--  src/http/mime_type.zig  |   2
-rw-r--r--  src/js_printer.zig      |  16
-rw-r--r--  src/linker.zig          |  53
-rw-r--r--  src/runtime/hmr.ts      | 142
-rw-r--r--  src/watcher.zig         |  12
13 files changed, 895 insertions, 98 deletions
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index e5370097c..367932105 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -123,7 +123,9 @@ type uint32 = number;
welcome = 1,
file_change_notification = 2,
build_success = 3,
- build_fail = 4
+ build_fail = 4,
+ manifest_success = 5,
+ manifest_fail = 6
}
export const WebsocketMessageKindKeys = {
1: "welcome",
@@ -133,14 +135,21 @@ type uint32 = number;
3: "build_success",
build_success: "build_success",
4: "build_fail",
- build_fail: "build_fail"
+ build_fail: "build_fail",
+ 5: "manifest_success",
+ manifest_success: "manifest_success",
+ 6: "manifest_fail",
+ manifest_fail: "manifest_fail"
}
export enum WebsocketCommandKind {
- build = 1
+ build = 1,
+ manifest = 2
}
export const WebsocketCommandKindKeys = {
1: "build",
- build: "build"
+ build: "build",
+ 2: "manifest",
+ manifest: "manifest"
}
export interface JSX {
factory: string;
@@ -308,6 +317,10 @@ type uint32 = number;
id: uint32;
}
+ export interface WebsocketCommandManifest {
+ id: uint32;
+ }
+
export interface WebsocketMessageBuildSuccess {
id: uint32;
from_timestamp: uint32;
@@ -324,6 +337,24 @@ type uint32 = number;
log: Log;
}
+ export interface DependencyManifest {
+ ids: Uint32Array;
+ }
+
+ export interface WebsocketMessageManifestSuccess {
+ id: uint32;
+ module_path: string;
+ loader: Loader;
+ manifest: DependencyManifest;
+ }
+
+ export interface WebsocketMessageManifestFailure {
+ id: uint32;
+ from_timestamp: uint32;
+ loader: Loader;
+ log: Log;
+ }
+
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeStringPointer(message: StringPointer, bb: ByteBuffer): void;
@@ -372,7 +403,15 @@ type uint32 = number;
export declare function decodeWebsocketCommand(buffer: ByteBuffer): WebsocketCommand;
export declare function encodeWebsocketCommandBuild(message: WebsocketCommandBuild, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandBuild(buffer: ByteBuffer): WebsocketCommandBuild;
+ export declare function encodeWebsocketCommandManifest(message: WebsocketCommandManifest, bb: ByteBuffer): void;
+ export declare function decodeWebsocketCommandManifest(buffer: ByteBuffer): WebsocketCommandManifest;
export declare function encodeWebsocketMessageBuildSuccess(message: WebsocketMessageBuildSuccess, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildSuccess(buffer: ByteBuffer): WebsocketMessageBuildSuccess;
export declare function encodeWebsocketMessageBuildFailure(message: WebsocketMessageBuildFailure, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildFailure(buffer: ByteBuffer): WebsocketMessageBuildFailure;
+ export declare function encodeDependencyManifest(message: DependencyManifest, bb: ByteBuffer): void;
+ export declare function decodeDependencyManifest(buffer: ByteBuffer): DependencyManifest;
+ export declare function encodeWebsocketMessageManifestSuccess(message: WebsocketMessageManifestSuccess, bb: ByteBuffer): void;
+ export declare function decodeWebsocketMessageManifestSuccess(buffer: ByteBuffer): WebsocketMessageManifestSuccess;
+ export declare function encodeWebsocketMessageManifestFailure(message: WebsocketMessageManifestFailure, bb: ByteBuffer): void;
+ export declare function decodeWebsocketMessageManifestFailure(buffer: ByteBuffer): WebsocketMessageManifestFailure;
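For orientation, a minimal sketch of driving the new manifest messages from a client. ByteBuffer is assumed to come from wherever src/runtime/hmr.ts gets it, and the module specifiers below are illustrative:

// Request a dependency manifest for module `id` over the HMR websocket,
// then read the reply. Mirrors the usage added to src/runtime/hmr.ts.
import * as API from "./schema";        // the generated module above (assumed path)
import { ByteBuffer } from "peechy";    // assumption: source of the ByteBuffer the generated code expects

function requestManifest(socket: WebSocket, id: number, timestamp: number) {
  const buf = new ByteBuffer(new Uint8Array(255));
  // Every command is a WebsocketCommand header followed by the command body.
  API.encodeWebsocketCommand({ kind: API.WebsocketCommandKind.manifest, timestamp }, buf);
  API.encodeWebsocketCommandManifest({ id }, buf);
  socket.send(buf._data.subarray(0, buf._index));
}

function readManifestReply(buffer: ByteBuffer): Uint32Array {
  // Fields decode in declaration order: id, module_path, loader, then the DependencyManifest.
  const success = API.decodeWebsocketMessageManifestSuccess(buffer);
  return success.manifest.ids;
}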
diff --git a/src/api/schema.js b/src/api/schema.js
index 8882ecc30..f460064e5 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -1241,28 +1241,40 @@ const WebsocketMessageKind = {
"2": 2,
"3": 3,
"4": 4,
+ "5": 5,
+ "6": 6,
"welcome": 1,
"file_change_notification": 2,
"build_success": 3,
- "build_fail": 4
+ "build_fail": 4,
+ "manifest_success": 5,
+ "manifest_fail": 6
};
const WebsocketMessageKindKeys = {
"1": "welcome",
"2": "file_change_notification",
"3": "build_success",
"4": "build_fail",
+ "5": "manifest_success",
+ "6": "manifest_fail",
"welcome": "welcome",
"file_change_notification": "file_change_notification",
"build_success": "build_success",
- "build_fail": "build_fail"
+ "build_fail": "build_fail",
+ "manifest_success": "manifest_success",
+ "manifest_fail": "manifest_fail"
};
const WebsocketCommandKind = {
"1": 1,
- "build": 1
+ "2": 2,
+ "build": 1,
+ "manifest": 2
};
const WebsocketCommandKindKeys = {
"1": "build",
- "build": "build"
+ "2": "manifest",
+ "build": "build",
+ "manifest": "manifest"
};
function decodeWebsocketMessage(bb) {
@@ -1385,6 +1397,24 @@ function encodeWebsocketCommandBuild(message, bb) {
}
+function decodeWebsocketCommandManifest(bb) {
+ var result = {};
+
+ result["id"] = bb.readUint32();
+ return result;
+}
+
+function encodeWebsocketCommandManifest(message, bb) {
+
+ var value = message["id"];
+ if (value != null) {
+ bb.writeUint32(value);
+ } else {
+ throw new Error("Missing required field \"id\"");
+ }
+
+}
+
function decodeWebsocketMessageBuildSuccess(bb) {
var result = {};
@@ -1489,6 +1519,112 @@ bb.writeByte(encoded);
}
+function decodeDependencyManifest(bb) {
+ var result = {};
+
+ result["ids"] = bb.readUint32ByteArray();
+ return result;
+}
+
+function encodeDependencyManifest(message, bb) {
+
+ var value = message["ids"];
+ if (value != null) {
+ bb.writeUint32ByteArray(value);
+ } else {
+ throw new Error("Missing required field \"ids\"");
+ }
+
+}
+
+function decodeWebsocketMessageManifestSuccess(bb) {
+ var result = {};
+
+ result["id"] = bb.readUint32();
+ result["module_path"] = bb.readString();
+ result["loader"] = Loader[bb.readByte()];
+ result["manifest"] = decodeDependencyManifest(bb);
+ return result;
+}
+
+function encodeWebsocketMessageManifestSuccess(message, bb) {
+
+ var value = message["id"];
+ if (value != null) {
+ bb.writeUint32(value);
+ } else {
+ throw new Error("Missing required field \"id\"");
+ }
+
+ var value = message["module_path"];
+ if (value != null) {
+ bb.writeString(value);
+ } else {
+ throw new Error("Missing required field \"module_path\"");
+ }
+
+ var value = message["loader"];
+ if (value != null) {
+ var encoded = Loader[value];
+if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
+bb.writeByte(encoded);
+ } else {
+ throw new Error("Missing required field \"loader\"");
+ }
+
+ var value = message["manifest"];
+ if (value != null) {
+ encodeDependencyManifest(value, bb);
+ } else {
+ throw new Error("Missing required field \"manifest\"");
+ }
+
+}
+
+function decodeWebsocketMessageManifestFailure(bb) {
+ var result = {};
+
+ result["id"] = bb.readUint32();
+ result["from_timestamp"] = bb.readUint32();
+ result["loader"] = Loader[bb.readByte()];
+ result["log"] = decodeLog(bb);
+ return result;
+}
+
+function encodeWebsocketMessageManifestFailure(message, bb) {
+
+ var value = message["id"];
+ if (value != null) {
+ bb.writeUint32(value);
+ } else {
+ throw new Error("Missing required field \"id\"");
+ }
+
+ var value = message["from_timestamp"];
+ if (value != null) {
+ bb.writeUint32(value);
+ } else {
+ throw new Error("Missing required field \"from_timestamp\"");
+ }
+
+ var value = message["loader"];
+ if (value != null) {
+ var encoded = Loader[value];
+if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"Loader\"");
+bb.writeByte(encoded);
+ } else {
+ throw new Error("Missing required field \"loader\"");
+ }
+
+ var value = message["log"];
+ if (value != null) {
+ encodeLog(value, bb);
+ } else {
+ throw new Error("Missing required field \"log\"");
+ }
+
+}
+
export { Loader }
export { LoaderKeys }
export { ResolveMode }
@@ -1557,7 +1693,15 @@ export { decodeWebsocketCommand }
export { encodeWebsocketCommand }
export { decodeWebsocketCommandBuild }
export { encodeWebsocketCommandBuild }
+export { decodeWebsocketCommandManifest }
+export { encodeWebsocketCommandManifest }
export { decodeWebsocketMessageBuildSuccess }
export { encodeWebsocketMessageBuildSuccess }
export { decodeWebsocketMessageBuildFailure }
-export { encodeWebsocketMessageBuildFailure }
\ No newline at end of file
+export { encodeWebsocketMessageBuildFailure }
+export { decodeDependencyManifest }
+export { encodeDependencyManifest }
+export { decodeWebsocketMessageManifestSuccess }
+export { encodeWebsocketMessageManifestSuccess }
+export { decodeWebsocketMessageManifestFailure }
+export { encodeWebsocketMessageManifestFailure }
\ No newline at end of file
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index e65319dc4..477ce5d0b 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -248,10 +248,13 @@ smol WebsocketMessageKind {
file_change_notification = 2;
build_success = 3;
build_fail = 4;
+ manifest_success = 5;
+ manifest_fail = 6;
}
smol WebsocketCommandKind {
build = 1;
+ manifest = 2;
}
// Each websocket message has two messages in it!
@@ -281,6 +284,10 @@ struct WebsocketCommandBuild {
uint32 id;
}
+struct WebsocketCommandManifest {
+ uint32 id;
+}
+
// We copy the module_path here incase they don't already have it
struct WebsocketMessageBuildSuccess {
uint32 id;
@@ -303,6 +310,37 @@ struct WebsocketMessageBuildFailure {
Log log;
}
+// CSS @import only for now!
+struct DependencyManifest {
+ uint32[] ids;
+}
+
+struct FileList {
+ StringPointer[] ptrs;
+ string files;
+}
+
+struct WebsocketMessageResolveIDs {
+ uint32[] id;
+ FileList list;
+}
+
+struct WebsocketCommandResolveIDs {
+ StringPointer[] ptrs;
+ string files;
+}
+struct WebsocketMessageManifestSuccess {
+ uint32 id;
+ string module_path;
+ Loader loader;
+ DependencyManifest manifest;
+}
+struct WebsocketMessageManifestFailure {
+ uint32 id;
+ uint32 from_timestamp;
+ Loader loader;
+ Log log;
+}
\ No newline at end of file
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 6c4b539f8..00460fcc6 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -1285,6 +1285,12 @@ pub const Api = struct {
/// build_fail
build_fail,
+ /// manifest_success
+ manifest_success,
+
+ /// manifest_fail
+ manifest_fail,
+
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
@@ -1297,6 +1303,9 @@ pub const Api = struct {
/// build
build,
+ /// manifest
+ manifest,
+
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
@@ -1399,6 +1408,22 @@ pub const Api = struct {
}
};
+ pub const WebsocketCommandManifest = packed struct {
+ /// id
+ id: u32 = 0,
+
+ pub fn decode(reader: anytype) anyerror!WebsocketCommandManifest {
+ var this = std.mem.zeroes(WebsocketCommandManifest);
+
+ this.id = try reader.readValue(u32);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeInt(this.id);
+ }
+ };
+
pub const WebsocketMessageBuildSuccess = struct {
/// id
id: u32 = 0,
@@ -1470,6 +1495,84 @@ pub const Api = struct {
try writer.writeValue(this.log);
}
};
+
+ pub const DependencyManifest = struct {
+ /// ids
+ ids: []const u32,
+
+ pub fn decode(reader: anytype) anyerror!DependencyManifest {
+ var this = std.mem.zeroes(DependencyManifest);
+
+ this.ids = try reader.readArray(u32);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeArray(u32, this.ids);
+ }
+ };
+
+ pub const WebsocketMessageManifestSuccess = struct {
+ /// id
+ id: u32 = 0,
+
+ /// module_path
+ module_path: []const u8,
+
+ /// loader
+ loader: Loader,
+
+ /// manifest
+ manifest: DependencyManifest,
+
+ pub fn decode(reader: anytype) anyerror!WebsocketMessageManifestSuccess {
+ var this = std.mem.zeroes(WebsocketMessageManifestSuccess);
+
+ this.id = try reader.readValue(u32);
+ this.module_path = try reader.readValue([]const u8);
+ this.loader = try reader.readValue(Loader);
+ this.manifest = try reader.readValue(DependencyManifest);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeInt(this.id);
+ try writer.writeValue(this.module_path);
+ try writer.writeEnum(this.loader);
+ try writer.writeValue(this.manifest);
+ }
+ };
+
+ pub const WebsocketMessageManifestFailure = struct {
+ /// id
+ id: u32 = 0,
+
+ /// from_timestamp
+ from_timestamp: u32 = 0,
+
+ /// loader
+ loader: Loader,
+
+ /// log
+ log: Log,
+
+ pub fn decode(reader: anytype) anyerror!WebsocketMessageManifestFailure {
+ var this = std.mem.zeroes(WebsocketMessageManifestFailure);
+
+ this.id = try reader.readValue(u32);
+ this.from_timestamp = try reader.readValue(u32);
+ this.loader = try reader.readValue(Loader);
+ this.log = try reader.readValue(Log);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeInt(this.id);
+ try writer.writeInt(this.from_timestamp);
+ try writer.writeEnum(this.loader);
+ try writer.writeValue(this.log);
+ }
+ };
};
const ExamplePackedStruct = packed struct {
diff --git a/src/bundler.zig b/src/bundler.zig
index c1ab43638..b4226d4d9 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -611,7 +611,7 @@ pub fn NewBundler(cache_files: bool) type {
var part = &ast.parts[ast.parts.len - 1];
var new_stmts: [1]Stmt = undefined;
- var register_args: [4]Expr = undefined;
+ var register_args: [3]Expr = undefined;
var package_json_string = E.String{ .utf8 = package.name };
var module_path_string = E.String{ .utf8 = package_relative_path };
@@ -850,6 +850,8 @@ pub fn NewBundler(cache_files: bool) type {
comptime import_path_format: options.BundleOptions.ImportPathFormat,
file_descriptor: ?StoredFileDescriptorType,
filepath_hash: u32,
+ comptime WatcherType: type,
+ watcher: *WatcherType,
) !BuildResolveResultPair {
if (resolve_result.is_external) {
return BuildResolveResultPair{
@@ -866,28 +868,59 @@ pub fn NewBundler(cache_files: bool) type {
var old_bundler_allocator = bundler.allocator;
bundler.allocator = allocator;
defer bundler.allocator = old_bundler_allocator;
- var result = bundler.parse(allocator, file_path, loader, resolve_result.dirname_fd, file_descriptor, filepath_hash) orelse {
- bundler.resetStore();
- return BuildResolveResultPair{
- .written = 0,
- .input_fd = null,
- };
- };
var old_linker_allocator = bundler.linker.allocator;
defer bundler.linker.allocator = old_linker_allocator;
bundler.linker.allocator = allocator;
- try bundler.linker.link(file_path, &result, import_path_format);
-
- return BuildResolveResultPair{
- .written = try bundler.print(
- result,
- Writer,
- writer,
- ),
- .input_fd = result.input_fd,
- };
- // output_file.version = if (resolve_result.is_from_node_modules) resolve_result.package_json_version else null;
+ switch (loader) {
+ .css => {
+ const CSSBundler = Css.NewBundler(
+ Writer,
+ @TypeOf(&bundler.linker),
+ @TypeOf(&bundler.resolver.caches.fs),
+ WatcherType,
+ @TypeOf(bundler.fs),
+ );
+
+ return BuildResolveResultPair{
+ .written = try CSSBundler.runWithResolveResult(
+ resolve_result,
+ bundler.fs,
+ writer,
+ watcher,
+ &bundler.resolver.caches.fs,
+ filepath_hash,
+ file_descriptor,
+ allocator,
+ bundler.log,
+ &bundler.linker,
+ ),
+ .input_fd = file_descriptor,
+ };
+ },
+ else => {
+ var result = bundler.parse(allocator, file_path, loader, resolve_result.dirname_fd, file_descriptor, filepath_hash) orelse {
+ bundler.resetStore();
+ return BuildResolveResultPair{
+ .written = 0,
+ .input_fd = null,
+ };
+ };
+
+ try bundler.linker.link(file_path, &result, import_path_format);
+
+ return BuildResolveResultPair{
+ .written = try bundler.print(
+ result,
+ Writer,
+ writer,
+ ),
+ .input_fd = result.input_fd,
+ };
+ // output_file.version = if (resolve_result.is_from_node_modules) resolve_result.package_json_version else null;
+
+ },
+ }
}
pub fn buildWithResolveResultEager(
@@ -972,6 +1005,7 @@ pub fn NewBundler(cache_files: bool) type {
std.fs.File,
@TypeOf(&bundler.linker),
import_path_format,
+ void,
);
const entry = bundler.resolver.caches.fs.readFile(
bundler.fs,
@@ -1149,7 +1183,7 @@ pub fn NewBundler(cache_files: bool) type {
bundler.fs,
path.text,
dirname_fd,
- !cache_files,
+ true,
file_descriptor,
) catch return null;
@@ -1195,9 +1229,7 @@ pub fn NewBundler(cache_files: bool) type {
.input_fd = entry.fd,
};
},
- .css => {
- return null;
- },
+ .css => {},
else => Global.panic("Unsupported loader {s} for path: {s}", .{ loader, source.path.text }),
}
@@ -1348,7 +1380,7 @@ pub fn NewBundler(cache_files: bool) type {
const loader = bundler.options.loaders.get(resolved.path_pair.primary.name.ext) orelse .file;
switch (loader) {
- .js, .jsx, .ts, .tsx, .json => {
+ .js, .jsx, .ts, .tsx, .json, .css => {
return ServeResult{
.file = options.OutputFile.initPending(loader, resolved),
.mime_type = MimeType.byLoader(
diff --git a/src/css_scanner.zig b/src/css_scanner.zig
index 42c7a5778..85acc6de4 100644
--- a/src/css_scanner.zig
+++ b/src/css_scanner.zig
@@ -5,9 +5,9 @@ const options = @import("./options.zig");
const import_record = @import("import_record.zig");
const logger = @import("./logger.zig");
const Options = options;
-
+const resolver = @import("./resolver/resolver.zig");
const _linker = @import("./linker.zig");
-
+const alloc = @import("./alloc.zig");
const replacementCharacter: CodePoint = 0xFFFD;
pub const Chunk = struct {
@@ -316,7 +316,7 @@ pub const Scanner = struct {
return text;
}
- pub fn next(scanner: *Scanner, comptime WriterType: type, writer: WriterType, writeChunk: (fn (ctx: WriterType, Chunk) anyerror!void)) !void {
+ pub fn next(scanner: *Scanner, comptime import_behavior: ImportBehavior, comptime WriterType: type, writer: WriterType, writeChunk: (fn (ctx: WriterType, Chunk) anyerror!void)) !void {
scanner.has_newline_before = scanner.end == 0;
scanner.has_delimiter_before = false;
scanner.step();
@@ -359,8 +359,18 @@ pub const Scanner = struct {
':', ',' => {
scanner.has_delimiter_before = true;
},
+ '{', '}' => {
+ scanner.has_delimiter_before = false;
+
+ // Heuristic:
+ // If we're only scanning the imports, as soon as there's a curly brace somewhere we can assume that @import is done.
+ // @import only appears at the top of the file. Only @charset is allowed to be above it.
+ if (import_behavior == .scan) {
+ return;
+ }
+ },
// this is a little hacky, but it should work since we're not parsing scopes
- '{', '}', ';' => {
+ ';' => {
scanner.has_delimiter_before = false;
},
'u', 'U' => {
@@ -421,11 +431,13 @@ pub const Scanner = struct {
chunk.content = .{ .t_verbatim = .{} };
// flush the pending chunk
try writeChunk(writer, chunk);
+
chunk.range.loc.start = @intCast(i32, url_start);
chunk.range.len = @intCast(i32, scanner.end) - chunk.range.loc.start;
chunk.content = .{ .t_url = url_text };
try writeChunk(writer, chunk);
scanner.has_delimiter_before = false;
+
continue :restart;
},
@@ -590,9 +602,11 @@ pub const Scanner = struct {
}
scanner.step();
}
- chunk.range.len = @intCast(i32, scanner.end) - std.math.max(chunk.range.loc.start, 0);
- chunk.content = .{ .t_import = import };
- try writeChunk(writer, chunk);
+ if (import_behavior == .scan or import_behavior == .keep) {
+ chunk.range.len = @intCast(i32, scanner.end) - std.math.max(chunk.range.loc.start, 0);
+ chunk.content = .{ .t_import = import };
+ try writeChunk(writer, chunk);
+ }
scanner.step();
continue :restart;
},
@@ -751,10 +765,13 @@ pub fn isHex(c: CodePoint) ?CodePoint {
};
}
+pub const ImportBehavior = enum { keep, omit, scan };
+
pub fn NewWriter(
comptime WriterType: type,
comptime LinkerType: type,
comptime import_path_format: Options.BundleOptions.ImportPathFormat,
+ comptime BuildContextType: type,
) type {
return struct {
const Writer = @This();
@@ -763,6 +780,7 @@ pub fn NewWriter(
linker: LinkerType,
source: *const logger.Source,
written: usize = 0,
+ buildCtx: BuildContextType = undefined,
pub fn init(
source: *const logger.Source,
@@ -777,6 +795,28 @@ pub fn NewWriter(
};
}
+ pub fn scan(writer: *Writer, log: *logger.Log, allocator: *std.mem.Allocator) !void {
+ var scanner = Scanner.init(
+ log,
+
+ allocator,
+ writer.source,
+ );
+
+ try scanner.next(.scan, @TypeOf(writer), writer, scanChunk);
+ }
+
+ pub fn append(writer: *Writer, log: *logger.Log, allocator: *std.mem.Allocator) !void {
+ var scanner = Scanner.init(
+ log,
+
+ allocator,
+ writer.source,
+ );
+
+ try scanner.next(.omit, @TypeOf(writer), writer, writeBundledChunk);
+ }
+
pub fn run(writer: *Writer, log: *logger.Log, allocator: *std.mem.Allocator) !void {
var scanner = Scanner.init(
log,
@@ -785,7 +825,7 @@ pub fn NewWriter(
writer.source,
);
- try scanner.next(@TypeOf(writer), writer, writeChunk);
+ try scanner.next(.keep, @TypeOf(writer), writer, commitChunk);
}
fn writeString(writer: *Writer, str: string, quote: Chunk.TextContent.Quote) !void {
@@ -847,7 +887,34 @@ pub fn NewWriter(
}
}
- pub fn writeChunk(writer: *Writer, chunk: Chunk) !void {
+ pub fn scanChunk(writer: *Writer, chunk: Chunk) !void {
+ switch (chunk.content) {
+ .t_url => |url| {},
+ .t_import => |import| {
+ try writer.buildCtx.addCSSImport(
+ try writer.linker.resolveCSS(
+ writer.source.path,
+ import.text.utf8,
+ chunk.range,
+ import_record.ImportKind.at,
+ Options.BundleOptions.ImportPathFormat.absolute_path,
+ true,
+ ),
+ );
+ },
+ .t_verbatim => |verbatim| {},
+ }
+ }
+
+ pub fn commitChunk(writer: *Writer, chunk: Chunk) !void {
+ return try writeChunk(writer, chunk, false);
+ }
+
+ pub fn writeBundledChunk(writer: *Writer, chunk: Chunk) !void {
+ return try writeChunk(writer, chunk, true);
+ }
+
+ pub fn writeChunk(writer: *Writer, chunk: Chunk, comptime omit_imports: bool) !void {
switch (chunk.content) {
.t_url => |url| {
const url_str = try writer.linker.resolveCSS(
@@ -856,31 +923,36 @@ pub fn NewWriter(
chunk.range,
import_record.ImportKind.url,
import_path_format,
+ true,
);
try writer.writeURL(url_str, url);
},
.t_import => |import| {
- const url_str = try writer.linker.resolveCSS(
- writer.source.path,
- import.text.utf8,
- chunk.range,
- import_record.ImportKind.at,
- import_path_format,
- );
+ if (!omit_imports) {
+ const url_str = try writer.linker.resolveCSS(
+ writer.source.path,
+ import.text.utf8,
+ chunk.range,
+ import_record.ImportKind.at,
+ import_path_format,
+ false,
+ );
- try writer.ctx.writeAll("@import ");
- writer.written += "@import ".len;
+ try writer.ctx.writeAll("@import ");
+ writer.written += "@import ".len;
- if (import.url) {
- try writer.writeURL(url_str, import.text);
- } else {
- try writer.writeString(url_str, import.text.quote);
- }
+ if (import.url) {
+ try writer.writeURL(url_str, import.text);
+ } else {
+ try writer.writeString(url_str, import.text.quote);
+ }
- try writer.ctx.writeAll(import.suffix);
- writer.written += import.suffix.len;
- try writer.ctx.writeAll("\n");
- writer.written += 1;
+ try writer.ctx.writeAll(import.suffix);
+ writer.written += import.suffix.len;
+ try writer.ctx.writeAll("\n");
+
+ writer.written += 1;
+ }
},
.t_verbatim => |verbatim| {
defer writer.written += @intCast(usize, chunk.range.len);
@@ -891,7 +963,7 @@ pub fn NewWriter(
usize,
@intCast(
usize,
- chunk.range.len + chunk.range.loc.start,
+ chunk.range.len,
),
),
);
@@ -908,3 +980,140 @@ pub fn NewWriter(
}
};
}
+
+const ImportQueueFifo = std.fifo.LinearFifo(u32, .Dynamic);
+const QueuedList = std.ArrayList(u32);
+threadlocal var global_queued: QueuedList = undefined;
+threadlocal var global_import_queud: ImportQueueFifo = undefined;
+threadlocal var global_bundle_queud: QueuedList = undefined;
+threadlocal var has_set_global_queue = false;
+threadlocal var int_buf_print: [256]u8 = undefined;
+pub fn NewBundler(
+ comptime Writer: type,
+ comptime Linker: type,
+ comptime FileReader: type,
+ comptime Watcher: type,
+ comptime FSType: type,
+) type {
+ return struct {
+ const CSSBundler = @This();
+ queued: *QueuedList,
+ import_queue: *ImportQueueFifo,
+ bundle_queue: *QueuedList,
+ writer: Writer,
+ watcher: *Watcher,
+ fs_reader: FileReader,
+ fs: FSType,
+ allocator: *std.mem.Allocator,
+ pub fn runWithResolveResult(
+ resolve_result: resolver.Result,
+ fs: FSType,
+ writer: Writer,
+ watcher: *Watcher,
+ fs_reader: FileReader,
+ hash: u32,
+ input_fd: ?StoredFileDescriptorType,
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ linker: Linker,
+ ) !usize {
+ if (!has_set_global_queue) {
+ global_queued = QueuedList.init(alloc.static);
+ global_import_queud = ImportQueueFifo.init(alloc.static);
+ global_bundle_queud = QueuedList.init(alloc.static);
+ has_set_global_queue = true;
+ } else {
+ global_queued.clearRetainingCapacity();
+ global_import_queud.head = 0;
+ global_import_queud.count = 0;
+ global_bundle_queud.clearRetainingCapacity();
+ }
+
+ var this = CSSBundler{
+ .queued = &global_queued,
+ .import_queue = &global_import_queud,
+ .bundle_queue = &global_bundle_queud,
+ .writer = writer,
+ .fs_reader = fs_reader,
+ .fs = fs,
+
+ .allocator = allocator,
+ .watcher = watcher,
+ };
+ const CSSWriter = NewWriter(*CSSBundler, Linker, .absolute_url, *CSSBundler);
+
+ var css = CSSWriter.init(
+ undefined,
+ &this,
+ linker,
+ );
+ css.buildCtx = &this;
+
+ try this.addCSSImport(resolve_result.path_pair.primary.text);
+
+ while (this.import_queue.readItem()) |item| {
+ const watcher_id = this.watcher.indexOf(item) orelse unreachable;
+ const watch_item = this.watcher.watchlist.get(watcher_id);
+ const source = try this.getSource(watch_item.file_path, watch_item.fd);
+ css.source = &source;
+ try css.scan(log, allocator);
+ }
+
+ // We LIFO
+ var i: i32 = @intCast(i32, this.bundle_queue.items.len - 1);
+ while (i >= 0) : (i -= 1) {
+ const item = this.bundle_queue.items[@intCast(usize, i)];
+ const watcher_id = this.watcher.indexOf(item) orelse unreachable;
+ const watch_item = this.watcher.watchlist.get(watcher_id);
+ const source = try this.getSource(watch_item.file_path, watch_item.fd);
+ css.source = &source;
+ const file_path = fs.relativeTo(watch_item.file_path);
+ if (FeatureFlags.css_supports_fence) {
+ try this.writeAll("\n@supports (hmr-watch-id:");
+ const int_buf_size = std.fmt.formatIntBuf(&int_buf_print, item, 10, .upper, .{});
+ try this.writeAll(int_buf_print[0..int_buf_size]);
+ try this.writeAll(") and (hmr-file:\"");
+ try this.writeAll(file_path);
+ try this.writeAll("\") {}\n");
+ }
+ try this.writeAll("/* ");
+ try this.writeAll(file_path);
+ try this.writeAll("*/\n");
+ try css.append(log, allocator);
+ }
+
+ try this.writer.done();
+ return css.written;
+ }
+
+ pub fn getSource(this: *CSSBundler, url: string, input_fd: StoredFileDescriptorType) !logger.Source {
+ const entry = try this.fs_reader.readFile(this.fs, url, 0, true, input_fd);
+ const file = Fs.File{ .path = Fs.Path.init(url), .contents = entry.contents };
+ return logger.Source.initFile(file, this.allocator);
+ }
+
+ pub fn addCSSImport(this: *CSSBundler, absolute_path: string) !void {
+ const hash = Watcher.getHash(absolute_path);
+ if (this.queued.items.len > 0 and std.mem.indexOfScalar(u32, this.queued.items, hash) != null) {
+ return;
+ }
+
+ const watcher_index = this.watcher.indexOf(hash);
+
+ if (watcher_index == null) {
+ var file = try std.fs.openFileAbsolute(absolute_path, .{ .read = true });
+ try this.watcher.appendFile(file.handle, absolute_path, hash, .css, true);
+ }
+
+ try this.import_queue.writeItem(hash);
+ try this.queued.append(hash);
+ try this.bundle_queue.append(hash);
+ }
+
+ pub fn writeAll(this: *CSSBundler, buf: anytype) !void {
+ _ = try this.writer.writeAll(buf);
+ }
+
+ // pub fn copyFileRange(this: *CSSBundler, buf: anytype) !void {}
+ };
+}
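The queue discipline in NewBundler is easier to see in miniature. A simplified TypeScript model, not the Zig implementation (names are illustrative): imports are discovered breadth-first, deduplicated by hash, then emitted last-in-first-out.

function cssBundleOrder(entry: string, importsOf: (path: string) => string[]): string[] {
  const queued = new Set<string>();      // dedupe set, like `queued` keyed by Watcher.getHash
  const importQueue: string[] = [];      // scan FIFO, like `import_queue`
  const bundleQueue: string[] = [];      // emit-order source, like `bundle_queue`

  const add = (path: string) => {        // models addCSSImport
    if (queued.has(path)) return;        // each file is scanned and emitted once
    queued.add(path);
    importQueue.push(path);
    bundleQueue.push(path);
  };

  add(entry);
  while (importQueue.length > 0) {
    const current = importQueue.shift()!;
    for (const dep of importsOf(current)) add(dep);  // models css.scan() collecting @imports
  }

  // "We LIFO": emit in reverse discovery order so imported files generally precede their importers.
  return bundleQueue.reverse();
}

For the chain index.css -> link.css -> button.css described in src/runtime/hmr.ts, this yields [button.css, link.css, index.css].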
diff --git a/src/feature_flags.zig b/src/feature_flags.zig
index 0078d4cb9..09264153c 100644
--- a/src/feature_flags.zig
+++ b/src/feature_flags.zig
@@ -29,6 +29,8 @@ pub const tracing = true;
pub const verbose_watcher = true;
+pub const css_supports_fence = true;
+
pub const CSSModulePolyfill = enum {
// When you import a .css file and you reference the import in JavaScript
// Just return whatever the property key they referenced was
diff --git a/src/http.zig b/src/http.zig
index e6184915e..51949b713 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -10,6 +10,7 @@ const js_ast = @import("./js_ast.zig");
const bundler = @import("bundler.zig");
const logger = @import("logger.zig");
const Fs = @import("./fs.zig");
+const Options = @import("./options.zig");
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
@@ -310,12 +311,13 @@ pub const RequestContext = struct {
}
pub fn sendNotFound(req: *RequestContext) !void {
+ defer req.done();
try req.writeStatus(404);
try req.flushHeaders();
- req.done();
}
pub fn sendInternalError(ctx: *RequestContext, err: anytype) !void {
+ defer ctx.done();
try ctx.writeStatus(500);
const printed = std.fmt.bufPrint(&error_buf, "Error: {s}", .{@errorName(err)}) catch |err2| brk: {
if (isDebug or isTest) {
@@ -332,15 +334,15 @@ pub const RequestContext = struct {
threadlocal var error_buf: [4096]u8 = undefined;
pub fn sendNotModified(ctx: *RequestContext) !void {
+ defer ctx.done();
try ctx.writeStatus(304);
try ctx.flushHeaders();
- ctx.done();
}
pub fn sendNoContent(ctx: *RequestContext) !void {
+ defer ctx.done();
try ctx.writeStatus(204);
try ctx.flushHeaders();
- ctx.done();
}
pub fn appendHeader(ctx: *RequestContext, comptime key: string, value: string) void {
@@ -858,6 +860,22 @@ pub const RequestContext = struct {
_ = try std.Thread.spawn(WebsocketHandler.handle, handler);
}
+ pub fn auto500(ctx: *RequestContext) void {
+ if (ctx.has_called_done) {
+ return;
+ }
+
+ defer ctx.done();
+
+ if (ctx.status == null) {
+ ctx.writeStatus(500) catch {};
+ }
+
+ if (!ctx.has_written_last_header) {
+ ctx.flushHeaders() catch {};
+ }
+ }
+
pub fn handleGet(ctx: *RequestContext) !void {
if (strings.eqlComptime(ctx.url.extname, "jsb") and ctx.bundler.options.node_modules_bundle != null) {
return try ctx.sendJSB();
@@ -868,6 +886,8 @@ pub const RequestContext = struct {
return;
}
+ // errdefer ctx.auto500();
+
const result = try ctx.bundler.buildFile(
&ctx.log,
ctx.allocator,
@@ -902,10 +922,11 @@ pub const RequestContext = struct {
const SocketPrinterInternal = struct {
const SocketPrinterInternal = @This();
rctx: *RequestContext,
+ _loader: Options.Loader,
threadlocal var buffer: MutableString = undefined;
threadlocal var has_loaded_buffer: bool = false;
- pub fn init(rctx: *RequestContext) SocketPrinterInternal {
+ pub fn init(rctx: *RequestContext, _loader: Options.Loader) SocketPrinterInternal {
// if (isMac) {
// _ = std.os.fcntl(file.handle, std.os.F_NOCACHE, 1) catch 0;
// }
@@ -919,6 +940,7 @@ pub const RequestContext = struct {
return SocketPrinterInternal{
.rctx = rctx,
+ ._loader = _loader,
};
}
pub fn writeByte(_ctx: *SocketPrinterInternal, byte: u8) anyerror!usize {
@@ -950,7 +972,9 @@ pub const RequestContext = struct {
}
if (FeatureFlags.strong_etags_for_built_files) {
- if (buf.len < 16 * 16 * 16 * 16) {
+ // Always cache css & json files, even big ones
+ // css is especially important because we want to try and skip having the browser parse it whenever we can
+ if (buf.len < 16 * 16 * 16 * 16 or chunky._loader == .css or chunky._loader == .json) {
const strong_etag = std.hash.Wyhash.hash(1, buf);
const etag_content_slice = std.fmt.bufPrintIntToSlice(strong_etag_buffer[0..49], strong_etag, 16, .upper, .{});
@@ -983,13 +1007,15 @@ pub const RequestContext = struct {
SocketPrinterInternal.getLastByte,
SocketPrinterInternal.getLastLastByte,
);
+ const loader = ctx.bundler.options.loaders.get(result.file.input.name.ext) orelse .file;
- var chunked_encoder = SocketPrinter.init(SocketPrinterInternal.init(ctx));
+ var chunked_encoder = SocketPrinter.init(
+ SocketPrinterInternal.init(ctx, loader),
+ );
// It will call flush for us automatically
defer ctx.bundler.resetStore();
- const loader = ctx.bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
- var written = try ctx.bundler.buildWithResolveResult(
+ var written = ctx.bundler.buildWithResolveResult(
resolve_result,
ctx.allocator,
loader,
@@ -998,17 +1024,32 @@ pub const RequestContext = struct {
.absolute_url,
input_fd,
hash,
- );
- if (written.input_fd) |written_fd| {
- try ctx.watcher.addFile(
- written_fd,
- result.file.input.text,
- hash,
- loader,
- true,
- );
- if (ctx.watcher.watchloop_handle == null) {
- try ctx.watcher.start();
+ Watcher,
+ ctx.watcher,
+ ) catch |err| {
+ ctx.sendInternalError(err) catch {};
+ return;
+ };
+
+ // CSS handles this specially
+ if (loader != .css) {
+ if (written.input_fd) |written_fd| {
+ try ctx.watcher.addFile(
+ written_fd,
+ result.file.input.text,
+ hash,
+ loader,
+ true,
+ );
+ if (ctx.watcher.watchloop_handle == null) {
+ try ctx.watcher.start();
+ }
+ }
+ } else {
+ if (written.written > 0) {
+ if (ctx.watcher.watchloop_handle == null) {
+ try ctx.watcher.start();
+ }
}
}
},
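The loader-aware ETag rule added above reads most clearly as a predicate; a simplified model (the function name is illustrative):

function shouldComputeStrongEtag(byteLength: number, loader: string): boolean {
  // Small responses are always cheap enough to hash. CSS and JSON get a strong ETag
  // regardless of size, because serving a 304 lets the browser skip re-parsing entirely.
  return byteLength < 16 * 16 * 16 * 16 /* 65,536 */ || loader === "css" || loader === "json";
}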
diff --git a/src/http/mime_type.zig b/src/http/mime_type.zig
index 02d6739be..b7139d1c0 100644
--- a/src/http/mime_type.zig
+++ b/src/http/mime_type.zig
@@ -26,7 +26,7 @@ pub const Category = enum {
};
pub const other = MimeType.init("application/octet-stream", .other);
-pub const css = MimeType.init("application/octet-stream", .other);
+pub const css = MimeType.init("text/css", .css);
pub const javascript = MimeType.init("text/javascript;charset=utf-8", .javascript);
pub const ico = MimeType.init("image/vnd.microsoft.icon", .image);
diff --git a/src/js_printer.zig b/src/js_printer.zig
index d55473481..69e6ba03c 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -3556,6 +3556,18 @@ pub fn NewWriter(
};
}
+ pub fn isCopyFileRangeSupported() bool {
+ return comptime std.meta.trait.hasFn("copyFileRange")(ContextType);
+ }
+
+ pub fn copyFileRange(ctx: ContextType, in_file: StoredFileDescriptorType, start: usize, end: usize) !void {
+ ctx.sendfile(
+ in_file,
+ start,
+ end,
+ );
+ }
+
pub fn getError(writer: *const Self) anyerror!void {
if (writer.orig_err) |orig_err| {
return orig_err;
@@ -3577,9 +3589,9 @@ pub fn NewWriter(
pub const Error = error{FormatError};
pub fn writeAll(writer: *Self, bytes: anytype) Error!usize {
- const written = writer.written;
+ const written = std.math.max(writer.written, 0);
writer.print(@TypeOf(bytes), bytes);
- return writer.written - written;
+ return @intCast(usize, writer.written) - @intCast(usize, written);
}
pub inline fn print(writer: *Self, comptime ValueType: type, str: ValueType) void {
diff --git a/src/linker.zig b/src/linker.zig
index 9c20718e0..0f7b1325b 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -113,13 +113,14 @@ pub fn NewLinker(comptime BundlerType: type) type {
range: logger.Range,
kind: ImportKind,
comptime import_path_format: Options.BundleOptions.ImportPathFormat,
+ comptime resolve_only: bool,
) !string {
const dir = path.name.dirWithTrailingSlash();
switch (kind) {
.at => {
var resolve_result = try this.resolver.resolve(dir, url, .at);
- if (resolve_result.is_external) {
+ if (resolve_only or resolve_result.is_external) {
return resolve_result.path_pair.primary.text;
}
@@ -132,7 +133,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
},
.at_conditional => {
var resolve_result = try this.resolver.resolve(dir, url, .at_conditional);
- if (resolve_result.is_external) {
+ if (resolve_only or resolve_result.is_external) {
return resolve_result.path_pair.primary.text;
}
@@ -144,7 +145,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
},
.url => {
var resolve_result = try this.resolver.resolve(dir, url, .url);
- if (resolve_result.is_external) {
+ if (resolve_only or resolve_result.is_external) {
return resolve_result.path_pair.primary.text;
}
@@ -420,6 +421,11 @@ pub fn NewLinker(comptime BundlerType: type) type {
comptime import_path_format: Options.BundleOptions.ImportPathFormat,
) !Fs.Path {
switch (import_path_format) {
+ .absolute_path => {
+ var relative_name = linker.fs.relative(source_dir, source_path);
+
+ return Fs.Path.initWithPretty(source_path, relative_name);
+ },
.relative => {
var relative_name = linker.fs.relative(source_dir, source_path);
var pretty: string = undefined;
@@ -491,18 +497,35 @@ pub fn NewLinker(comptime BundlerType: type) type {
basename = try linker.getHashedFilename(basepath, null);
}
- const absolute_url = try std.fmt.allocPrint(
- linker.allocator,
- "{s}{s}{s}{s}",
- .{
- linker.options.public_url,
- dirname,
- basename,
- absolute_pathname.ext,
- },
- );
-
- return Fs.Path.initWithPretty(absolute_url, absolute_url);
+ const needs_slash = dirname.len > 0 and dirname[dirname.len - 1] != '/';
+
+ if (needs_slash) {
+ const absolute_url = try std.fmt.allocPrint(
+ linker.allocator,
+ "{s}{s}/{s}{s}",
+ .{
+ linker.options.public_url,
+ dirname,
+ basename,
+ absolute_pathname.ext,
+ },
+ );
+
+ return Fs.Path.initWithPretty(absolute_url, absolute_url);
+ } else {
+ const absolute_url = try std.fmt.allocPrint(
+ linker.allocator,
+ "{s}{s}{s}{s}",
+ .{
+ linker.options.public_url,
+ dirname,
+ basename,
+ absolute_pathname.ext,
+ },
+ );
+
+ return Fs.Path.initWithPretty(absolute_url, absolute_url);
+ }
},
else => unreachable,
diff --git a/src/runtime/hmr.ts b/src/runtime/hmr.ts
index 1d7a41c21..41e756778 100644
--- a/src/runtime/hmr.ts
+++ b/src/runtime/hmr.ts
@@ -8,6 +8,120 @@ function formatDuration(duration: number) {
return Math.round(duration * 1000) / 1000;
}
+class StringListPointer {
+ ptr: API.StringPointer;
+ source_index: number;
+}
+
+// How this works
+// The first time you load a <link rel="stylesheet">
+// It loads via @import. The natural way.
+// Then, you change a file. Say, button.css:
+// @import chain:
+// index.css -> link.css -> button.css -> foo.css
+// HTML:
+// <link rel="stylesheet" href="./index.css">
+// Now, we need to update "button.css". But, we can't control that.
+// Instead, we replace '<link rel="stylesheet" href="./index.css">'
+// With:
+// - <link rel="stylesheet" href="/_assets/1290123980123.css?noimport">
+// - <link rel="stylesheet" href="/_assets/1290123980123.css?noimport">
+// - <link rel="stylesheet" href="/_assets/1290123980123.css?noimport">
+// - <link rel="stylesheet" href="/_assets/1290123980123.css?noimport">
+// Now, say you update "link.css".
+// This time, we replace:
+// <link rel="stylesheet" href="./link.css?noimport">
+// With:
+// <link rel="stylesheet" href="./link.css?noimport&${from_timestamp}">
+export class CSSLoader {
+ hmr: HMRClient;
+ manifest?: API.DependencyManifest;
+
+ stringList: string[] = [];
+ idMap: Map<number, StringListPointer> = new Map();
+
+ selectorForId(id: number) {
+ return `hmr__${id.toString(10)}`;
+ }
+
+ fetchLinkTagById(id: number) {
+ const selector = this.selectorForId(id);
+ var element: HTMLLinkElement = document.querySelector(selector);
+
+ if (!element) {
+ element = document.createElement("link");
+ element.setAttribute("rel", "stylesheet");
+ element.setAttribute("id", selector);
+ element.setAttribute("href", `/_assets/${id}.css?noimport`);
+ }
+
+ return element;
+ }
+
+ handleManifestSuccess(buffer: ByteBuffer, timestamp: number) {
+ const success = API.decodeWebsocketMessageManifestSuccess(buffer);
+ if (success.loader !== API.Loader.css) {
+ __hmrlog.warn(
+ "Ignoring unimplemented loader:",
+ API.LoaderKeys[success.loader]
+ );
+ return;
+ }
+
+ const rootSelector = this.selectorForId(success.id);
+ let rootLinkTag: HTMLLinkElement = document.querySelector(rootSelector);
+ if (!rootLinkTag) {
+ for (let linkTag of document.querySelectorAll("link")) {
+ if (
+ new URL(linkTag.href, location.href).pathname.substring(1) ===
+ success.module_path
+ ) {
+ rootLinkTag = linkTag;
+ break;
+ }
+ }
+ }
+
+ if (!rootLinkTag) {
+ __hmrlog.debug("Skipping unknown CSS file", success.module_path);
+ return;
+ }
+
+ const elementList: HTMLLinkElement = new Array();
+ for (let i = 0; i < success.manifest.files.length; i++) {}
+ }
+ handleManifestFail(buffer: ByteBuffer, timestamp: number) {}
+ static request_manifest_buf: Uint8Array = undefined;
+ handleFileChangeNotification(
+ file_change_notification: API.WebsocketMessageFileChangeNotification,
+ timestamp: number
+ ) {
+ if (!CSSLoader.request_manifest_buf) {
+ CSSLoader.request_manifest_buf = new Uint8Array(255);
+ }
+ var buf = new ByteBuffer(CSSLoader.request_manifest_buf);
+ API.encodeWebsocketCommand(
+ {
+ kind: API.WebsocketCommandKind.manifest,
+ timestamp,
+ },
+ buf
+ );
+ API.encodeWebsocketCommandManifest(
+ {
+ id: file_change_notification.id,
+ },
+ buf
+ );
+
+ try {
+ this.hmr.socket.send(buf._data.subarray(0, buf._index));
+ } catch (exception) {
+ __hmrlog.error(exception);
+ }
+ }
+}
+
class HMRClient {
static client: HMRClient;
socket: WebSocket;
@@ -17,6 +131,9 @@ class HMRClient {
// This so we can send timestamps as uint32 instead of 128-bit integers
epoch: number = 0;
+ loaders = {
+ css: new CSSLoader(),
+ };
start() {
if (runOnce) {
__hmrlog.warn(
@@ -25,6 +142,7 @@ class HMRClient {
return;
}
+ this.loaders.css.hmr = this;
runOnce = true;
this.connect();
}
@@ -159,6 +277,22 @@ class HMRClient {
handleFileChangeNotification(buffer: ByteBuffer, timestamp: number) {
const notification =
API.decodeWebsocketMessageFileChangeNotification(buffer);
+ if (notification.loader === API.Loader.css) {
+ if (typeof window === "undefined") {
+ __hmrlog.debug(`Skipping CSS on non-webpage environment`);
+ return;
+ }
+
+ if ((this.builds.get(notification.id) || -Infinity) > timestamp) {
+ __hmrlog.debug(`Skipping outdated update`);
+ return;
+ }
+
+ this.loaders.css.handleFileChangeNotification(notification, timestamp);
+ this.builds.set(notification.id, timestamp);
+ return;
+ }
+
const index = HMRModule.dependencies.graph.indexOf(notification.id);
if (index === -1) {
@@ -215,6 +349,14 @@ class HMRClient {
this.handleBuildSuccess(buffer, header.timestamp);
break;
}
+ case API.WebsocketMessageKind.manifest_success: {
+ this.loaders.css.handleManifestSuccess(buffer, header.timestamp);
+ break;
+ }
+ case API.WebsocketMessageKind.manifest_fail: {
+ this.loaders.css.handleManifestFail(buffer, header.timestamp);
+ break;
+ }
case API.WebsocketMessageKind.file_change_notification: {
this.handleFileChangeNotification(buffer, header.timestamp);
break;
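handleManifestSuccess is left as a stub above; the comment block at the top of CSSLoader describes where it is heading. A hedged sketch of the replacement step it implies, reusing the attributes fetchLinkTagById sets (the helper name is hypothetical):

// Swap the root <link rel="stylesheet"> for one ?noimport link per file id in the manifest,
// so each file in the @import chain can later be refreshed individually.
function expandRootLink(rootLinkTag: HTMLLinkElement, ids: Uint32Array) {
  const replacements: HTMLLinkElement[] = [];
  for (const id of ids) {
    const link = document.createElement("link");
    link.setAttribute("rel", "stylesheet");
    link.setAttribute("id", `hmr__${id.toString(10)}`);
    link.setAttribute("href", `/_assets/${id}.css?noimport`);
    replacements.push(link);
  }
  // Dropping the root link stops the browser from following the original @import chain.
  rootLinkTag.replaceWith(...replacements);
}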
diff --git a/src/watcher.zig b/src/watcher.zig
index 6a430a32c..e840bbca6 100644
--- a/src/watcher.zig
+++ b/src/watcher.zig
@@ -85,6 +85,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
.watchlist = Watchlist{},
.mutex = sync.Mutex.init(),
};
+
return watcher;
}
@@ -183,6 +184,17 @@ pub fn NewWatcher(comptime ContextType: type) type {
return;
}
+ try this.appendFile(fd, file_path, hash, loader, copy_file_path);
+ }
+
+ pub fn appendFile(
+ this: *Watcher,
+ fd: StoredFileDescriptorType,
+ file_path: string,
+ hash: u32,
+ loader: options.Loader,
+ comptime copy_file_path: bool,
+ ) !void {
try this.watchlist.ensureUnusedCapacity(this.allocator, 1);
// https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/kqueue.2.html