author     2021-06-14 19:45:51 -0700
committer  2021-06-14 19:45:51 -0700
commit     7eb887edd527713214790198de3ccfe828a5768d (patch)
tree       d0a782385e6aeccf62b698fe5c15f1833aba3719 /src
parent     122ef023dd642af830a5419b6172640ebf8af841 (diff)
HMR crashily works, started working on CSS Scanner
Former-commit-id: d0f91082fcc8da17d224acb5432339c5d817e1c2
Diffstat (limited to 'src')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | src/api/schema.d.ts | 5 |
| -rw-r--r-- | src/api/schema.js | 16 |
| -rw-r--r-- | src/api/schema.peechy | 6 |
| -rw-r--r-- | src/api/schema.zig | 18 |
| -rw-r--r-- | src/css_scanner.zig | 119 |
| -rw-r--r-- | src/http.zig | 46 |
| -rw-r--r-- | src/js_ast.zig | 2 |
| -rw-r--r-- | src/js_lexer.zig | 28 |
| -rw-r--r-- | src/js_lexer_tables.zig | 6 |
| -rw-r--r-- | src/js_parser/js_parser.zig | 2 |
| -rw-r--r-- | src/js_printer.zig | 4 |
| -rw-r--r-- | src/linker.zig | 4 |
| -rw-r--r-- | src/options.zig | 9 |
| -rw-r--r-- | src/runtime/hmr.ts | 107 |
| -rw-r--r-- | src/string_immutable.zig | 61 |
| -rw-r--r-- | src/string_mutable.zig | 3 |
| -rw-r--r-- | src/string_types.zig | 1 |
17 files changed, 313 insertions, 124 deletions
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index 902c376ff..e5370097c 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -312,8 +312,7 @@ type uint32 = number;
   id: uint32;
   from_timestamp: uint32;
   loader: Loader;
-  module_path: alphanumeric;
-  log: Log;
+  module_path: string;
   blob_length: uint32;
 }
@@ -321,7 +320,7 @@ type uint32 = number;
   id: uint32;
   from_timestamp: uint32;
   loader: Loader;
-  module_path: alphanumeric;
+  module_path: string;
   log: Log;
 }
diff --git a/src/api/schema.js b/src/api/schema.js
index 9fdd5e73d..8882ecc30 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -1391,8 +1391,7 @@ function decodeWebsocketMessageBuildSuccess(bb) {
   result["id"] = bb.readUint32();
   result["from_timestamp"] = bb.readUint32();
   result["loader"] = Loader[bb.readByte()];
-  result["module_path"] = bb.readAlphanumeric();
-  result["log"] = decodeLog(bb);
+  result["module_path"] = bb.readString();
   result["blob_length"] = bb.readUint32();
   return result;
 }
@@ -1424,18 +1423,11 @@ bb.writeByte(encoded);
   var value = message["module_path"];
   if (value != null) {
-    bb.writeAlphanumeric(value);
+    bb.writeString(value);
   } else {
     throw new Error("Missing required field \"module_path\"");
   }

-  var value = message["log"];
-  if (value != null) {
-    encodeLog(value, bb);
-  } else {
-    throw new Error("Missing required field \"log\"");
-  }
-
   var value = message["blob_length"];
   if (value != null) {
     bb.writeUint32(value);
@@ -1451,7 +1443,7 @@ function decodeWebsocketMessageBuildFailure(bb) {
   result["id"] = bb.readUint32();
   result["from_timestamp"] = bb.readUint32();
   result["loader"] = Loader[bb.readByte()];
-  result["module_path"] = bb.readAlphanumeric();
+  result["module_path"] = bb.readString();
   result["log"] = decodeLog(bb);
   return result;
 }
@@ -1483,7 +1475,7 @@ bb.writeByte(encoded);
   var value = message["module_path"];
   if (value != null) {
-    bb.writeAlphanumeric(value);
+    bb.writeString(value);
   } else {
     throw new Error("Missing required field \"module_path\"");
   }
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index 48387906b..e65319dc4 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -287,10 +287,8 @@ struct WebsocketMessageBuildSuccess {
   uint32 from_timestamp;
   Loader loader;
-  alphanumeric module_path;
+  string module_path;

-  Log log;
-
   // This is the length of the blob that immediately follows this message.
   uint32 blob_length;
 }
@@ -301,7 +299,7 @@ struct WebsocketMessageBuildFailure {
   uint32 from_timestamp;
   Loader loader;
-  alphanumeric module_path;
+  string module_path;
   Log log;
 }
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 94c5bb643..6c4b539f8 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -206,16 +206,11 @@ pub fn Writer(comptime WritableStream: type) type {
         pub fn writeValue(this: *Self, slice: anytype) !void {
             switch (@TypeOf(slice)) {
-                []u8, []const u8 => {
-                    try this.writeArray(u8, slice);
-                },
-                []u16, []u32, []i16, []i32, []i8,
-                []const u16, []const u32, []const i16,
@@ -225,6 +220,10 @@ pub fn Writer(comptime WritableStream: type) type {
                     try this.writeArray(@TypeOf(slice), slice);
                 },

+                []u8, []const u8 => {
+                    try this.writeArray(u8, slice);
+                },
+
                 u8 => {
                     try this.write(slice);
                 },
@@ -1413,9 +1412,6 @@ pub const Api = struct {
         /// module_path
         module_path: []const u8,

-        /// log
-        log: Log,
-
         /// blob_length
         blob_length: u32 = 0,
@@ -1426,7 +1422,6 @@
             this.from_timestamp = try reader.readValue(u32);
             this.loader = try reader.readValue(Loader);
             this.module_path = try reader.readValue([]const u8);
-            this.log = try reader.readValue(Log);
             this.blob_length = try reader.readValue(u32);
             return this;
         }
@@ -1435,8 +1430,7 @@
             try writer.writeInt(this.id);
             try writer.writeInt(this.from_timestamp);
             try writer.writeEnum(this.loader);
-            try writer.writeArray(u8, this.module_path);
-            try writer.writeValue(this.log);
+            try writer.writeValue(this.module_path);
             try writer.writeInt(this.blob_length);
         }
     };
@@ -1472,7 +1466,7 @@
             try writer.writeInt(this.id);
             try writer.writeInt(this.from_timestamp);
             try writer.writeEnum(this.loader);
-            try writer.writeArray(u8, this.module_path);
+            try writer.writeValue(this.module_path);
             try writer.writeValue(this.log);
         }
     };
diff --git a/src/css_scanner.zig b/src/css_scanner.zig
new file mode 100644
index 000000000..eb5032608
--- /dev/null
+++ b/src/css_scanner.zig
@@ -0,0 +1,119 @@
+const Fs = @import("fs.zig");
+const std = @import("std");
+usingnamespace @import("global.zig");
+const options = @import("./options.zig");
+const logger = @import("./logger.zig");
+
+// This is not a CSS parser.
+// All this does is scan for URLs and @import statements
+// Once found, it resolves & rewrites them
+// Eventually, there will be a real CSS parser in here.
+// But, no time yet.
+
+pub const Chunk = struct {
+    // Entire chunk
+    range: logger.Range,
+
+    pub const Content = union(Tag) {
+        t_url: TextContent,
+        t_import: Import,
+        t_verbatim: Verbatim,
+    };
+
+    pub const TextContent = struct {
+        quote: Quote = .none,
+        utf8: string,
+
+        pub const Quote = enum {
+            none,
+            double,
+            single,
+        };
+    };
+    pub const Import = struct {
+        url: bool = false,
+        text: TextContent,
+    };
+    pub const Verbatim = struct {};
+
+    pub const Tag = enum {
+        t_url,
+        t_verbatim,
+        t_import,
+    };
+};
+
+pub const Token = enum {
+    t_end_of_file,
+    t_semicolon,
+    t_whitespace,
+    t_at_import,
+    t_url,
+    t_verbatim,
+};
+
+pub fn NewScanner(
+    comptime ReaderType: type,
+    comptime WriterType: type,
+    comptime ResolverType: type,
+    comptime buffer_size: usize,
+) type {
+    return struct {
+        const Scanner = @This();
+        buffer: [buffer_size]u8 = undefined,
+        current: usize = 0,
+        start: usize = 0,
+        end: usize = 0,
+        log: *logger.Log,
+
+        has_newline_before: bool = false,
+
+        token: Token,
+
+        reader: ReaderType,
+        writer: WriterType,
+        resolver: ResolverType,
+
+        pub fn step(scanner: *Scanner) !void {}
+        pub fn raw(scanner: *Scanner) string {}
+        pub fn next(scanner: *Scanner) !void {
+            scanner.has_newline_before = scanner.end == 0;
+
+            while (true) {
+                scanner.start = scanner.end;
+                scanner.token = .t_end_of_file;
+
+                switch (scanner.nextCodepoint()) {
+                    ' ', '\t', '\n', '\r', 0x0C => {},
+                    '@' => {},
+                    '\'', '"' => {},
+                    '/' => {},
+                }
+            }
+        }
+        pub fn eat(scanner: *Scanner) !Result {}
+
+        inline fn nextCodepointSlice(it: *Scanner) []const u8 {
+            @setRuntimeSafety(false);
+
+            const cp_len = utf8ByteSequenceLength(it.source.contents[it.current]);
+            it.end = it.current;
+            it.current += cp_len;
+
+            return if (!(it.current > it.source.contents.len)) it.source.contents[it.current - cp_len .. it.current] else "";
+        }
+
+        pub fn nextCodepoint(it: *Scanner) CodePoint {
+            const slice = it.nextCodepointSlice();
+            @setRuntimeSafety(false);
+
+            return switch (slice.len) {
+                0 => -1,
+                1 => @intCast(CodePoint, slice[0]),
+                2 => @intCast(CodePoint, std.unicode.utf8Decode2(slice) catch unreachable),
+                3 => @intCast(CodePoint, std.unicode.utf8Decode3(slice) catch unreachable),
+                4 => @intCast(CodePoint, std.unicode.utf8Decode4(slice) catch unreachable),
+                else => unreachable,
+            };
+        }
+    };
+}
diff --git a/src/http.zig b/src/http.zig
index b87b1d67a..e818efd89 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -413,6 +413,7 @@ pub const RequestContext = struct {
         id: u32,
         timestamp: u32,
         bytes: []const u8 = "",
+        approximate_newline_count: usize = 0,
         pub const Value = union(Tag) {
             success: Api.WebsocketMessageBuildSuccess,
             fail: Api.WebsocketMessageBuildFailure,
@@ -471,7 +472,16 @@ pub const RequestContext = struct {
                     this.printer.ctx.reset();

-                    var written = this.bundler.print(parse_result, @TypeOf(this.printer), this.printer) catch |err| {
+                    var old_linker_allocator = this.bundler.linker.allocator;
+                    defer this.bundler.linker.allocator = old_linker_allocator;
+                    this.bundler.linker.allocator = this.allocator;
+                    try this.bundler.linker.link(
+                        Fs.Path.init(file_path_str),
+                        &parse_result,
+                        .absolute_url,
+                    );
+
+                    var written = this.bundler.print(parse_result, @TypeOf(&this.printer), &this.printer) catch |err| {
                         return WatchBuildResult{
                             .value = .{ .fail = std.mem.zeroes(Api.WebsocketMessageBuildFailure) },
                             .id = id,
@@ -485,13 +495,14 @@
                             .id = id,
                             .from_timestamp = from_timestamp,
                             .loader = parse_result.loader.toAPI(),
-                            .module_path = file_path_str,
+                            .module_path = this.bundler.fs.relativeTo(file_path_str),
                             .blob_length = @truncate(u32, written),
-                            .log = std.mem.zeroes(Api.Log),
+                            // .log = std.mem.zeroes(Api.Log),
                         },
                     },
                     .id = id,
                     .bytes = this.printer.ctx.written,
+                    .approximate_newline_count = parse_result.ast.approximate_newline_count,
                     .timestamp = WebsocketHandler.toTimestamp(this.timer.read()),
                 };
             },
@@ -614,7 +625,6 @@ pub const RequestContext = struct {
         fn _handle(handler: *WebsocketHandler) !void {
             var ctx = &handler.ctx;
-            defer handler.message_buffer.deinit();
             defer handler.tombstone = true;
             defer removeWebsocket(handler);
             defer ctx.arena.deinit();
@@ -718,14 +728,26 @@ pub const RequestContext = struct {
                             .success => |fail| fail.module_path,
                         };

-                        Output.prettyln(
-                            "<r>[{s}] Built <b>{s}<r><b>{d}ms",
-                            .{
-                                @tagName(std.meta.activeTag(build_result.value)),
-                                file_path,
-                                build_result.timestamp - cmd.timestamp,
+                        switch (build_result.value) {
+                            .fail => {
+                                Output.errorLn(
+                                    "Error: <b>{s}<r><b>",
+                                    .{
+                                        file_path,
+                                    },
+                                );
                             },
-                        );
+                            .success => {
+                                Output.prettyln(
+                                    "<r><b><green>{d}ms<r> <d>built<r> <b>{s}<r><b> <r><d>({d}+ LOC)",
+                                    .{
+                                        build_result.timestamp - cmd.timestamp,
+                                        file_path,
+                                        build_result.approximate_newline_count,
+                                    },
+                                );
+                            },
+                        }
                         defer Output.flush();

                         msg.timestamp = build_result.timestamp;
@@ -1212,7 +1234,7 @@ pub const Server = struct {
             RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| {
                 Output.prettyln("Error writing change notification: {s}", .{@errorName(err)});
             };
-            Output.prettyln("Detected file change: {s}", .{file_path});
+            Output.prettyln("<r><d>Detected file change: {s}", .{ctx.bundler.fs.relativeTo(file_path)});
         }
     }
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 7cf301457..9df3e36f0 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -3485,7 +3485,7 @@ pub const ArrayBinding = struct {
 };

 pub const Ast = struct {
-    approximate_line_count: i32 = 0,
+    approximate_newline_count: usize = 0,
     has_lazy_export: bool = false,
     runtime_imports: Runtime.Imports,
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 7ab7eab86..c71b6b628 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -12,7 +12,6 @@ const unicode = std.unicode;
 const Source = logger.Source;

 pub const T = tables.T;
-pub const CodePoint = tables.CodePoint;
 pub const Keywords = tables.Keywords;
 pub const tokenToString = tables.tokenToString;
 pub const StrictModeReservedWords = tables.StrictModeReservedWords;
@@ -20,17 +19,6 @@ pub const PropertyModifierKeyword = tables.PropertyModifierKeyword;
 pub const TypescriptStmtKeyword = tables.TypescriptStmtKeyword;
 pub const TypeScriptAccessibilityModifier = tables.TypeScriptAccessibilityModifier;

-pub fn utf8ByteSequenceLength(first_byte: u8) u3 {
-    // The switch is optimized much better than a "smart" approach using @clz
-    return switch (first_byte) {
-        0b0000_0000...0b0111_1111 => 1,
-        0b1100_0000...0b1101_1111 => 2,
-        0b1110_0000...0b1110_1111 => 3,
-        0b1111_0000...0b1111_0111 => 4,
-        else => 0,
-    };
-}
-
 fn notimpl() noreturn {
     Global.panic("not implemented yet!", .{});
 }
@@ -68,7 +56,7 @@ pub const Lexer = struct {
     start: usize = 0,
     end: usize = 0,
     did_panic: bool = false,
-    approximate_newline_count: i32 = 0,
+    approximate_newline_count: usize = 0,
     previous_backslash_quote_in_jsx: logger.Range = logger.Range.None,
     token: T = T.t_end_of_file,
     has_newline_before: bool = false,
@@ -137,15 +125,7 @@ pub const Lexer = struct {
     inline fn nextCodepointSlice(it: *LexerType) []const u8 {
         @setRuntimeSafety(false);

-        // if (it.current >= it.source.contents.len) {
-        //     // without this line, strings cut off one before the last characte
-        //     it.end = it.current;
-        //     @setRuntimeSafety(false);
-
-        //     return null;
-        // }
-
-        const cp_len = utf8ByteSequenceLength(it.source.contents[it.current]);
+        const cp_len = strings.utf8ByteSequenceLength(it.source.contents[it.current]);
         it.end = it.current;
         it.current += cp_len;
@@ -643,9 +623,7 @@ pub const Lexer = struct {
         // This count is approximate because it handles "\n" and "\r\n" (the common
         // cases) but not "\r" or "\u2028" or "\u2029". Getting this wrong is harmless
         // because it's only a preallocation. The array will just grow if it's too small.
-        if (lexer.code_point == '\n') {
-            lexer.approximate_newline_count += 1;
-        }
+        lexer.approximate_newline_count += @boolToInt(lexer.code_point == '\n');
     }

     pub fn expect(self: *LexerType, comptime token: T) !void {
diff --git a/src/js_lexer_tables.zig b/src/js_lexer_tables.zig
index d373cb0b0..71657a278 100644
--- a/src/js_lexer_tables.zig
+++ b/src/js_lexer_tables.zig
@@ -1,3 +1,5 @@
+usingnamespace @import("string_types.zig");
+
 const std = @import("std");
 const expectString = std.testing.expectEqualStrings;
 const expect = std.testing.expect;
@@ -201,8 +203,6 @@ pub const StrictModeReservedWords = std.ComptimeStringMap(bool, .{
     .{ "yield", true },
 });

-pub const CodePoint = i32;
-
 pub const PropertyModifierKeyword = enum {
     p_abstract,
     p_async,
@@ -240,7 +240,7 @@ pub const TypeScriptAccessibilityModifier = std.ComptimeStringMap(u1, .{

 pub const TokenEnumType = std.EnumArray(T, []u8);

-pub const tokenToString = comptime {
+pub const tokenToString = {
     var TEndOfFile = "end of file".*;
     var TSyntaxError = "syntax error".*;
     var THashbang = "hashbang comment".*;
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 5497a664d..76ffef336 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -13794,7 +13794,7 @@ pub fn NewParser(
             .import_records = p.import_records.items,
             .export_star_import_records = p.export_star_import_records.items,
             .top_level_symbol_to_parts = p.top_level_symbol_to_parts,
-            .approximate_line_count = p.lexer.approximate_newline_count + 1,
+            .approximate_newline_count = p.lexer.approximate_newline_count,
             .exports_kind = exports_kind,
             .named_imports = p.named_imports,
             .named_exports = p.named_exports,
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 983e46b6b..a15b9f3c7 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -3704,6 +3704,7 @@ const FileWriterInternal = struct {
 pub const BufferWriter = struct {
     buffer: MutableString = undefined,
     written: []const u8 = "",
+    approximate_newline_count: usize = 0,

     pub fn init(allocator: *std.mem.Allocator) !BufferWriter {
         return BufferWriter{
@@ -3715,10 +3716,12 @@
     }
     pub fn writeByte(ctx: *BufferWriter, byte: u8) anyerror!usize {
         try ctx.buffer.appendChar(byte);
+        ctx.approximate_newline_count += @boolToInt(byte == '\n');
         return 1;
     }
     pub fn writeAll(ctx: *BufferWriter, bytes: anytype) anyerror!usize {
         try ctx.buffer.append(bytes);
+        ctx.approximate_newline_count += @boolToInt(bytes.len > 0 and bytes[bytes.len - 1] == '\n');
         return bytes.len;
     }
@@ -3732,6 +3735,7 @@ pub const BufferWriter = struct {

     pub fn reset(ctx: *BufferWriter) void {
         ctx.buffer.reset();
+        ctx.approximate_newline_count = 0;
     }

     pub fn done(
diff --git a/src/linker.zig b/src/linker.zig
index 0d6b78e08..ac0244922 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -98,7 +98,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
                 if (strings.eqlComptime(import_record.path.text, Runtime.Imports.Name)) {
                     // runtime is included in the bundle, so we don't need to dynamically import it
                     if (linker.options.node_modules_bundle) |node_modules_bundle| {
-                        import_record.path.text = node_modules_bundle.bundle.import_from_name;
+                        import_record.path.text = if (linker.options.node_modules_bundle_url.len > 0) linker.options.node_modules_bundle_url else node_modules_bundle.bundle.import_from_name;
                     } else {
                         import_record.path = try linker.generateImportPath(
                             source_dir,
@@ -171,7 +171,7 @@ pub fn NewLinker(comptime BundlerType: type) type {
                            };

                            import_record.is_bundled = true;
-                            import_record.path.text = node_modules_bundle.bundle.import_from_name;
+                            import_record.path.text = if (linker.options.node_modules_bundle_url.len > 0) linker.options.node_modules_bundle_url else node_modules_bundle.bundle.import_from_name;
                            import_record.module_id = found_module.id;
                            needs_bundle = true;
                            continue;
diff --git a/src/options.zig b/src/options.zig
index 799bb9a08..204f457e1 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -573,6 +573,7 @@ pub const BundleOptions = struct {
     public_dir_enabled: bool = true,
     output_dir: string = "",
     output_dir_handle: ?std.fs.Dir = null,
+    node_modules_bundle_url: string = "",
     public_dir_handle: ?std.fs.Dir = null,
     write: bool = false,
     preserve_symlinks: bool = false,
@@ -734,6 +735,14 @@ pub const BundleOptions = struct {
                    var node_module_bundle = try allocator.create(NodeModuleBundle);
                    node_module_bundle.* = bundle;
                    opts.node_modules_bundle = node_module_bundle;
+                    if (opts.public_url.len > 0) {
+                        var relative = node_module_bundle.bundle.import_from_name;
+                        if (relative[0] == std.fs.path.sep) {
+                            relative = relative[1..];
+                        }
+
+                        opts.node_modules_bundle_url = try std.fmt.allocPrint(allocator, "{s}{s}", .{ opts.public_url, relative });
+                    }
                    const elapsed = @intToFloat(f64, (std.time.nanoTimestamp() - time_start)) / std.time.ns_per_ms;
                    Output.prettyErrorln(
                        "<r><b><d>\"{s}\"<r><d> - {d} modules, {d} packages <b>[{d:>.2}ms]<r>",
diff --git a/src/runtime/hmr.ts b/src/runtime/hmr.ts
index 10afeb8ea..1d7a41c21 100644
--- a/src/runtime/hmr.ts
+++ b/src/runtime/hmr.ts
@@ -50,7 +50,13 @@ class HMRClient {
   }

   static activate(verbose: boolean = false) {
-    if (this.client) {
+    // Support browser-like envirnments where location and WebSocket exist
+    // Maybe it'll work in Deno! Who knows.
+    if (
+      this.client ||
+      typeof location === "undefined" ||
+      typeof WebSocket === "undefined"
+    ) {
       return;
     }
@@ -61,14 +67,10 @@ class HMRClient {
   }

   handleBuildFailure(buffer: ByteBuffer, timestamp: number) {
-    // 0: ID
-    // 1: Timestamp
-    const header_data = new Uint32Array(
-      buffer._data.buffer,
-      buffer._data.byteOffset,
-      buffer._data.byteOffset + 8
-    );
-    const index = this.indexOfModuleId(header_data[0]);
+    const build = API.decodeWebsocketMessageBuildFailure(buffer);
+    const id = build.id;
+
+    const index = this.indexOfModuleId(id);
     // Ignore build failures of modules that are not loaded
     if (index === -1) {
       return;
@@ -96,35 +98,28 @@ class HMRClient {
   };

   handleBuildSuccess(buffer: ByteBuffer, timestamp: number) {
-    // 0: ID
-    // 1: Timestamp
-    const header_data = new Uint32Array(
-      buffer._data.buffer,
-      buffer._data.byteOffset,
-      buffer._data.byteOffset + 8
-    );
-    const index = this.indexOfModuleId(header_data[0]);
+    const build = API.decodeWebsocketMessageBuildSuccess(buffer);
+    const id = build.id;
+    const index = this.indexOfModuleId(id);
     // Ignore builds of modules that are not loaded
     if (index === -1) {
       if (this.verbose) {
-        __hmrlog.debug(
-          `Skipping reload for unknown module id:`,
-          header_data[0]
-        );
+        __hmrlog.debug(`Skipping reload for unknown module id:`, id);
       }
       return;
     }

     // Ignore builds of modules we expect a later version of
-    const currentVersion = this.builds.get(header_data[0]) || -Infinity;
-    if (currentVersion > header_data[1]) {
+    const currentVersion = this.builds.get(id) || -Infinity;
+
+    if (currentVersion > build.from_timestamp) {
       if (this.verbose) {
         __hmrlog.debug(
           `Ignoring outdated update for "${HMRModule.dependencies.modules[index].file_path}".\n Expected: >=`,
           currentVersion,
           `\n Received:`,
-          header_data[1]
+          build.from_timestamp
         );
       }
       return;
@@ -137,14 +132,13 @@ class HMRClient {
       );
     }

-    const build = API.decodeWebsocketMessageBuildSuccess(buffer);
     var reload = new HotReload(
-      header_data[0],
+      build.id,
       index,
       build,
       // These are the bytes!!
-      buffer.data.length > buffer._index
-        ? buffer.data.subarray(buffer._index)
+      buffer._data.length > buffer._index
+        ? buffer._data.subarray(buffer._index)
         : new Uint8Array(0)
     );
     reload.timings.notify = timestamp - build.from_timestamp;
@@ -366,7 +360,7 @@ class HotReload {
     orig_deps = null;
     this.timings.total =
-      this.timings.import + this.timings.callbacks + this.build.from_timestamp;
+      this.timings.import + this.timings.callbacks + this.timings.notify;
     return Promise.resolve([
       HMRModule.dependencies.modules[this.module_index],
       this.timings,
@@ -374,6 +368,32 @@ class HotReload {
   }
 }

+class DependencyGraph {
+  modules: HMRModule[];
+  graph: Uint32Array;
+  graph_used = 0;
+
+  loadDefaults() {
+    this.modules = new Array<HMRModule>(32);
+    this.graph = new Uint32Array(32);
+    this.graph_used = 0;
+  }
+
+  static loadWithDefaults() {
+    const graph = new DependencyGraph();
+    graph.loadDefaults();
+    return graph;
+  }
+
+  fork(offset: number) {
+    const graph = new DependencyGraph();
+    graph.modules = this.modules.slice();
+    graph.graph_used = offset > 0 ? offset - 1 : 0;
+    graph.graph = this.graph.slice();
+    return graph;
+  }
+}
+
 class HMRModule {
   constructor(id: number, file_path: string) {
     this.id = id;
@@ -424,20 +444,7 @@ class HMRModule {
     this._update(this.exports);
   }

-  static _dependencies = {
-    modules: new Array<HMRModule>(32),
-    graph: new Uint32Array(32),
-    graph_used: 0,
-
-    fork(offset: number) {
-      return {
-        modules: HMRModule._dependencies.modules.slice(),
-        graph: HMRModule._dependencies.graph.slice(),
-        graph_used: offset - 1,
-      };
-    },
-  };
-
+  static _dependencies = DependencyGraph.loadWithDefaults();
   exportAll(object: Object) {
     // object[alias] must be a function
     for (let alias in object) {
@@ -450,7 +457,7 @@ class HMRModule {
     }
   }

-  static dependencies: HMRModule["_dependencies"];
+  static dependencies: DependencyGraph;
   file_path: string;
   _load = function () {};
   id = 0;
@@ -461,16 +468,20 @@ class HMRModule {

 var __hmrlog = {
   debug(...args) {
-    console.debug("[speedy]", ...args);
+    // console.debug("[speedy]", ...args);
+    console.debug(...args);
   },
   error(...args) {
-    console.error("[speedy]", ...args);
+    // console.error("[speedy]", ...args);
+    console.error(...args);
   },
   log(...args) {
-    console.log("[speedy]", ...args);
+    // console.log("[speedy]", ...args);
+    console.log(...args);
   },
   warn(...args) {
-    console.warn("[speedy]", ...args);
+    // console.warn("[speedy]", ...args);
+    console.warn(...args);
   },
 };
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index c0b72d178..c97b0901e 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -406,6 +406,67 @@ pub fn sortDesc(in: []string) void {
     std.sort.sort([]const u8, in, {}, cmpStringsDesc);
 }

+pub fn utf8ByteSequenceLength(first_byte: u8) u3 {
+    // The switch is optimized much better than a "smart" approach using @clz
+    return switch (first_byte) {
+        0b0000_0000...0b0111_1111 => 1,
+        0b1100_0000...0b1101_1111 => 2,
+        0b1110_0000...0b1110_1111 => 3,
+        0b1111_0000...0b1111_0111 => 4,
+        else => 0,
+    };
+}
+
+pub const CodepointIterator = struct {
+    bytes: []const u8,
+    i: usize,
+    width: u3 = 0,
+    c: CodePoint = 0,
+
+    inline fn nextCodepointSlice(it: *CodepointIterator) []const u8 {
+        @setRuntimeSafety(false);
+
+        const cp_len = utf8ByteSequenceLength(it.source.contents[it.current]);
+        it.end = it.current;
+        it.current += cp_len;
+
+        return if (!(it.current > it.source.contents.len)) it.source.contents[it.current - cp_len .. it.current] else "";
+    }
+
+    pub fn nextCodepoint(it: *CodepointIterator) CodePoint {
+        const slice = it.nextCodepointSlice();
+        it.width = @intCast(u3, slice.len);
+        @setRuntimeSafety(false);
+
+        it.c = switch (it.width) {
+            0 => -1,
+            1 => @intCast(CodePoint, slice[0]),
+            2 => @intCast(CodePoint, std.unicode.utf8Decode2(slice) catch unreachable),
+            3 => @intCast(CodePoint, std.unicode.utf8Decode3(slice) catch unreachable),
+            4 => @intCast(CodePoint, std.unicode.utf8Decode4(slice) catch unreachable),
+            else => unreachable,
+        };
+
+        return it.c;
+    }
+
+    /// Look ahead at the next n codepoints without advancing the iterator.
+    /// If fewer than n codepoints are available, then return the remainder of the string.
+    pub fn peek(it: *CodepointIterator, n: usize) []const u8 {
+        const original_i = it.i;
+        defer it.i = original_i;
+
+        var end_ix = original_i;
+        var found: usize = 0;
+        while (found < n) : (found += 1) {
+            const next_codepoint = it.nextCodepointSlice() orelse return it.bytes[original_i..];
+            end_ix += next_codepoint.len;
+        }
+
+        return it.bytes[original_i..end_ix];
+    }
+};
+
 test "join" {
     var string_list = &[_]string{ "abc", "def", "123", "hello" };
     const list = try join(string_list, "-", std.heap.page_allocator);
diff --git a/src/string_mutable.zig b/src/string_mutable.zig
index 1b63bb9e8..25709fd40 100644
--- a/src/string_mutable.zig
+++ b/src/string_mutable.zig
@@ -2,6 +2,7 @@ const std = @import("std");
 const expect = std.testing.expect;
 usingnamespace @import("string_types.zig");

+const strings = @import("string_immutable.zig");
 const js_lexer = @import("js_lexer.zig");

 pub const MutableString = struct {
@@ -90,7 +91,7 @@ pub const MutableString = struct {
                 } else if (!needs_gap) {
                     needs_gap = true;
                     // skip the code point, replace it with a single _
-                    i += std.math.max(js_lexer.utf8ByteSequenceLength(slice[i]), 1) - 1;
+                    i += std.math.max(strings.utf8ByteSequenceLength(slice[i]), 1) - 1;
                 }
             }
diff --git a/src/string_types.zig b/src/string_types.zig
index 1dc6b211a..549e7cf2d 100644
--- a/src/string_types.zig
+++ b/src/string_types.zig
@@ -1,2 +1,3 @@
 pub const string = []const u8;
 pub const stringMutable = []u8;
+pub const CodePoint = i32;