author     2022-04-22 01:08:25 -0700
committer  2022-04-22 01:08:25 -0700
commit     900847fc721070c382c9e5581640a7f33db02bf3 (patch)
tree       314f273627bee9f34e6214fc29b5fa4287b7c501
parent     ab5432a5c6b5203b3c7fdc8e9258fb681baf7069 (diff)
[js parser] switch to bitset for import record flags
-rw-r--r--  src/bundler/bundle_v2.zig                    |  18
-rw-r--r--  src/bundler/generate_node_modules_bundle.zig |   6
-rw-r--r--  src/import_record.zig                        | 138
-rw-r--r--  src/javascript/jsc/api/transpiler.zig        |   2
-rw-r--r--  src/js_parser/js_parser.zig                  |  49
-rw-r--r--  src/js_printer.zig                           |  20
-rw-r--r--  src/linker.zig                               |   4

7 files changed, 166 insertions(+), 71 deletions(-)
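The diff below replaces ImportRecord's per-flag bool fields with a single bitset keyed by a new Flags enum, plus set/enable helpers for writes and accessor functions for reads. The following is a minimal, self-contained sketch of that pattern using std.enums.EnumSet, as the diff does; the flag names are taken from the diff, but the Record struct and the test are illustrative stand-ins rather than Bun's actual types.

const std = @import("std");

const Flags = enum {
    handles_import_errors,
    is_unused,
    contains_import_star,
    // ...the remaining flags follow the same shape

    pub const Set = std.enums.EnumSet(Flags);
};

const Record = struct {
    // One integer-backed bitset instead of one bool field per flag.
    flags: Flags.Set = Flags.Set{},

    pub inline fn set(this: *Record, flag: Flags, value: bool) void {
        this.flags.setPresent(flag, value);
    }

    pub inline fn enable(this: *Record, flag: Flags) void {
        this.set(flag, true);
    }

    pub inline fn handles_import_errors(this: *const Record) bool {
        return this.flags.contains(.handles_import_errors);
    }
};

test "flags round-trip through the bitset" {
    var record = Record{};
    try std.testing.expect(!record.handles_import_errors());
    record.enable(.handles_import_errors);
    try std.testing.expect(record.handles_import_errors());
    record.set(.handles_import_errors, false);
    try std.testing.expect(!record.handles_import_errors());
}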
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index f06193f1e..39cff2b74 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -798,7 +798,7 @@ const ParseTask = struct {
             error.ModuleNotFound => {
                 const addError = Logger.Log.addResolveErrorWithTextDupeMaybeWarn;

-                if (!import_record.handles_import_errors) {
+                if (!import_record.handles_import_errors()) {
                     last_error = err;
                     if (isPackagePath(import_record.path.text)) {
                         if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
@@ -1595,7 +1595,7 @@ const LinkerContext = struct {
                 //
                 // In that case the module *is* considered a CommonJS module because
                 // the namespace object must be created.
-                if ((record.contains_import_star or record.contains_default_alias) and
+                if ((record.contains_import_star() or record.contains_default_alias()) and
                     // TODO: hasLazyExport
                     (other_wrap == .none))
                 {
@@ -2047,7 +2047,7 @@ const LinkerContext = struct {
                 // We should use "__require" instead of "require" if we're not
                 // generating a CommonJS output file, since it won't exist otherwise
                 if (this.shouldCallRuntimeRequire(output_format)) {
-                    record.calls_runtime_require = true;
+                    record.enable(.calls_runtime_require);
                     runtime_require_uses += 1;
                 }
@@ -2065,11 +2065,11 @@ const LinkerContext = struct {
                 //
                 if (kind != .require and
                     (kind != .stmt or
-                        record.contains_import_star or
-                        record.contains_default_alias or
-                        record.contains_es_module_alias))
+                        record.contains_import_star() or
+                        record.contains_default_alias() or
+                        record.contains_es_module_alias()))
                 {
-                    record.wrap_with_to_esm = true;
+                    record.enable(.wrap_with_to_esm);
                     to_esm_uses += 1;
                 }
             }
@@ -2099,7 +2099,7 @@ const LinkerContext = struct {
                 // This is an ES6 import of a CommonJS module, so it needs the
                 // "__toESM" wrapper as long as it's not a bare "require()"
                 if (kind != .require and other_export_kind == .common_js) {
-                    record.wrap_with_to_esm = true;
+                    record.enable(.wrap_with_to_esm);
                     to_esm_uses += 1;
                 }
             },
@@ -2197,7 +2197,7 @@ const LinkerContext = struct {
                     Index.init(source_index),
                 ) catch unreachable;
                 this.graph.ast.items(.uses_export_ref)[id] = true;
-                record.calls_runtime_re_export_fn = true;
+                record.enable(.calls_runtime_re_export_fn);
                 re_export_uses += 1;
             }
         }
diff --git a/src/bundler/generate_node_modules_bundle.zig b/src/bundler/generate_node_modules_bundle.zig
index e966810af..f6d8e9ab6 100644
--- a/src/bundler/generate_node_modules_bundle.zig
+++ b/src/bundler/generate_node_modules_bundle.zig
@@ -1367,7 +1367,7 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker,
                 ) catch unreachable;
             } else |err| {
                 if (comptime Environment.isDebug) {
-                    if (!import_record.handles_import_errors) {
+                    if (!import_record.handles_import_errors()) {
                         Output.prettyErrorln("\n<r><red>{s}<r> resolving \"{s}\" from \"{s}\"", .{
                             @errorName(err),
                             import_record.path.text,
@@ -1386,7 +1386,7 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker,
                     error.ModuleNotFound => {
                         const addError = logger.Log.addResolveErrorWithTextDupeMaybeWarn;
-                        if (!import_record.handles_import_errors) {
+                        if (!import_record.handles_import_errors()) {
                             if (isPackagePath(import_record.path.text)) {
                                 if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
                                     try addError(
@@ -1822,7 +1822,7 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker,
             } else |err| {
                 switch (err) {
                     error.ModuleNotFound => {
-                        if (!import_record.handles_import_errors) {
+                        if (!import_record.handles_import_errors()) {
                             const addError = logger.Log.addResolveErrorWithTextDupeMaybeWarn;
                             if (isPackagePath(import_record.path.text)) {
                                 if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
diff --git a/src/import_record.zig b/src/import_record.zig
index a975cf669..b160befe0 100644
--- a/src/import_record.zig
+++ b/src/import_record.zig
@@ -88,57 +88,141 @@ pub const ImportRecord = struct {
     print_mode: PrintMode = .normal,

+    kind: ImportKind,
+
+    tag: Tag = Tag.none,
+
+    flags: Flags.Set = Flags.None,
+
+    pub inline fn set(this: *ImportRecord, flag: Flags, value: bool) void {
+        this.flags.setPresent(flag, value);
+    }
+
+    pub inline fn enable(this: *ImportRecord, flag: Flags) void {
+        this.set(flag, true);
+    }
+
     /// True for the following cases:
     ///
-    /// try { require('x') } catch { handle }
-    /// try { await import('x') } catch { handle }
-    /// try { require.resolve('x') } catch { handle }
-    /// import('x').catch(handle)
-    /// import('x').then(_, handle)
+    /// `try { require('x') } catch { handle }`
+    /// `try { await import('x') } catch { handle }`
+    /// `try { require.resolve('x') } catch { handle }`
+    /// `import('x').catch(handle)`
+    /// `import('x').then(_, handle)`
     ///
     /// In these cases we shouldn't generate an error if the path could not be
     /// resolved.
-    handles_import_errors: bool = false,
+    pub inline fn handles_import_errors(this: *const ImportRecord) bool {
+        return this.flags.contains(.handles_import_errors);
+    }

     /// Sometimes the parser creates an import record and decides it isn't needed.
     /// For example, TypeScript code may have import statements that later turn
     /// out to be type-only imports after analyzing the whole file.
-    is_unused: bool = false,
+    pub inline fn is_unused(this: *const ImportRecord) bool {
+        return this.flags.contains(.is_unused);
+    }

     /// If this is true, the import contains syntax like "* as ns". This is used
     /// to determine whether modules that have no exports need to be wrapped in a
     /// CommonJS wrapper or not.
-    contains_import_star: bool = false,
+    pub inline fn contains_import_star(this: *const ImportRecord) bool {
+        return this.flags.contains(.contains_import_star);
+    }

     /// If this is true, the import contains an import for the alias "default",
     /// either via the "import x from" or "import {default as x} from" syntax.
-    contains_default_alias: bool = false,
+    pub inline fn contains_default_alias(this: *const ImportRecord) bool {
+        return this.flags.contains(.contains_default_alias);
+    }

     /// If true, this "export * from 'path'" statement is evaluated at run-time by
     /// calling the "__reExport()" helper function
-    calls_runtime_re_export_fn: bool = false,
-
+    pub inline fn calls_runtime_re_export_fn(this: *const ImportRecord) bool {
+        return this.flags.contains(.calls_runtime_re_export_fn);
+    }

     /// If true, this calls require() at runtime
-    calls_runtime_require: bool = false,
+    pub inline fn calls_runtime_require(this: *const ImportRecord) bool {
+        return this.flags.contains(.calls_runtime_require);
+    }

     /// Tell the printer to wrap this call to "require()" in "__toModule(...)"
-    wrap_with_to_module: bool = false,
+    pub inline fn wrap_with_to_module(this: *const ImportRecord) bool {
+        return this.flags.contains(.wrap_with_to_module);
+    }

     /// Tell the printer to wrap this call to "toESM()" in "__toESM(...)"
-    wrap_with_to_esm: bool = false,
+    pub inline fn wrap_with_to_esm(this: *const ImportRecord) bool {
+        return this.flags.contains(.wrap_with_to_esm);
+    }

-    /// True for require calls like this: "try { require() } catch {}". In this
-    /// case we shouldn't generate an error if the path could not be resolved.
-    is_inside_try_body: bool = false,
+    // If this is true, the import contains an import for the alias "__esModule",
+    // via the "import {__esModule} from" syntax.
+    pub inline fn contains_es_module_alias(this: *const ImportRecord) bool {
+        return this.flags.contains(.contains_es_module_alias);
+    }

     /// If true, this was originally written as a bare "import 'file'" statement
-    was_originally_bare_import: bool = false,
-
-    was_originally_require: bool = false,
-
-    kind: ImportKind,
+    pub inline fn was_originally_bare_import(this: *const ImportRecord) bool {
+        return this.flags.contains(.was_originally_bare_import);
+    }
+    pub inline fn was_originally_require(this: *const ImportRecord) bool {
+        return this.flags.contains(.was_originally_require);
+    }

-    tag: Tag = Tag.none,
+    pub const Flags = enum {
+        /// True for the following cases:
+        ///
+        /// try { require('x') } catch { handle }
+        /// try { await import('x') } catch { handle }
+        /// try { require.resolve('x') } catch { handle }
+        /// import('x').catch(handle)
+        /// import('x').then(_, handle)
+        ///
+        /// In these cases we shouldn't generate an error if the path could not be
+        /// resolved.
+        handles_import_errors,
+
+        /// Sometimes the parser creates an import record and decides it isn't needed.
+        /// For example, TypeScript code may have import statements that later turn
+        /// out to be type-only imports after analyzing the whole file.
+        is_unused,
+
+        /// If this is true, the import contains syntax like "* as ns". This is used
+        /// to determine whether modules that have no exports need to be wrapped in a
+        /// CommonJS wrapper or not.
+        contains_import_star,
+
+        /// If this is true, the import contains an import for the alias "default",
+        /// either via the "import x from" or "import {default as x} from" syntax.
+        contains_default_alias,
+
+        // If this is true, the import contains an import for the alias "__esModule",
+        // via the "import {__esModule} from" syntax.
+        contains_es_module_alias,
+
+        /// If true, this "export * from 'path'" statement is evaluated at run-time by
+        /// calling the "__reExport()" helper function
+        calls_runtime_re_export_fn,
+
+        /// If true, this calls require() at runtime
+        calls_runtime_require,
+
+        /// Tell the printer to wrap this call to "require()" in "__toModule(...)"
+        wrap_with_to_module,
+
+        /// Tell the printer to wrap this call to "toESM()" in "__toESM(...)"
+        wrap_with_to_esm,
+
+        /// If true, this was originally written as a bare "import 'file'" statement
+        was_originally_bare_import,
+
+        was_originally_require,
+
+        pub const None = Set{};
+        pub const Fields = std.enums.EnumFieldStruct(Flags, bool, false);
+        pub const Set = std.enums.EnumSet(Flags);
+    };

     pub inline fn isRuntime(this: *const ImportRecord) bool {
         return this.tag.isRuntime();
@@ -156,12 +240,20 @@ pub const ImportRecord = struct {
     pub const Tag = enum(u3) {
         none,
+        /// JSX auto-import for React Fast Refresh
         react_refresh,
+        /// JSX auto-import for jsxDEV or jsx
        jsx_import,
+        /// JSX auto-import for Fragment or createElement
        jsx_classic,
+        /// Uses the `bun` import specifier
+        /// import {foo} from "bun";
        bun,
+        /// Uses the `bun:test` import specifier
+        /// import {expect} from "bun:test";
        bun_test,
        runtime,
+        /// A macro: import specifier OR a macro import
        macro,

        pub inline fn isRuntime(this: Tag) bool {
diff --git a/src/javascript/jsc/api/transpiler.zig b/src/javascript/jsc/api/transpiler.zig
index 0a3eea222..29f375b3f 100644
--- a/src/javascript/jsc/api/transpiler.zig
+++ b/src/javascript/jsc/api/transpiler.zig
@@ -856,7 +856,7 @@ fn getParseResult(this: *Transpiler, allocator: std.mem.Allocator, code: []const
     if (parse_result) |*res| {
         for (res.ast.import_records.slice()) |*import| {
             if (import.kind.isCommonJS()) {
-                import.wrap_with_to_module = true;
+                import.enable(.wrap_with_to_module);
                 import.module_id = @truncate(u32, std.hash.Wyhash.hash(0, import.path.pretty));
             }
         }
diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig
index 539782e3d..fa814050c 100644
--- a/src/js_parser/js_parser.zig
+++ b/src/js_parser/js_parser.zig
@@ -432,7 +432,7 @@ pub const ImportScanner = struct {
                     var record: *ImportRecord = &p.import_records.items[st.import_record_index];

                     if (record.tag == .macro) {
-                        record.is_unused = true;
+                        record.set(.is_unused, true);
                         record.path.is_disabled = true;
                         continue;
                     }
@@ -601,8 +601,8 @@ pub const ImportScanner = struct {
                         {
                             // internal imports are presumed to be always used
                             // require statements cannot be stripped
-                            if (!record.isInternal() and !record.was_originally_require) {
-                                record.is_unused = true;
+                            if (!record.isInternal() and !record.was_originally_require()) {
+                                record.set(.is_unused, true);
                                 continue;
                             }
                         }
@@ -615,7 +615,7 @@ pub const ImportScanner = struct {
                         st.star_name_loc = null;
                     }

-                    record.contains_default_alias = record.contains_default_alias or st.default_name != null;
+                    record.set(.contains_default_alias, record.contains_default_alias() or st.default_name != null);

                    const existing_items: ImportItemForNamespaceMap = p.import_items_for_namespace.get(namespace_ref) orelse
                        ImportItemForNamespaceMap.init(allocator);
@@ -628,7 +628,7 @@ pub const ImportScanner = struct {
                    // This keeps track of the `namespace_alias` incase, at printing time, we determine that we should print it with the namespace
                    for (st.items) |item| {
                        const is_default = strings.eqlComptime(item.alias, "default");
-                        record.contains_default_alias = record.contains_default_alias or is_default;
+                        record.set(.contains_default_alias, record.contains_default_alias() or is_default);

                        const name: LocRef = item.name;
                        const name_ref = name.ref.?;
@@ -654,10 +654,10 @@ pub const ImportScanner = struct {
                    try p.import_records_for_current_part.append(allocator, st.import_record_index);

                    if (st.star_name_loc != null) {
-                        record.contains_import_star = true;
+                        record.enable(.contains_import_star);
                    }

-                    if (record.was_originally_require) {
+                    if (record.was_originally_require()) {
                        var symbol = &p.symbols.items[namespace_ref.innerIndex()];
                        symbol.namespace_alias = G.NamespaceAlias{
                            .namespace_ref = namespace_ref,
@@ -2224,10 +2224,13 @@ pub const Parser = struct {
                // - import 'foo';
                // - import("foo")
                // - require("foo")
-                import_record.is_unused = import_record.is_unused or
-                    (import_record.kind == .stmt and
-                    !import_record.was_originally_bare_import and
-                    !import_record.calls_runtime_re_export_fn);
+                import_record.set(
+                    .is_unused,
+                    import_record.is_unused() or
+                        (import_record.kind == .stmt and
+                        !import_record.was_originally_bare_import() and
+                        !import_record.calls_runtime_re_export_fn()),
+                );
            }

            var iter = scan_pass.used_symbols.iterator();
@@ -3114,7 +3117,7 @@ fn NewParser_(
                    }

                    const import_record_index = p.addImportRecord(.dynamic, arg.loc, arg.data.e_string.string(p.allocator) catch unreachable);
-                    p.import_records.items[import_record_index].handles_import_errors = (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target;
+                    p.import_records.items[import_record_index].set(.handles_import_errors, (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target);
                    p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;
                    return p.e(E.Import{
                        .expr = arg,
@@ -3153,15 +3156,15 @@ fn NewParser_(
                    const pathname = str.string(p.allocator) catch unreachable;

                    const import_record_index = p.addImportRecord(.require, arg.loc, pathname);
-                    p.import_records.items[import_record_index].handles_import_errors = p.fn_or_arrow_data_visit.try_body_count != 0;
+                    p.import_records.items[import_record_index].set(.handles_import_errors, p.fn_or_arrow_data_visit.try_body_count != 0);
                    p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;

                    if (!p.options.transform_require_to_import) {
                        return p.e(E.Require{ .import_record_index = import_record_index }, arg.loc);
                    }

-                    p.import_records.items[import_record_index].was_originally_require = true;
-                    p.import_records.items[import_record_index].contains_import_star = true;
+                    p.import_records.items[import_record_index].enable(.was_originally_require);
+                    p.import_records.items[import_record_index].enable(.contains_import_star);

                    const symbol_name = p.import_records.items[import_record_index].path.name.nonUniqueNameString(p.allocator);
                    const cjs_import_name = std.fmt.allocPrint(
@@ -5583,7 +5586,7 @@ fn NewParser_(
                        const id = p.addImportRecord(.stmt, path.loc, path.text);
                        p.import_records.items[id].tag = .macro;
                        p.import_records.items[id].path.namespace = js_ast.Macro.namespace;
-                        p.import_records.items[id].is_unused = true;
+                        p.import_records.items[id].set(.is_unused, true);

                        if (stmt.default_name) |name_loc| {
                            const name = p.loadNameFromRef(name_loc.ref.?);
@@ -5608,7 +5611,7 @@ fn NewParser_(
                        null;

                    stmt.import_record_index = p.addImportRecord(.stmt, path.loc, path.text);
-                    p.import_records.items[stmt.import_record_index].was_originally_bare_import = was_originally_bare_import;
+                    p.import_records.items[stmt.import_record_index].set(.was_originally_bare_import, was_originally_bare_import);

                    if (stmt.star_name_loc) |star| {
                        const name = p.loadNameFromRef(stmt.namespace_ref);
@@ -5655,7 +5658,7 @@ fn NewParser_(
                                try p.macro.refs.put(ref, new_import_id);

                                p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace;
-                                p.import_records.items[new_import_id].is_unused = true;
+                                p.import_records.items[new_import_id].set(.is_unused, true);
                                if (comptime only_scan_imports_and_do_not_visit) {
                                    p.import_records.items[new_import_id].tag = .macro;
                                    p.import_records.items[new_import_id].path.is_disabled = true;
@@ -5707,7 +5710,7 @@ fn NewParser_(
                            try p.macro.refs.put(ref, new_import_id);

                            p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace;
-                            p.import_records.items[new_import_id].is_unused = true;
+                            p.import_records.items[new_import_id].set(.is_unused, true);
                            p.import_records.items[new_import_id].tag = .macro;

                            if (comptime only_scan_imports_and_do_not_visit) {
@@ -5736,7 +5739,7 @@ fn NewParser_(
                    if (remap_count > 0 and stmt.items.len == 0 and stmt.default_name == null) {
                        p.import_records.items[stmt.import_record_index].path.namespace = js_ast.Macro.namespace;
-                        p.import_records.items[stmt.import_record_index].is_unused = true;
+                        p.import_records.items[stmt.import_record_index].set(.is_unused, true);
                        p.import_records.items[stmt.import_record_index].tag = .macro;

                        if (comptime only_scan_imports_and_do_not_visit) {
@@ -6270,7 +6273,7 @@ fn NewParser_(
                    if (comptime track_symbol_usage_during_parse_pass) {
                        // In the scan pass, we need _some_ way of knowing *not* to mark as unused
-                        p.import_records.items[import_record_index].calls_runtime_re_export_fn = true;
+                        p.import_records.items[import_record_index].enable(.calls_runtime_re_export_fn);
                    }

                    try p.lexer.expectOrInsertSemicolon();
@@ -6309,7 +6312,7 @@ fn NewParser_(
                    if (comptime track_symbol_usage_during_parse_pass) {
                        // In the scan pass, we need _some_ way of knowing *not* to mark as unused
-                        p.import_records.items[import_record_index].calls_runtime_re_export_fn = true;
+                        p.import_records.items[import_record_index].enable(.calls_runtime_re_export_fn);
                    }

                    return p.s(S.ExportFrom{ .items = export_clause.clauses, .is_single_line = export_clause.is_single_line, .namespace_ref = namespace_ref, .import_record_index = import_record_index }, loc);
@@ -10572,7 +10575,7 @@ fn NewParser_(
                            if (strings.eqlComptime(clause.alias, "default")) {
                                var non_unique_name = record.path.name.nonUniqueNameString(p.allocator) catch unreachable;
                                clause.original_name = std.fmt.allocPrint(p.allocator, "{s}_default", .{non_unique_name}) catch unreachable;
-                                record.contains_default_alias = true;
+                                record.enable(.contains_default_alias);
                            }
                            const name_ref = p.declareSymbol(.import, this.loc, clause.original_name) catch unreachable;
                            clause.name = LocRef{ .loc = this.loc, .ref = name_ref };
diff --git a/src/js_printer.zig b/src/js_printer.zig
index cb6314e72..1eb4a32e0 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -487,12 +487,12 @@ const ImportVariant = enum {
    pub fn determine(record: *const importRecord.ImportRecord, s_import: *const S.Import) ImportVariant {
        var variant = ImportVariant.path_only;

-        if (record.contains_import_star) {
+        if (record.contains_import_star()) {
            variant = variant.hasStar();
        }

-        if (!record.was_originally_bare_import) {
-            if (!record.contains_default_alias) {
+        if (!record.was_originally_bare_import()) {
+            if (!record.contains_default_alias()) {
                if (s_import.default_name) |default_name| {
                    if (default_name.ref != null) {
                        variant = variant.hasDefault();
@@ -1361,7 +1361,7 @@ pub fn NewPrinter(
            if (record.kind != .dynamic) {
                p.printSpaceBeforeIdentifier();

-                if (record.path.is_disabled and record.handles_import_errors and !is_external) {
+                if (record.path.is_disabled and record.handles_import_errors() and !is_external) {
                    p.printRequireError(record.path.text);
                    return;
                }
@@ -2071,7 +2071,7 @@ pub fn NewPrinter(
                        if (wrap) {
                            p.print(")");
                        }
-                    } else if (import_record.was_originally_require and import_record.path.is_disabled) {
+                    } else if (import_record.was_originally_require() and import_record.path.is_disabled) {
                        p.printRequireError(import_record.path.text);
                        didPrint = true;
                    }
@@ -2349,7 +2349,7 @@ pub fn NewPrinter(
        }

        pub fn printNamespaceAlias(p: *Printer, import_record: ImportRecord, namespace: G.NamespaceAlias) void {
-            if (import_record.isBundled() and !import_record.contains_import_star) {
+            if (import_record.isBundled() and !import_record.contains_import_star()) {
                p.print("$");
                p.printModuleId(import_record.module_id);
            } else {
@@ -3606,7 +3606,7 @@ pub fn NewPrinter(
                        return p.printBundledImport(record, s);
                    }

-                    if (record.wrap_with_to_module) {
+                    if (record.wrap_with_to_module()) {
                        const require_ref = p.options.require_ref.?;
                        const module_id = record.module_id;
@@ -3620,7 +3620,7 @@ pub fn NewPrinter(
                            try p.imported_module_ids.append(module_id);
                        }

-                        if (record.contains_import_star) {
+                        if (record.contains_import_star()) {
                            p.print("var ");
                            p.printSymbol(s.namespace_ref);
                            p.print(" = ");
@@ -3673,7 +3673,7 @@ pub fn NewPrinter(
                            p.print("} = ");

-                            if (record.contains_import_star) {
+                            if (record.contains_import_star()) {
                                p.printSymbol(s.namespace_ref);
                                p.print(";\n");
                            } else {
@@ -3731,7 +3731,7 @@ pub fn NewPrinter(
                        return;
                    }

-                    if (record.handles_import_errors and record.path.is_disabled and record.kind.isCommonJS()) {
+                    if (record.handles_import_errors() and record.path.is_disabled and record.kind.isCommonJS()) {
                        return;
                    }
diff --git a/src/linker.zig b/src/linker.zig
index 2f2869319..ca18cfa83 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -510,7 +510,7 @@ pub const Linker = struct {
                        // We can do this in the printer instead of creating a bunch of AST nodes here.
                        // But we need to at least tell the printer that this needs to happen.
                        if (resolved_import.shouldAssumeCommonJS(import_record.kind)) {
-                            import_record.wrap_with_to_module = true;
+                            import_record.enable(.wrap_with_to_module);
                            import_record.module_id = @truncate(u32, std.hash.Wyhash.hash(0, path.pretty));

                            result.ast.needs_runtime = true;
@@ -519,7 +519,7 @@ pub const Linker = struct {
                    } else |err| {
                        switch (err) {
                            error.ModuleNotFound => {
-                                if (import_record.handles_import_errors) {
+                                if (import_record.handles_import_errors()) {
                                    import_record.path.is_disabled = true;
                                    continue;
                                }
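As the hunks above show, the call-site migration is mechanical: boolean stores become enable(.flag) or set(.flag, expr), and boolean reads become accessor calls. Roughly (illustrative, not a line taken from the diff):

// before: record.wrap_with_to_esm = true;
// after:  record.enable(.wrap_with_to_esm);

Packing the flags into a single std.enums.EnumSet keeps every flag in one integer-backed bitset, which should shrink ImportRecord relative to the previous one-bool-per-flag layout and makes it cheap to copy or clear all flags at once.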