author    | 2023-04-12 18:40:21 -0700
committer | 2023-04-12 18:40:21 -0700
commit    | ff5c522712f4b9abb5614cc5c866f9c2b37070eb (patch)
tree      | 08bc68f5cd3374d3a731790b0c5d1fb0e5349329
parent    | 14f87156a2213cd099600bcfba9f98050d45b948 (diff)
download  | bun-ff5c522712f4b9abb5614cc5c866f9c2b37070eb.tar.gz
          | bun-ff5c522712f4b9abb5614cc5c866f9c2b37070eb.tar.zst
          | bun-ff5c522712f4b9abb5614cc5c866f9c2b37070eb.zip
bundler bug fixes (#2637)
* append import to outer wrapper prefix
* print space
* require text loader
* import empty esm and cjs
* add text to schema
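
The headline change is a new `text` loader (enum value 13 in the schema), which lets the bundler pull plain-text files into the module graph as strings instead of rejecting every loader that isn't JavaScript-like or JSON. A minimal usage sketch, assuming a hypothetical `message.txt` fixture next to the entry point (neither file appears in this diff):

```ts
// message.txt (hypothetical fixture):
//   Hello from a text file!

// entry.ts — with the text loader, a .txt import resolves to the file's
// contents as a string (the loader wraps it in a lazy default export).
import message from "./message.txt";
console.log(message); // expected: "Hello from a text file!"

// require() of the same file goes through the same lazy-export path,
// which is what the "require text loader" bullet above refers to.
console.log(require("./message.txt")); // expected: the same string
```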
-rw-r--r-- | src/api/schema.d.ts                         |   3
-rw-r--r-- | src/api/schema.js                            |   4
-rw-r--r-- | src/api/schema.peechy                        |   1
-rw-r--r-- | src/api/schema.zig                           |   3
-rw-r--r-- | src/bun.js/javascript.zig                    |   2
-rw-r--r-- | src/bundler/bundle_v2.zig                    | 812
-rw-r--r-- | src/bundler/generate_node_modules_bundle.zig |  20
-rw-r--r-- | src/import_record.zig                        |   2
-rw-r--r-- | src/js_ast.zig                               |   4
-rw-r--r-- | src/js_parser.zig                            |  13
-rw-r--r-- | src/js_printer.zig                           |  20
-rw-r--r-- | src/options.zig                              |   9
-rw-r--r-- | src/runtime.js                               |   8
-rw-r--r-- | test/bundler/esbuild/default.test.ts         |   4
-rw-r--r-- | test/bundler/esbuild/importstar.test.ts      |   6
-rw-r--r-- | test/bundler/expectBundled.ts                |   2
16 files changed, 475 insertions, 438 deletions
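
One fix is easiest to see from the user's side before reading the hunks: the "import empty esm and cjs" change (in src/js_parser.zig below) makes a completely empty module infer its exports kind from its extension (.mjs → ESM, .cjs → CommonJS) instead of falling back to none, and the importstar tests below now match the linker's warning text without the "WARNING:" prefix. A hedged sketch of the scenario those tests exercise — the file names are illustrative:

```ts
// empty.mjs and empty.cjs — both files are completely empty.

// entry-nope.js — importing a named binding that the empty module cannot provide.
import { nope } from "./empty.mjs";

// Bundling still succeeds, but the linker is expected to warn along the lines of:
//   Import "nope" will always be undefined because the file "empty.mjs" has no exports
// and the binding is simply undefined at run time.
console.log(nope); // undefined
```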
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts index 6148aa10a..ed58db778 100644 --- a/src/api/schema.d.ts +++ b/src/api/schema.d.ts @@ -26,6 +26,7 @@ export const enum Loader { napi = 10, base64 = 11, dataurl = 12, + text = 13, } export const LoaderKeys: { 1: "jsx"; @@ -52,6 +53,8 @@ export const LoaderKeys: { base64: "base64"; 12: "dataurl"; dataurl: "dataurl"; + 13: "text"; + text: "text"; }; export const enum FrameworkEntryPointType { client = 1, diff --git a/src/api/schema.js b/src/api/schema.js index bc8a26215..3ec60fb4f 100644 --- a/src/api/schema.js +++ b/src/api/schema.js @@ -11,6 +11,7 @@ const Loader = { "10": 10, "11": 11, "12": 12, + "13": 13, "jsx": 1, "js": 2, "ts": 3, @@ -23,6 +24,7 @@ const Loader = { "napi": 10, "base64": 11, "dataurl": 12, + "text": 13, }; const LoaderKeys = { "1": "jsx", @@ -37,6 +39,7 @@ const LoaderKeys = { "10": "napi", "11": "base64", "12": "dataurl", + "13": "text", "jsx": "jsx", "js": "js", "ts": "ts", @@ -49,6 +52,7 @@ const LoaderKeys = { "napi": "napi", "base64": "base64", "dataurl": "dataurl", + "text": "text", }; const FrameworkEntryPointType = { "1": 1, diff --git a/src/api/schema.peechy b/src/api/schema.peechy index bba0e40f9..a6c0fed8a 100644 --- a/src/api/schema.peechy +++ b/src/api/schema.peechy @@ -13,6 +13,7 @@ smol Loader { napi = 10; base64 = 11; dataurl = 12; + text = 13; } smol FrameworkEntryPointType { diff --git a/src/api/schema.zig b/src/api/schema.zig index 0c5f4cf70..2629f1c5e 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -368,6 +368,9 @@ pub const Api = struct { /// dataurl dataurl, + /// text + text, + _, pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void { diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index f57f53fd6..3b8b8903e 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -2980,7 +2980,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime const loader = (bundler.options.loaders.get(Fs.PathName.init(changed_name).ext) orelse .file); var prev_entry_id: usize = std.math.maxInt(usize); - if (loader.isJavaScriptLikeOrJSON() or loader == .css) { + if (loader != .file) { var path_string: bun.PathString = undefined; var file_hash: @This().Watcher.HashType = last_file_hash; const abs_path: string = brk: { diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 10ad9898c..b8854bfc1 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -334,7 +334,6 @@ pub const BundleV2 = struct { var path = result.path() orelse return null; const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file; - if (!loader.isJavaScriptLikeOrJSON()) return null; var entry = try this.graph.path_to_source_index_map.getOrPut(this.graph.allocator, hash orelse wyhash(0, path.text)); if (entry.found_existing) { @@ -595,11 +594,7 @@ pub const BundleV2 = struct { while (iter.next()) |entry| { const hash = entry.key_ptr.*; const value = entry.value_ptr.*; - const loader = value.loader orelse options.Loader.file; - if (!loader.isJavaScriptLikeOrJSON()) { - // TODO: - continue; - } + var existing = graph.path_to_source_index_map.getOrPut(graph.allocator, hash) catch unreachable; // If the same file is imported and required, and those point to different files @@ -796,6 +791,11 @@ const ParseTask = struct { threadlocal var override_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + fn getEmptyAST(log: *Logger.Log, bundler: *Bundler, opts: js_parser.Parser.Options, allocator: 
std.mem.Allocator, source: Logger.Source) !js_ast.Ast { + const root = Expr.init(E.Undefined, E.Undefined{}, Logger.Loc.Empty); + return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?; + } + fn getAST( log: *Logger.Log, bundler: *Bundler, @@ -813,7 +813,7 @@ const ParseTask = struct { bundler.options.define, log, &source, - )) orelse return js_ast.Ast.empty; + )) orelse return try getEmptyAST(log, bundler, opts, allocator, source); }, .json => { const root = (try resolver.caches.json.parseJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty); @@ -823,7 +823,14 @@ const ParseTask = struct { const root = try TOML.parse(&source, log, allocator); return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?; }, - else => return js_ast.Ast.empty, + .text => { + const root = Expr.init(E.String, E.String{ + .data = source.contents, + .prefer_template = true, + }, Logger.Loc{ .start = 0 }); + return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?; + }, + else => return try getEmptyAST(log, bundler, opts, allocator, source), } } @@ -948,17 +955,8 @@ const ParseTask = struct { var ast: js_ast.Ast = if (!is_empty) try getAST(log, bundler, opts, allocator, resolver, source, loader) - else brk: { - var empty = js_ast.Ast.empty; - empty.named_imports.allocator = allocator; - empty.named_exports.allocator = allocator; - var _parts = allocator.alloc(js_ast.Part, 1) catch unreachable; - _parts[0] = js_ast.Part{ - .can_be_removed_if_unused = true, - }; - empty.parts = BabyList(js_ast.Part).init(_parts[0..1]); - break :brk empty; - }; + else + try getEmptyAST(log, bundler, opts, allocator, source); ast.platform = platform; if (ast.parts.len <= 1) { @@ -1702,12 +1700,12 @@ const LinkerGraph = struct { part.symbol_uses = uses; const exports_ref = g.ast.items(.exports_ref)[source_index]; - const module_ref = g.ast.items(.module_ref)[source_index].?; - if (ref.eql(exports_ref)) { + const module_ref = g.ast.items(.module_ref)[source_index]; + if (!exports_ref.isNull() and ref.eql(exports_ref)) { g.ast.items(.uses_exports_ref)[source_index] = true; } - if (ref.eql(module_ref)) { + if (!module_ref.isNull() and ref.eql(module_ref)) { g.ast.items(.uses_module_ref)[source_index] = true; } @@ -2524,7 +2522,7 @@ const LinkerContext = struct { .data = stmt.data.s_lazy_export, .loc = stmt.loc, }; - const module_ref = this.graph.ast.items(.module_ref)[source_index].?; + const module_ref = this.graph.ast.items(.module_ref)[source_index]; switch (exports_kind) { .cjs => { @@ -2647,7 +2645,7 @@ const LinkerContext = struct { var export_star_import_records: [][]u32 = this.graph.ast.items(.export_star_import_records); var exports_refs: []Ref = this.graph.ast.items(.exports_ref); - var module_refs: []?Ref = this.graph.ast.items(.module_ref); + var module_refs: []Ref = this.graph.ast.items(.module_ref); var lazy_exports: []bool = this.graph.ast.items(.has_lazy_export); var symbols = &this.graph.symbols; defer this.graph.symbols = symbols.*; @@ -2899,7 +2897,7 @@ const LinkerContext = struct { export_kind == .cjs and flag.wrap == .none) { const exports_ref = symbols.follow(exports_refs[id]); - const module_ref = symbols.follow(module_refs[id].?); + const module_ref = symbols.follow(module_refs[id]); symbols.get(exports_ref).?.kind = .unbound; symbols.get(module_ref).?.kind = .unbound; } else if (flag.force_include_exports_for_entry_point or 
export_kind != .cjs) { @@ -2907,8 +2905,8 @@ const LinkerContext = struct { flags[id] = flag; } - const wrapped_ref = this.graph.ast.items(.wrapper_ref)[id] orelse continue; - if (wrapped_ref.isNull()) continue; + const wrapped_ref = this.graph.ast.items(.wrapper_ref)[id]; + if (wrapped_ref.isNull() or wrapped_ref.isEmpty()) continue; // Create the wrapper part for wrapped files. This is needed by a later step. this.createWrapperForFile( @@ -2961,7 +2959,7 @@ const LinkerContext = struct { this.graph.symbols.get(exports_ref) else null; - const module_ref = module_refs[id] orelse Ref.None; + const module_ref = module_refs[id]; var module_symbol: ?*js_ast.Symbol = if (module_ref.isValid()) this.graph.symbols.get(module_ref) else @@ -3023,7 +3021,7 @@ const LinkerContext = struct { ) catch unreachable; buf = buf[original_name.len..]; - this.graph.symbols.get(wrapper_refs[id].?).?.original_name = original_name; + this.graph.symbols.get(wrapper_refs[id]).?.original_name = original_name; } // If this isn't CommonJS, then rename the unused "exports" and "module" @@ -3241,7 +3239,7 @@ const LinkerContext = struct { if (other_flags.wrap != .none) { // Depend on the automatically-generated require wrapper symbol - const wrapper_ref = wrapper_refs[other_id].?; + const wrapper_ref = wrapper_refs[other_id]; this.graph.generateSymbolImportAndUse( source_index, @intCast(u32, part_index), @@ -3861,7 +3859,7 @@ const LinkerContext = struct { flags: []const JSMeta.Flags, entry_point_chunk_indices: []Index.Int, imports_to_bind: []RefImportData, - wrapper_refs: []const ?Ref, + wrapper_refs: []const Ref, sorted_and_filtered_export_aliases: []const []const string, resolved_exports: []const ResolvedExports, ctx: *LinkerContext, @@ -3882,7 +3880,7 @@ const LinkerContext = struct { var import_records = deps.import_records[source_index].slice(); const imports_to_bind = deps.imports_to_bind[source_index]; const wrap = deps.flags[source_index].wrap; - const wrapper_ref = deps.wrapper_refs[source_index].?; + const wrapper_ref = deps.wrapper_refs[source_index]; const _chunks = deps.chunks; for (parts) |part| { @@ -4002,12 +4000,12 @@ const LinkerContext = struct { // Ensure "exports" is included if the current output format needs it if (flags.force_include_exports_for_entry_point) { - imports.put(deps.wrapper_refs[chunk.entry_point.source_index].?, {}) catch unreachable; + imports.put(deps.wrapper_refs[chunk.entry_point.source_index], {}) catch unreachable; } // Include the wrapper if present if (flags.wrap != .none) { - imports.put(deps.wrapper_refs[chunk.entry_point.source_index].?, {}) catch unreachable; + imports.put(deps.wrapper_refs[chunk.entry_point.source_index], {}) catch unreachable; } } } @@ -4305,7 +4303,7 @@ const LinkerContext = struct { const all_module_scopes = c.graph.ast.items(.module_scope); const all_flags: []const JSMeta.Flags = c.graph.meta.items(.flags); const all_parts: []const js_ast.Part.List = c.graph.ast.items(.parts); - const all_wrapper_refs: []const ?Ref = c.graph.ast.items(.wrapper_ref); + const all_wrapper_refs: []const Ref = c.graph.ast.items(.wrapper_ref); const all_import_records: []const ImportRecord.List = c.graph.ast.items(.import_records); var r = try renamer.NumberRenamer.init( @@ -4379,7 +4377,7 @@ const LinkerContext = struct { // scope to this new top-level scope) but it's good enough for the // renaming code. 
.cjs => { - r.addTopLevelSymbol(all_wrapper_refs[source_index].?); + r.addTopLevelSymbol(all_wrapper_refs[source_index]); // External import statements will be hoisted outside of the CommonJS // wrapper if the output format supports import statements. We need to @@ -4446,7 +4444,7 @@ const LinkerContext = struct { // minify everything inside the closure without introducing a new scope // since all top-level variables will be hoisted outside of the closure. .esm => { - r.addTopLevelSymbol(all_wrapper_refs[source_index].?); + r.addTopLevelSymbol(all_wrapper_refs[source_index]); }, else => {}, @@ -4879,14 +4877,14 @@ const LinkerContext = struct { .{ .default_name = .{ .loc = Logger.Loc.Empty, - .ref = ast.wrapper_ref.?, + .ref = ast.wrapper_ref, }, .value = .{ .expr = Expr.init( E.Call, E.Call{ .target = Expr.initIdentifier( - ast.wrapper_ref.?, + ast.wrapper_ref, Logger.Loc.Empty, ), }, @@ -4913,7 +4911,7 @@ const LinkerContext = struct { E.Call, E.Call{ .target = Expr.initIdentifier( - ast.wrapper_ref.?, + ast.wrapper_ref, Logger.Loc.Empty, ), }, @@ -4936,7 +4934,7 @@ const LinkerContext = struct { E.Call, E.Call{ .target = Expr.initIdentifier( - ast.wrapper_ref.?, + ast.wrapper_ref, Logger.Loc.Empty, ), }, @@ -5120,7 +5118,7 @@ const LinkerContext = struct { Expr.init( E.Call, .{ - .target = Expr.initIdentifier(ast.wrapper_ref.?, Logger.Loc.Empty), + .target = Expr.initIdentifier(ast.wrapper_ref, Logger.Loc.Empty), }, Logger.Loc.Empty, ), @@ -5137,7 +5135,7 @@ const LinkerContext = struct { .value = Expr.init( E.Call, .{ - .target = Expr.initIdentifier(ast.wrapper_ref.?, Logger.Loc.Empty), + .target = Expr.initIdentifier(ast.wrapper_ref, Logger.Loc.Empty), }, Logger.Loc.Empty, ), @@ -5343,7 +5341,7 @@ const LinkerContext = struct { E.Call, E.Call{ .target = Expr.initIdentifier( - c.graph.ast.items(.wrapper_ref)[record.source_index.get()].?, + c.graph.ast.items(.wrapper_ref)[record.source_index.get()], loc, ), }, @@ -5381,6 +5379,32 @@ const LinkerContext = struct { return true; } + /// Code we ultimately include in the bundle is potentially wrapped + /// + /// In that case, we do a final pass over the statements list to figure out + /// where it needs to go in the wrapper, following the syntax of the output + /// format ESM import and export statements to always be top-level, so they + /// can never be inside the wrapper. + /// + /// prefix - outer + /// ... + /// init_esm = () => { + /// prefix - inner + /// ... + /// suffix - inenr + /// }; + /// ... 
+ /// suffix - outer + /// + /// Keep in mind that we may need to wrap ES modules in some cases too + /// Consider: + /// import * as foo from 'bar'; + /// foo[computedProperty] + /// + /// In that case, when bundling, we still need to preserve that module + /// namespace object (foo) because we cannot know what they are going to + /// attempt to access statically + /// fn convertStmtsForChunk( c: *LinkerContext, source_index: u32, @@ -5392,7 +5416,7 @@ const LinkerContext = struct { ast: *const js_ast.Ast, ) !void { const shouldExtractESMStmtsForWrap = wrap != .none; - const shouldStripExports = c.options.mode != .passthrough or !chunk.isEntryPoint(); + const shouldStripExports = c.options.mode != .passthrough or c.graph.files.items(.entry_point_kind)[source_index] != .none; const flags = c.graph.meta.items(.flags); @@ -5422,32 +5446,13 @@ const LinkerContext = struct { ); } - for (part_stmts) |_stmt| { - var stmt = _stmt; - switch (stmt.data) { - .s_import => |s| { - // "import * as ns from 'path'" - // "import {foo} from 'path'" - if (try c.shouldRemoveImportExportStmt( - stmts, - stmt.loc, - s.namespace_ref, - s.import_record_index, - allocator, - ast, - )) { - continue; - } - - // Make sure these don't end up in the wrapper closure - if (shouldExtractESMStmtsForWrap) { - try stmts.outside_wrapper_prefix.append(stmt); - continue; - } - }, - .s_export_star => |s| { - // "export * as ns from 'path'" - if (s.alias) |alias| { + for (part_stmts) |stmt_| { + var stmt = stmt_; + proccess_stmt: { + switch (stmt.data) { + .s_import => |s| { + // "import * as ns from 'path'" + // "import {foo} from 'path'" if (try c.shouldRemoveImportExportStmt( stmts, stmt.loc, @@ -5459,120 +5464,108 @@ const LinkerContext = struct { continue; } - if (shouldStripExports) { - // Turn this statement into "import * as ns from 'path'" - stmt = Stmt.alloc( - S.Import, - S.Import{ - .namespace_ref = s.namespace_ref, - .import_record_index = s.import_record_index, - .star_name_loc = alias.loc, - }, - stmt.loc, - ); - } - // Make sure these don't end up in the wrapper closure if (shouldExtractESMStmtsForWrap) { try stmts.outside_wrapper_prefix.append(stmt); continue; } - - break; - } - - // "export * from 'path'" - if (!shouldStripExports) { - break; - } - - const record = ast.import_records.at(s.import_record_index); - - // Is this export star evaluated at run time? 
- if (!record.source_index.isValid() and c.options.output_format.keepES6ImportExportSyntax()) { - if (record.calls_runtime_re_export_fn) { - // Turn this statement into "import * as ns from 'path'" - stmt = Stmt.alloc( - S.Import, - S.Import{ - .namespace_ref = s.namespace_ref, - .import_record_index = s.import_record_index, - .star_name_loc = stmt.loc, - }, + }, + .s_export_star => |s| { + // "export * as ns from 'path'" + if (s.alias) |alias| { + if (try c.shouldRemoveImportExportStmt( + stmts, stmt.loc, - ); - - // Prefix this module with "__reExport(exports, ns, module.exports)" - const export_star_ref = c.runtimeFunction("__reExport"); - var args = try allocator.alloc(Expr, 2 + @as(usize, @boolToInt(module_exports_for_export != null))); - args[0..2].* = .{ - Expr.init( - E.Identifier, - E.Identifier{ - .ref = ast.exports_ref, - }, - stmt.loc, - ), - Expr.init( - E.Identifier, - E.Identifier{ - .ref = s.namespace_ref, - }, - stmt.loc, - ), - }; - - if (module_exports_for_export) |mod| { - args[3] = mod; + s.namespace_ref, + s.import_record_index, + allocator, + ast, + )) { + continue; } - try stmts.inside_wrapper_prefix.append( - Stmt.alloc( - S.SExpr, - S.SExpr{ - .value = Expr.init( - E.Call, - E.Call{ - .target = Expr.init( - E.Identifier, - E.Identifier{ - .ref = export_star_ref, - }, - stmt.loc, - ), - .args = bun.BabyList(Expr).init(args), - }, - stmt.loc, - ), + if (shouldStripExports) { + // Turn this statement into "import * as ns from 'path'" + stmt = Stmt.alloc( + S.Import, + S.Import{ + .namespace_ref = s.namespace_ref, + .import_record_index = s.import_record_index, + .star_name_loc = alias.loc, }, stmt.loc, - ), - ); + ); + } // Make sure these don't end up in the wrapper closure if (shouldExtractESMStmtsForWrap) { try stmts.outside_wrapper_prefix.append(stmt); continue; } + + break :proccess_stmt; } - } else { - if (record.source_index.isValid()) { - const flag = flags[record.source_index.get()]; - if (flag.wrap == .esm) { + + // "export * from 'path'" + if (!shouldStripExports) { + break :proccess_stmt; + } + + const record = ast.import_records.at(s.import_record_index); + + // Is this export star evaluated at run time? 
+ if (!record.source_index.isValid() and c.options.output_format.keepES6ImportExportSyntax()) { + if (record.calls_runtime_re_export_fn) { + // Turn this statement into "import * as ns from 'path'" + stmt = Stmt.alloc( + S.Import, + S.Import{ + .namespace_ref = s.namespace_ref, + .import_record_index = s.import_record_index, + .star_name_loc = stmt.loc, + }, + stmt.loc, + ); + + // Prefix this module with "__reExport(exports, ns, module.exports)" + const export_star_ref = c.runtimeFunction("__reExport"); + var args = try allocator.alloc(Expr, 2 + @as(usize, @boolToInt(module_exports_for_export != null))); + args[0..2].* = .{ + Expr.init( + E.Identifier, + E.Identifier{ + .ref = ast.exports_ref, + }, + stmt.loc, + ), + Expr.init( + E.Identifier, + E.Identifier{ + .ref = s.namespace_ref, + }, + stmt.loc, + ), + }; + + if (module_exports_for_export) |mod| { + args[3] = mod; + } + try stmts.inside_wrapper_prefix.append( Stmt.alloc( S.SExpr, - .{ + S.SExpr{ .value = Expr.init( E.Call, E.Call{ .target = Expr.init( E.Identifier, E.Identifier{ - .ref = c.graph.ast.items(.wrapper_ref)[record.source_index.get()].?, + .ref = export_star_ref, }, stmt.loc, ), + .args = bun.BabyList(Expr).init(args), }, stmt.loc, ), @@ -5580,277 +5573,310 @@ const LinkerContext = struct { stmt.loc, ), ); - } - } - if (record.calls_runtime_re_export_fn) { - const target: Expr = brk: { - if (c.graph.ast.items(.exports_kind)[source_index] == .esm_with_dynamic_fallback) { - // Prefix this module with "__reExport(exports, otherExports, module.exports)" - break :brk Expr.initIdentifier(c.graph.ast.items(.exports_ref)[source_index], stmt.loc); + // Make sure these don't end up in the wrapper closure + if (shouldExtractESMStmtsForWrap) { + try stmts.outside_wrapper_prefix.append(stmt); + continue; } - - break :brk Expr.init( - E.Require, - E.Require{ - .import_record_index = s.import_record_index, - }, - stmt.loc, - ); - }; - - // Prefix this module with "__reExport(exports, require(path), module.exports)" - const export_star_ref = c.runtimeFunction("__reExport"); - var args = try allocator.alloc(Expr, 2 + @as(usize, @boolToInt(module_exports_for_export != null))); - args[0..2].* = .{ - Expr.init( - E.Identifier, - E.Identifier{ - .ref = ast.exports_ref, - }, - stmt.loc, - ), - target, - }; - - if (module_exports_for_export) |mod| { - args[3] = mod; } - - try stmts.inside_wrapper_prefix.append( - Stmt.alloc( - S.SExpr, - S.SExpr{ - .value = Expr.init( - E.Call, - E.Call{ - .target = Expr.init( - E.Identifier, - E.Identifier{ - .ref = export_star_ref, + } else { + if (record.source_index.isValid()) { + const flag = flags[record.source_index.get()]; + if (flag.wrap == .esm) { + try stmts.inside_wrapper_prefix.append( + Stmt.alloc( + S.SExpr, + .{ + .value = Expr.init( + E.Call, + E.Call{ + .target = Expr.init( + E.Identifier, + E.Identifier{ + .ref = c.graph.ast.items(.wrapper_ref)[record.source_index.get()], + }, + stmt.loc, + ), }, stmt.loc, ), - .args = js_ast.ExprNodeList.init(args), }, stmt.loc, ), - }, - stmt.loc, - ), - ); - } + ); + } + } - // Remove the export star statement - continue; - } - }, + if (record.calls_runtime_re_export_fn) { + const target: Expr = brk: { + if (c.graph.ast.items(.exports_kind)[source_index] == .esm_with_dynamic_fallback) { + // Prefix this module with "__reExport(exports, otherExports, module.exports)" + break :brk Expr.initIdentifier(c.graph.ast.items(.exports_ref)[source_index], stmt.loc); + } - .s_export_from => |s| { - // "export {foo} from 'path'" + break :brk Expr.init( + E.Require, + 
E.Require{ + .import_record_index = s.import_record_index, + }, + stmt.loc, + ); + }; + + // Prefix this module with "__reExport(exports, require(path), module.exports)" + const export_star_ref = c.runtimeFunction("__reExport"); + var args = try allocator.alloc(Expr, 2 + @as(usize, @boolToInt(module_exports_for_export != null))); + args[0..2].* = .{ + Expr.init( + E.Identifier, + E.Identifier{ + .ref = ast.exports_ref, + }, + stmt.loc, + ), + target, + }; - if (try c.shouldRemoveImportExportStmt( - stmts, - stmt.loc, - s.namespace_ref, - s.import_record_index, - allocator, - ast, - )) { - continue; - } + if (module_exports_for_export) |mod| { + args[3] = mod; + } - if (shouldStripExports) { - // Turn this statement into "import {foo} from 'path'" + try stmts.inside_wrapper_prefix.append( + Stmt.alloc( + S.SExpr, + S.SExpr{ + .value = Expr.init( + E.Call, + E.Call{ + .target = Expr.init( + E.Identifier, + E.Identifier{ + .ref = export_star_ref, + }, + stmt.loc, + ), + .args = js_ast.ExprNodeList.init(args), + }, + stmt.loc, + ), + }, + stmt.loc, + ), + ); + } - for (s.items) |*item| { - item.alias = item.original_name; + // Remove the export star statement + continue; } + }, - stmt = Stmt.alloc( - S.Import, - S.Import{ - .items = s.items, - .import_record_index = s.import_record_index, - .star_name_loc = stmt.loc, - .namespace_ref = s.namespace_ref, - .is_single_line = s.is_single_line, - }, + .s_export_from => |s| { + // "export {foo} from 'path'" + + if (try c.shouldRemoveImportExportStmt( + stmts, stmt.loc, - ); - } + s.namespace_ref, + s.import_record_index, + allocator, + ast, + )) { + continue; + } - // Make sure these don't end up in the wrapper closure - if (shouldExtractESMStmtsForWrap) { - try stmts.outside_wrapper_prefix.append(stmt); - continue; - } - }, + if (shouldStripExports) { + // Turn this statement into "import {foo} from 'path'" - .s_export_clause => { - // "export {foo}" + for (s.items) |*item| { + item.alias = item.original_name; + } - if (shouldStripExports) { - // Remove export statements entirely + stmt = Stmt.alloc( + S.Import, + S.Import{ + .items = s.items, + .import_record_index = s.import_record_index, + .star_name_loc = stmt.loc, + .namespace_ref = s.namespace_ref, + .is_single_line = s.is_single_line, + }, + stmt.loc, + ); + } - continue; - } + // Make sure these don't end up in the wrapper closure + if (shouldExtractESMStmtsForWrap) { + try stmts.outside_wrapper_prefix.append(stmt); + continue; + } + }, - // Make sure these don't end up in the wrapper closure - if (shouldExtractESMStmtsForWrap) { - try stmts.outside_wrapper_prefix.append(stmt); - continue; - } - }, + .s_export_clause => { + // "export {foo}" - .s_function => |s| { + if (shouldStripExports) { + // Remove export statements entirely - // Strip the "export" keyword while bundling - if (shouldStripExports and s.func.flags.contains(.is_export)) { - // Be c areful to not modify the original statement - stmt = Stmt.alloc( - S.Function, - S.Function{ - .func = s.func, - }, - stmt.loc, - ); - stmt.data.s_function.func.flags.remove(.is_export); - } - }, + continue; + } - .s_class => |s| { + // Make sure these don't end up in the wrapper closure + if (shouldExtractESMStmtsForWrap) { + try stmts.outside_wrapper_prefix.append(stmt); + continue; + } + }, - // Strip the "export" keyword while bundling - if (shouldStripExports and s.is_export) { - // Be c areful to not modify the original statement - stmt = Stmt.alloc( - S.Class, - S.Class{ - .class = s.class, - .is_export = false, - }, - stmt.loc, - ); - } 
- }, + .s_function => |s| { - .s_local => |s| { - // Strip the "export" keyword while bundling - if (shouldStripExports and s.is_export) { - // Be c areful to not modify the original statement - stmt = Stmt.alloc( - S.Local, - s.*, - stmt.loc, - ); - stmt.data.s_local.is_export = false; - } - }, + // Strip the "export" keyword while bundling + if (shouldStripExports and s.func.flags.contains(.is_export)) { + // Be c areful to not modify the original statement + stmt = Stmt.alloc( + S.Function, + S.Function{ + .func = s.func, + }, + stmt.loc, + ); + stmt.data.s_function.func.flags.remove(.is_export); + } + }, - .s_export_default => |s| { - // "export default foo" + .s_class => |s| { - if (shouldStripExports) { - switch (s.value) { - .stmt => |stmt2| { - switch (stmt2.data) { - .s_expr => |s2| { - // "export default foo;" => "var default = foo;" - stmt = Stmt.alloc( - S.Local, - S.Local{ - .decls = try bun.fromSlice( - []js_ast.G.Decl, - allocator, - []const js_ast.G.Decl, - &.{ - .{ - .binding = Binding.alloc( - allocator, - B.Identifier{ - .ref = s.default_name.ref.?, - }, - s2.value.loc, - ), - .value = s2.value, - }, - }, - ), - }, - stmt.loc, - ); - }, - .s_function => |s2| { - // "export default function() {}" => "function default() {}" - // "export default function foo() {}" => "function foo() {}" - - // Be careful to not modify the original statement - stmt = Stmt.alloc( - S.Function, - S.Function{ - .func = s2.func, - }, - stmt.loc, - ); - stmt.data.s_function.func.name = s.default_name; - }, + // Strip the "export" keyword while bundling + if (shouldStripExports and s.is_export) { + // Be c areful to not modify the original statement + stmt = Stmt.alloc( + S.Class, + S.Class{ + .class = s.class, + .is_export = false, + }, + stmt.loc, + ); + } + }, - .s_class => |s2| { - // "export default class {}" => "class default {}" - // "export default class foo {}" => "class foo {}" + .s_local => |s| { + // Strip the "export" keyword while bundling + if (shouldStripExports and s.is_export) { + // Be c areful to not modify the original statement + stmt = Stmt.alloc( + S.Local, + s.*, + stmt.loc, + ); + stmt.data.s_local.is_export = false; + } + }, - // Be careful to not modify the original statement - stmt = Stmt.alloc( - S.Class, - S.Class{ - .class = s2.class, - .is_export = false, - }, - stmt.loc, - ); - stmt.data.s_class.class.class_name = s.default_name; - }, + .s_export_default => |s| { + // "export default foo" - else => bun.unreachablePanic( - "Unexpected type {any} in source file {s}", - .{ - stmt2.data, - c.parse_graph.input_files.get(c.graph.files.get(source_index).input_file.get()).source.path.text, - }, - ), - } - }, - .expr => |e| { - stmt = Stmt.alloc( - S.Local, - S.Local{ - .decls = try bun.fromSlice( - []js_ast.G.Decl, - allocator, - []const js_ast.G.Decl, - &.{ - .{ - .binding = Binding.alloc( + if (shouldStripExports) { + switch (s.value) { + .stmt => |stmt2| { + switch (stmt2.data) { + .s_expr => |s2| { + // "export default foo;" => "var default = foo;" + stmt = Stmt.alloc( + S.Local, + S.Local{ + .decls = try bun.fromSlice( + []js_ast.G.Decl, allocator, - B.Identifier{ - .ref = s.default_name.ref.?, + []const js_ast.G.Decl, + &.{ + .{ + .binding = Binding.alloc( + allocator, + B.Identifier{ + .ref = s.default_name.ref.?, + }, + s2.value.loc, + ), + .value = s2.value, + }, }, - e.loc, ), - .value = e, }, + stmt.loc, + ); + }, + .s_function => |s2| { + // "export default function() {}" => "function default() {}" + // "export default function foo() {}" => "function foo() {}" 
+ + // Be careful to not modify the original statement + stmt = Stmt.alloc( + S.Function, + S.Function{ + .func = s2.func, + }, + stmt.loc, + ); + stmt.data.s_function.func.name = s.default_name; + }, + + .s_class => |s2| { + // "export default class {}" => "class default {}" + // "export default class foo {}" => "class foo {}" + + // Be careful to not modify the original statement + stmt = Stmt.alloc( + S.Class, + S.Class{ + .class = s2.class, + .is_export = false, + }, + stmt.loc, + ); + stmt.data.s_class.class.class_name = s.default_name; + }, + + else => bun.unreachablePanic( + "Unexpected type {any} in source file {s}", + .{ + stmt2.data, + c.parse_graph.input_files.get(c.graph.files.get(source_index).input_file.get()).source.path.text, }, ), - }, - stmt.loc, - ); - }, + } + }, + .expr => |e| { + stmt = Stmt.alloc( + S.Local, + S.Local{ + .decls = try bun.fromSlice( + []js_ast.G.Decl, + allocator, + []const js_ast.G.Decl, + &.{ + .{ + .binding = Binding.alloc( + allocator, + B.Identifier{ + .ref = s.default_name.ref.?, + }, + e.loc, + ), + .value = e, + }, + }, + ), + }, + stmt.loc, + ); + }, + } } - } - }, + }, - else => {}, + else => {}, + } } try stmts.inside_wrapper_suffix.append(stmt); @@ -6097,7 +6123,7 @@ const LinkerContext = struct { .binding = js_ast.Binding.alloc( temp_allocator, js_ast.B.Identifier{ - .ref = ast.module_ref.?, + .ref = ast.module_ref, }, Logger.Loc.Empty, ), @@ -6142,7 +6168,7 @@ const LinkerContext = struct { .binding = Binding.alloc( temp_allocator, B.Identifier{ - .ref = ast.wrapper_ref.?, + .ref = ast.wrapper_ref, }, Logger.Loc.Empty, ), @@ -6290,7 +6316,7 @@ const LinkerContext = struct { .binding = Binding.alloc( temp_allocator, B.Identifier{ - .ref = ast.wrapper_ref.?, + .ref = ast.wrapper_ref, }, Logger.Loc.Empty, ), @@ -6376,7 +6402,7 @@ const LinkerContext = struct { else Ref.None, .is_wrapper_async = flags.is_async_or_has_async_dependency, - .wrapper_ref = c.graph.ast.items(.wrapper_ref)[source_index] orelse Ref.None, + .wrapper_ref = c.graph.ast.items(.wrapper_ref)[source_index], }; } @@ -7114,8 +7140,8 @@ const LinkerContext = struct { c.allocator, &[_]js_ast.DeclaredSymbol{ .{ .ref = c.graph.ast.items(.exports_ref)[source_index], .is_top_level = true }, - .{ .ref = c.graph.ast.items(.module_ref)[source_index].?, .is_top_level = true }, - .{ .ref = c.graph.ast.items(.wrapper_ref)[source_index].?, .is_top_level = true }, + .{ .ref = c.graph.ast.items(.module_ref)[source_index], .is_top_level = true }, + .{ .ref = c.graph.ast.items(.wrapper_ref)[source_index], .is_top_level = true }, }, ) catch unreachable, .dependencies = Dependency.List.init(dependencies), diff --git a/src/bundler/generate_node_modules_bundle.zig b/src/bundler/generate_node_modules_bundle.zig index 25cfe912d..4a3323086 100644 --- a/src/bundler/generate_node_modules_bundle.zig +++ b/src/bundler/generate_node_modules_bundle.zig @@ -282,8 +282,6 @@ fn upsert(this: *GenerateNodeModuleBundle, module_id: u32, resolve: _resolver.Re pub fn ensurePathIsAllocated(this: *GenerateNodeModuleBundle, path_: ?*Fs.Path) !void { var path = path_ orelse return; - const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file; - if (!loader.isJavaScriptLikeOrJSON()) return; path.* = try path.dupeAlloc(this.allocator); } @@ -292,7 +290,6 @@ pub fn enqueueItem(this: *GenerateNodeModuleBundle, resolve: _resolver.Result) ! 
var path = result.path() orelse return; const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file; - if (!loader.isJavaScriptLikeOrJSON()) return; path.* = try path.dupeAlloc(this.allocator); if (BundledModuleData.get(this, &result)) |mod| { @@ -1335,7 +1332,7 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, if (JSC.DisabledModule.has(import_record.path.text)) { import_record.path.is_disabled = true; import_record.do_commonjs_transform_in_printer = true; - import_record.is_bundled = true; + import_record.is_legacy_bundled = true; continue; } @@ -1343,7 +1340,7 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, import_record.path.text = remapped.path; import_record.tag = remapped.tag; if (remapped.tag != .none) { - import_record.is_bundled = false; + import_record.is_legacy_bundled = false; continue; } } @@ -1360,14 +1357,6 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, continue; }; - const loader_ = bundler.options.loader(path.name.ext); - - if (!loader_.isJavaScriptLikeOrJSON()) { - import_record.path.is_disabled = true; - - continue; - } - // if (_resolved_import.package_json == null) |pkg_json| { // _resolved_import.package_json = if (pkg_json.hash == resolve.package_json.?.hash) // resolve.package_json @@ -1781,7 +1770,7 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, if (bundler.options.platform.isBun()) { if (JSC.DisabledModule.has(import_record.path.text)) { import_record.path.is_disabled = true; - import_record.is_bundled = true; + import_record.is_legacy_bundled = true; continue; } @@ -1789,7 +1778,7 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, import_record.path.text = remapped.path; import_record.tag = remapped.tag; if (remapped.tag != .none) { - import_record.is_bundled = false; + import_record.is_legacy_bundled = false; continue; } } @@ -1804,7 +1793,6 @@ pub fn processFile(this: *GenerateNodeModuleBundle, worker: *ThreadPool.Worker, var path = _resolved_import.path() orelse continue; const loader_ = this.bundler.options.loader(path.name.ext); - if (!loader_.isJavaScriptLikeOrJSON()) continue; path.* = try path.dupeAlloc(this.allocator); diff --git a/src/import_record.zig b/src/import_record.zig index 8d2fd9b7f..27e7bd31f 100644 --- a/src/import_record.zig +++ b/src/import_record.zig @@ -109,7 +109,7 @@ pub const ImportRecord = struct { /// This tells the printer that we should print as export var $moduleID = ... /// Instead of using the path. - is_bundled: bool = false, + is_legacy_bundled: bool = false, /// Sometimes the parser creates an import record and decides it isn't needed. 
/// For example, TypeScript code may have import statements that later turn diff --git a/src/js_ast.zig b/src/js_ast.zig index 743110d0b..8ae239e2f 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -5274,8 +5274,8 @@ pub const Ast = struct { module_scope: Scope = Scope{}, // char_freq: *CharFreq, exports_ref: Ref = Ref.None, - module_ref: ?Ref = null, - wrapper_ref: ?Ref = null, + module_ref: Ref = Ref.None, + wrapper_ref: Ref = Ref.None, require_ref: Ref = Ref.None, bun_plugin: BunPlugin = .{}, diff --git a/src/js_parser.zig b/src/js_parser.zig index 37d1765d9..8776d309f 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -2776,7 +2776,14 @@ pub const Parser = struct { var parts = try p.allocator.alloc(js_ast.Part, 2); parts[0..2].* = .{ ns_export_part, part }; - result.ast = try p.toAST(parts, js_ast.ExportsKind.none, null); + const exports_kind: js_ast.ExportsKind = brk: { + if (expr.data == .e_undefined) { + if (strings.eqlComptime(this.source.path.name.ext, ".cjs")) break :brk .cjs; + if (strings.eqlComptime(this.source.path.name.ext, ".mjs")) break :brk .esm; + } + break :brk .none; + }; + result.ast = try p.toAST(parts, exports_kind, null); result.ok = true; return result; @@ -20545,7 +20552,7 @@ fn NewParser_( } } - const wrapper_ref: ?Ref = brk: { + const wrapper_ref: Ref = brk: { if (p.options.bundle) { break :brk p.newSymbol( .other, @@ -20559,7 +20566,7 @@ fn NewParser_( ) catch unreachable; } - break :brk @as(?Ref, null); + break :brk Ref.None; }; return .{ diff --git a/src/js_printer.zig b/src/js_printer.zig index 1bf1fabd2..7042694c7 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -2067,14 +2067,14 @@ fn NewPrinter( } }, .e_require => |e| { - if (rewrite_esm_to_cjs and p.importRecord(e.import_record_index).is_bundled) { + if (rewrite_esm_to_cjs and p.importRecord(e.import_record_index).is_legacy_bundled) { p.printIndent(); p.printBundledRequire(e); p.printSemicolonIfNeeded(); return; } - if (!rewrite_esm_to_cjs or !p.importRecord(e.import_record_index).is_bundled) { + if (!rewrite_esm_to_cjs or !p.importRecord(e.import_record_index).is_legacy_bundled) { p.printRequireOrImportExpr(e.import_record_index, &([_]G.Comment{}), level, flags); } }, @@ -2581,7 +2581,7 @@ fn NewPrinter( } else if (symbol.namespace_alias) |namespace| { if (namespace.import_record_index < p.import_records.len) { const import_record = p.importRecord(namespace.import_record_index); - if ((comptime is_inside_bundle) or import_record.is_bundled or namespace.was_originally_property_access) { + if ((comptime is_inside_bundle) or import_record.is_legacy_bundled or namespace.was_originally_property_access) { var wrap = false; didPrint = true; @@ -3731,7 +3731,7 @@ fn NewPrinter( if (symbol.namespace_alias) |namespace| { const import_record = p.importRecord(namespace.import_record_index); - if (import_record.is_bundled or (comptime is_inside_bundle) or namespace.was_originally_property_access) { + if (import_record.is_legacy_bundled or (comptime is_inside_bundle) or namespace.was_originally_property_access) { p.printIdentifier(name); p.print(": () => "); p.printNamespaceAlias(import_record.*, namespace); @@ -3802,7 +3802,7 @@ fn NewPrinter( if (p.symbols().get(item.name.ref.?)) |symbol| { if (symbol.namespace_alias) |namespace| { const import_record = p.importRecord(namespace.import_record_index); - if (import_record.is_bundled or (comptime is_inside_bundle) or namespace.was_originally_property_access) { + if (import_record.is_legacy_bundled or (comptime is_inside_bundle) or 
namespace.was_originally_property_access) { p.print("var "); p.printSymbol(item.name.ref.?); p.@"print = "(); @@ -4483,7 +4483,7 @@ fn NewPrinter( } return; - } else if (record.is_bundled) { + } else if (record.is_legacy_bundled) { if (!record.path.is_disabled) { if (!p.has_printed_bundled_import_statement) { p.has_printed_bundled_import_statement = true; @@ -4494,14 +4494,14 @@ fn NewPrinter( // This might be a determinsim issue // But, it's not random skip: for (p.import_records, 0..) |_record, i| { - if (!_record.is_bundled or _record.module_id == 0) continue; + if (!_record.is_legacy_bundled or _record.module_id == 0) continue; if (i < last) { // Prevent importing the same module ID twice // We could use a map but we want to avoid allocating // and this should be pretty quick since it's just comparing a uint32 for (p.import_records[i + 1 ..]) |_record2| { - if (_record2.is_bundled and _record2.module_id > 0 and _record2.module_id == _record.module_id) { + if (_record2.is_legacy_bundled and _record2.module_id > 0 and _record2.module_id == _record.module_id) { continue :skip; } } @@ -4580,11 +4580,11 @@ fn NewPrinter( item_count += 1; } - if (s.star_name_loc != null) { + if (record.contains_import_star) { if (item_count > 0) { p.print(","); - p.printSpace(); } + p.printSpace(); p.printWhitespacer(ws("* as ")); p.printSymbol(s.namespace_ref); diff --git a/src/options.zig b/src/options.zig index b0756a396..feadc08d7 100644 --- a/src/options.zig +++ b/src/options.zig @@ -741,6 +741,8 @@ pub const Loader = enum { .{ "node", Loader.napi }, .{ "dataurl", Loader.dataurl }, .{ "base64", Loader.base64 }, + .{ "txt", Loader.text }, + .{ "text", Loader.text }, }); pub fn fromString(slice_: string) ?Loader { @@ -770,6 +772,7 @@ pub const Loader = enum { .toml => .toml, .wasm => .wasm, .napi => .napi, + .text => .text, else => .file, }; } @@ -785,6 +788,7 @@ pub const Loader = enum { .toml => .toml, .wasm => .wasm, .napi => .napi, + .text => .text, else => .file, }; } @@ -840,6 +844,8 @@ pub const defaultLoaders = ComptimeStringMap(Loader, .{ .{ ".toml", Loader.toml }, .{ ".wasm", Loader.wasm }, .{ ".node", Loader.napi }, + .{ ".txt", Loader.text }, + .{ ".text", Loader.text }, }); // https://webpack.js.org/guides/package-exports/#reference-syntax @@ -1143,6 +1149,7 @@ const default_loader_ext = [_]string{ ".mts", ".cts", ".toml", ".wasm", + ".txt", ".text", }; pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api.LoaderMap, platform: Platform) !bun.StringArrayHashMap(Loader) { @@ -1161,6 +1168,7 @@ pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api. .toml => Loader.toml, .wasm => Loader.wasm, .napi => Loader.napi, + .text => Loader.text, else => unreachable, }; @@ -1177,6 +1185,7 @@ pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api. 
.json => Loader.json, .toml => Loader.toml, .wasm => Loader.wasm, + .text => Loader.text, else => unreachable, }; diff --git a/src/runtime.js b/src/runtime.js index b4631ea1e..7255a5ace 100644 --- a/src/runtime.js +++ b/src/runtime.js @@ -176,12 +176,8 @@ export var __internalIsCommonJSNamespace = /* @__PURE__ */ namespace => ((namespace.default && namespace.default[cjsRequireSymbol]) || namespace[cjsRequireSymbol]); // require() -export var __require = /* @__PURE__ */ namespace => { - if (__internalIsCommonJSNamespace(namespace)) { - return namespace.default(); - } - - return namespace; +export var __require = /* @__PURE__ */ id => { + return import.meta.require(id); }; export var $$m = __commonJS; diff --git a/test/bundler/esbuild/default.test.ts b/test/bundler/esbuild/default.test.ts index 4d0c78c55..31cbe6169 100644 --- a/test/bundler/esbuild/default.test.ts +++ b/test/bundler/esbuild/default.test.ts @@ -985,10 +985,10 @@ describe("bundler", () => { itBundled("default/RequireTxt", { files: { "/entry.js": `console.log(require('./test.txt'))`, - "/test.txt": `This is a test.`, + "/test.txt": `This is a\` test.`, }, run: { - stdout: "This is a test.", + stdout: "This is a` test.", }, }); itBundled("default/RequireBadExtension", { diff --git a/test/bundler/esbuild/importstar.test.ts b/test/bundler/esbuild/importstar.test.ts index 8231b7043..482655236 100644 --- a/test/bundler/esbuild/importstar.test.ts +++ b/test/bundler/esbuild/importstar.test.ts @@ -1260,9 +1260,9 @@ describe("bundler", () => { entryPoints: ["/entry-nope.js", "/entry-default.js"], bundleWarnings: { "/entry-nope.js": [ - 'WARNING: Import "nope" will always be undefined because the file "empty.js" has no exports', - 'WARNING: Import "nope" will always be undefined because the file "empty.mjs" has no exports', - 'WARNING: Import "nope" will always be undefined because the file "empty.cjs" has no exports', + 'Import "nope" will always be undefined because the file "empty.js" has no exports', + 'Import "nope" will always be undefined because the file "empty.mjs" has no exports', + 'Import "nope" will always be undefined because the file "empty.cjs" has no exports', ], }, run: [ diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts index cd62ce515..460e484b4 100644 --- a/test/bundler/expectBundled.ts +++ b/test/bundler/expectBundled.ts @@ -561,7 +561,7 @@ export function expectBundled(id: string, opts: BundlerTestInput, dryRun?: boole let unexpectedWarnings = []; for (const error of allWarnings) { - const i = warningsLeft.findIndex(item => error.file === item.file && item.error === error.error); + const i = warningsLeft.findIndex(item => error.file === item.file && item.error.includes(error.error)); if (i === -1) { unexpectedWarnings.push(error); } else { |
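
Because the text loader stores file contents with `prefer_template = true` (see the bundle_v2.zig hunk above), the printer must escape any backtick that appears in the source text; the updated default/RequireTxt fixture pins this down by putting a literal backtick in test.txt. A small sketch of the expected round trip — the emitted variable name is illustrative, not taken from the diff:

```ts
// test.txt fixture (note the literal backtick):
//   This is a` test.

// entry.js
console.log(require("./test.txt"));

// In the bundled output, the text loader is expected to inline the file as an
// escaped template literal, roughly:
//   var test_exports = `This is a\` test.`;
// so running the bundle prints: This is a` test.
```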