author    Jarred Sumner <jarred@jarredsumner.com>  2023-04-10 06:35:41 -0700
committer GitHub <noreply@github.com>              2023-04-10 06:35:41 -0700
commit    373248ce9c2915494b6dac5da958a39e236a5c10 (patch)
tree      b66908e9775563518b536293da7fd7d80c0f56ae
parent    5c37d0f2a57af40352ef8a30f941551c24fe06eb (diff)
Implement TOML & JSON support in Bun's new bundler (#2609)
* Implement JSON & TOML support in the bundler
* Fix failing to bind namespace imports
* Support namespace exports better

---------

Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
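With this change, files bundled by the new bundler can import JSON (and now
TOML) modules directly. A minimal sketch of the import shapes exercised by the
tests in this commit; "./config.toml" is an illustrative name, not a file from
the test suite:

    // entry.ts
    import pkg from "./package.json";          // default import: the whole object
    import { version } from "./package.json";  // named import of a top-level property
    import * as ns from "./package.json";      // namespace import (binding fixed by this commit)
    import config from "./config.toml";        // TOML lowers to the same lazy-export AST as JSON

    console.log(pkg.name, version, ns.version === version, config);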
-rw-r--r--  src/api/schema.d.ts                  |   6
-rw-r--r--  src/api/schema.js                    |   8
-rw-r--r--  src/api/schema.peechy                |   2
-rw-r--r--  src/api/schema.zig                   |   6
-rw-r--r--  src/bundler/bundle_v2.zig            | 755
-rw-r--r--  src/fs.zig                           |  15
-rw-r--r--  src/js_ast.zig                       |  26
-rw-r--r--  src/js_lexer.zig                     |   8
-rw-r--r--  src/js_parser.zig                    | 100
-rw-r--r--  src/options.zig                      |  44
-rw-r--r--  src/resolver/data_url.zig            |  27
-rw-r--r--  src/resolver/resolver.zig            |   3
-rw-r--r--  test/bundler/esbuild/loader.test.ts  |  92
13 files changed, 762 insertions(+), 330 deletions(-)
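The schema changes below register two new loader kinds, base64 (11) and
dataurl (12). A sketch of selecting them per extension through Bun.build's
loader map, assuming the public API accepts these loader names (".bin" and
".svg" are illustrative):

    await Bun.build({
      entrypoints: ["./entry.ts"],
      outdir: "./out",
      loader: {
        ".bin": "base64",  // embed the file's contents as a base64 string
        ".svg": "dataurl", // embed the file's contents as a data: URL
      },
    });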
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index e406fbc52..6148aa10a 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -24,6 +24,8 @@ export const enum Loader {
toml = 8,
wasm = 9,
napi = 10,
+ base64 = 11,
+ dataurl = 12,
}
export const LoaderKeys: {
1: "jsx";
@@ -46,6 +48,10 @@ export const LoaderKeys: {
wasm: "wasm";
10: "napi";
napi: "napi";
+ 11: "base64";
+ base64: "base64";
+ 12: "dataurl";
+ dataurl: "dataurl";
};
export const enum FrameworkEntryPointType {
client = 1,
diff --git a/src/api/schema.js b/src/api/schema.js
index 16c2ca119..bc8a26215 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -9,6 +9,8 @@ const Loader = {
"8": 8,
"9": 9,
"10": 10,
+ "11": 11,
+ "12": 12,
"jsx": 1,
"js": 2,
"ts": 3,
@@ -19,6 +21,8 @@ const Loader = {
"toml": 8,
"wasm": 9,
"napi": 10,
+ "base64": 11,
+ "dataurl": 12,
};
const LoaderKeys = {
"1": "jsx",
@@ -31,6 +35,8 @@ const LoaderKeys = {
"8": "toml",
"9": "wasm",
"10": "napi",
+ "11": "base64",
+ "12": "dataurl",
"jsx": "jsx",
"js": "js",
"ts": "ts",
@@ -41,6 +47,8 @@ const LoaderKeys = {
"toml": "toml",
"wasm": "wasm",
"napi": "napi",
+ "base64": "base64",
+ "dataurl": "dataurl",
};
const FrameworkEntryPointType = {
"1": 1,
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index f6e3e5617..bba0e40f9 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -11,6 +11,8 @@ smol Loader {
toml = 8;
wasm = 9;
napi = 10;
+ base64 = 11;
+ dataurl = 12;
}
smol FrameworkEntryPointType {
diff --git a/src/api/schema.zig b/src/api/schema.zig
index acac06114..0c5f4cf70 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -362,6 +362,12 @@ pub const Api = struct {
/// napi
napi,
+ /// base64
+ base64,
+
+ /// dataurl
+ dataurl,
+
_,
pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 808dfaff0..10ad9898c 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -572,7 +572,7 @@ pub const BundleV2 = struct {
result.watcher_data.fd,
result.source.path.text,
bun.hash32(result.source.path.text),
- this.bundler.options.loader(result.source.path.name.ext),
+ result.source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file,
result.watcher_data.dir_fd,
result.watcher_data.package_json,
false,
@@ -771,6 +771,7 @@ const ParseTask = struct {
use_directive: UseDirective = .none,
watcher_data: WatcherData = .{},
+ side_effects: ?_resolver.SideEffects = null,
};
pub const Error = struct {
@@ -795,6 +796,37 @@ const ParseTask = struct {
threadlocal var override_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ fn getAST(
+ log: *Logger.Log,
+ bundler: *Bundler,
+ opts: js_parser.Parser.Options,
+ allocator: std.mem.Allocator,
+ resolver: *Resolver,
+ source: Logger.Source,
+ loader: Loader,
+ ) !js_ast.Ast {
+ switch (loader) {
+ .jsx, .tsx, .js, .ts => {
+ return (try resolver.caches.js.parse(
+ bundler.allocator,
+ opts,
+ bundler.options.define,
+ log,
+ &source,
+ )) orelse return js_ast.Ast.empty;
+ },
+ .json => {
+ const root = (try resolver.caches.json.parseJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
+ return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?;
+ },
+ .toml => {
+ const root = try TOML.parse(&source, log, allocator);
+ return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?;
+ },
+ else => return js_ast.Ast.empty,
+ }
+ }
+
fn run_(
task: *ParseTask,
this: *ThreadPool.Worker,
@@ -874,12 +906,9 @@ const ParseTask = struct {
};
step.* = .parse;
- if (entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0)) {
- debug("skipping empty file: {s}", .{file_path.text});
- return null;
- }
+ const is_empty = entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0);
- const use_directive = if (this.ctx.bundler.options.react_server_components)
+ const use_directive = if (!is_empty and this.ctx.bundler.options.react_server_components)
UseDirective.parse(entry.contents)
else
.none;
@@ -893,7 +922,7 @@ const ParseTask = struct {
};
const source_dir = file_path.sourceDir();
- const loader = task.loader orelse bundler.options.loader(file_path.name.ext);
+ const loader = task.loader orelse file_path.loader(&bundler.options.loaders) orelse options.Loader.file;
const platform = use_directive.platform(task.known_platform orelse bundler.options.platform);
var resolve_queue = ResolveQueue.init(bun.default_allocator);
@@ -901,248 +930,255 @@ const ParseTask = struct {
errdefer resolve_queue.clearAndFree();
- switch (loader) {
- .jsx, .tsx, .js, .ts => {
- task.jsx.parse = loader.isJSX();
-
- var opts = js_parser.Parser.Options.init(task.jsx, loader);
- opts.transform_require_to_import = false;
- opts.can_import_from_bundle = false;
- opts.features.allow_runtime = !source.index.isRuntime();
- opts.features.dynamic_require = platform.isBun();
- opts.warn_about_unbundled_modules = false;
- opts.macro_context = &this.data.macro_context;
- opts.bundle = true;
- opts.features.top_level_await = true;
- opts.features.jsx_optimization_inline = platform.isBun() and (bundler.options.jsx_optimization_inline orelse !task.jsx.development);
- opts.features.auto_import_jsx = !opts.features.jsx_optimization_inline and task.jsx.parse and bundler.options.auto_import_jsx;
- opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript();
- opts.tree_shaking = task.tree_shaking;
- opts.module_type = task.module_type;
-
- var ast = (try resolver.caches.js.parse(
- bundler.allocator,
- opts,
- bundler.options.define,
- log,
- &source,
- )) orelse return error.EmptyAST;
-
- step.* = .resolve;
- ast.platform = platform;
- var estimated_resolve_queue_count: usize = 0;
- for (ast.import_records.slice()) |*import_record| {
- if (import_record.is_internal) {
- import_record.tag = .runtime;
- import_record.source_index = Index.runtime;
- }
+ var opts = js_parser.Parser.Options.init(task.jsx, loader);
+ opts.transform_require_to_import = false;
+ opts.can_import_from_bundle = false;
+ opts.features.allow_runtime = !source.index.isRuntime();
+ opts.features.dynamic_require = platform.isBun();
+ opts.warn_about_unbundled_modules = false;
+ opts.macro_context = &this.data.macro_context;
+ opts.bundle = true;
+ opts.features.top_level_await = true;
+ opts.features.jsx_optimization_inline = platform.isBun() and (bundler.options.jsx_optimization_inline orelse !task.jsx.development);
+ opts.features.auto_import_jsx = !opts.features.jsx_optimization_inline and task.jsx.parse and bundler.options.auto_import_jsx;
+ opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript();
+ opts.tree_shaking = task.tree_shaking;
+ opts.module_type = task.module_type;
+ task.jsx.parse = loader.isJSX();
+
+ var ast: js_ast.Ast = if (!is_empty)
+ try getAST(log, bundler, opts, allocator, resolver, source, loader)
+ else brk: {
+ var empty = js_ast.Ast.empty;
+ empty.named_imports.allocator = allocator;
+ empty.named_exports.allocator = allocator;
+ var _parts = allocator.alloc(js_ast.Part, 1) catch unreachable;
+ _parts[0] = js_ast.Part{
+ .can_be_removed_if_unused = true,
+ };
+ empty.parts = BabyList(js_ast.Part).init(_parts[0..1]);
+ break :brk empty;
+ };
- if (import_record.is_unused) {
- import_record.source_index = Index.invalid;
- }
+ ast.platform = platform;
+ if (ast.parts.len <= 1) {
+ task.side_effects = _resolver.SideEffects.no_side_effects__empty_ast;
+ }
+
+ var estimated_resolve_queue_count: usize = 0;
+ for (ast.import_records.slice()) |*import_record| {
+ if (import_record.is_internal) {
+ import_record.tag = .runtime;
+ import_record.source_index = Index.runtime;
+ }
+
+ if (import_record.is_unused) {
+ import_record.source_index = Index.invalid;
+ }
+
+ // Don't resolve the runtime
+ if (import_record.is_internal or import_record.is_unused) {
+ continue;
+ }
+ estimated_resolve_queue_count += 1;
+ }
+
+ try resolve_queue.ensureUnusedCapacity(estimated_resolve_queue_count);
+ var last_error: ?anyerror = null;
+ for (ast.import_records.slice()) |*import_record| {
+ // Don't resolve the runtime
+ if (import_record.is_unused or import_record.is_internal) {
+ continue;
+ }
- // Don't resolve the runtime
- if (import_record.is_internal or import_record.is_unused) {
+ if (platform.isBun()) {
+ if (JSC.HardcodedModule.Aliases.get(import_record.path.text)) |replacement| {
+ import_record.path.text = replacement.path;
+ import_record.tag = replacement.tag;
+ import_record.source_index = Index.invalid;
+ continue;
+ }
+
+ if (JSC.DisabledModule.has(import_record.path.text)) {
+ import_record.path.is_disabled = true;
+ import_record.do_commonjs_transform_in_printer = true;
+ import_record.source_index = Index.invalid;
+ continue;
+ }
+
+ if (bundler.options.rewrite_jest_for_tests) {
+ if (strings.eqlComptime(
+ import_record.path.text,
+ "@jest/globals",
+ ) or strings.eqlComptime(
+ import_record.path.text,
+ "vitest",
+ )) {
+ import_record.path.namespace = "bun";
+ import_record.tag = .bun_test;
+ import_record.path.text = "test";
continue;
}
- estimated_resolve_queue_count += 1;
}
- try resolve_queue.ensureUnusedCapacity(estimated_resolve_queue_count);
- var last_error: ?anyerror = null;
- for (ast.import_records.slice()) |*import_record| {
- // Don't resolve the runtime
- if (import_record.is_unused or import_record.is_internal) {
- continue;
+ if (strings.hasPrefixComptime(import_record.path.text, "bun:")) {
+ import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]);
+ import_record.path.namespace = "bun";
+ import_record.source_index = Index.invalid;
+
+ if (strings.eqlComptime(import_record.path.text, "test")) {
+ import_record.tag = .bun_test;
}
- if (platform.isBun()) {
- if (JSC.HardcodedModule.Aliases.get(import_record.path.text)) |replacement| {
- import_record.path.text = replacement.path;
- import_record.tag = replacement.tag;
- import_record.source_index = Index.invalid;
- continue;
- }
+ // don't link bun
+ continue;
+ }
+ }
- if (JSC.DisabledModule.has(import_record.path.text)) {
- import_record.path.is_disabled = true;
- import_record.do_commonjs_transform_in_printer = true;
- import_record.source_index = Index.invalid;
- continue;
- }
+ if (resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |_resolved_import| {
+ var resolve_result = _resolved_import;
+ // if there were errors, let's go ahead and collect them all
+ if (last_error != null) continue;
- if (bundler.options.rewrite_jest_for_tests) {
- if (strings.eqlComptime(
- import_record.path.text,
- "@jest/globals",
- ) or strings.eqlComptime(
- import_record.path.text,
- "vitest",
- )) {
- import_record.path.namespace = "bun";
- import_record.tag = .bun_test;
- import_record.path.text = "test";
- continue;
- }
- }
+ var path: *Fs.Path = resolve_result.path() orelse {
+ import_record.path.is_disabled = true;
+ import_record.source_index = Index.invalid;
- if (strings.hasPrefixComptime(import_record.path.text, "bun:")) {
- import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]);
- import_record.path.namespace = "bun";
- import_record.source_index = Index.invalid;
+ continue;
+ };
- if (strings.eqlComptime(import_record.path.text, "test")) {
- import_record.tag = .bun_test;
- }
+ if (resolve_result.is_external) {
+ continue;
+ }
- // don't link bun
- continue;
- }
- }
+ var resolve_entry = try resolve_queue.getOrPut(wyhash(0, path.text));
+ if (resolve_entry.found_existing) {
+ import_record.path = resolve_entry.value_ptr.path;
- if (resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |_resolved_import| {
- var resolve_result = _resolved_import;
- // if there were errors, let's go ahead and collect them all
- if (last_error != null) continue;
+ continue;
+ }
- var path: *Fs.Path = resolve_result.path() orelse {
- import_record.path.is_disabled = true;
- import_record.source_index = Index.invalid;
+ if (path.pretty.ptr == path.text.ptr) {
+ // TODO: outbase
+ const rel = bun.path.relative(bundler.fs.top_level_dir, path.text);
+ if (rel.len > 0 and rel[0] != '.') {
+ path.pretty = rel;
+ }
+ }
- continue;
- };
+ var secondary_path_to_copy: ?Fs.Path = null;
+ if (resolve_result.path_pair.secondary) |*secondary| {
+ if (!secondary.is_disabled and
+ secondary != path and
+ !strings.eqlLong(secondary.text, path.text, true))
+ {
+ secondary_path_to_copy = try secondary.dupeAlloc(allocator);
+ }
+ }
- if (resolve_result.is_external) {
- continue;
- }
+ path.* = try path.dupeAlloc(allocator);
+ import_record.path = path.*;
+ debug("created ParseTask: {s}", .{path.text});
- var resolve_entry = try resolve_queue.getOrPut(wyhash(0, path.text));
- if (resolve_entry.found_existing) {
- import_record.path = resolve_entry.value_ptr.path;
+ resolve_entry.value_ptr.* = ParseTask.init(&resolve_result, null);
+ resolve_entry.value_ptr.secondary_path_for_commonjs_interop = secondary_path_to_copy;
- continue;
- }
+ if (use_directive != .none) {
+ resolve_entry.value_ptr.known_platform = platform;
+ } else if (task.known_platform) |known_platform| {
+ resolve_entry.value_ptr.known_platform = known_platform;
+ }
- if (path.pretty.ptr == path.text.ptr) {
- // TODO: outbase
- const rel = bun.path.relative(bundler.fs.top_level_dir, path.text);
- if (rel.len > 0 and rel[0] != '.') {
- path.pretty = rel;
- }
- }
+ resolve_entry.value_ptr.jsx.development = task.jsx.development;
- var secondary_path_to_copy: ?Fs.Path = null;
- if (resolve_result.path_pair.secondary) |*secondary| {
- if (!secondary.is_disabled and
- secondary != path and
- !strings.eqlLong(secondary.text, path.text, true))
- {
- secondary_path_to_copy = try secondary.dupeAlloc(allocator);
+ if (resolve_entry.value_ptr.loader == null) {
+ resolve_entry.value_ptr.loader = path.loader(&bundler.options.loaders);
+ resolve_entry.value_ptr.tree_shaking = task.tree_shaking;
+ }
+ } else |err| {
+ // Disable failing packages from being printed.
+ // This may cause broken code to be written.
+ // However, doing this means we surface all of the resolve errors
+ // rather than just the first one.
+ import_record.path.is_disabled = true;
+
+ switch (err) {
+ error.ModuleNotFound => {
+ const addError = Logger.Log.addResolveErrorWithTextDupe;
+
+ if (!import_record.handles_import_errors) {
+ last_error = err;
+ if (isPackagePath(import_record.path.text)) {
+ if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
+ try addError(
+ log,
+ &source,
+ import_record.range,
+ this.allocator,
+ "Could not resolve Node.js builtin: \"{s}\".",
+ .{import_record.path.text},
+ import_record.kind,
+ );
+ } else {
+ try addError(
+ log,
+ &source,
+ import_record.range,
+ this.allocator,
+ "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?",
+ .{import_record.path.text},
+ import_record.kind,
+ );
+ }
+ } else {
+ try addError(
+ log,
+ &source,
+ import_record.range,
+ this.allocator,
+ "Could not resolve: \"{s}\"",
+ .{
+ import_record.path.text,
+ },
+ import_record.kind,
+ );
}
}
+ },
+ // assume other errors are already in the log
+ else => {
+ last_error = err;
+ },
+ }
+ }
+ }
- path.* = try path.dupeAlloc(allocator);
- import_record.path = path.*;
- debug("created ParseTask: {s}", .{path.text});
-
- resolve_entry.value_ptr.* = ParseTask.init(&resolve_result, null);
- resolve_entry.value_ptr.secondary_path_for_commonjs_interop = secondary_path_to_copy;
- if (use_directive != .none) {
- resolve_entry.value_ptr.known_platform = platform;
- } else if (task.known_platform) |known_platform| {
- resolve_entry.value_ptr.known_platform = known_platform;
- }
+ if (last_error) |err| {
+ debug("failed with error: {s}", .{@errorName(err)});
+ return err;
+ }
- resolve_entry.value_ptr.jsx.development = task.jsx.development;
+ // Allow the AST to outlive this call
+ _ = js_ast.Expr.Data.Store.toOwnedSlice();
+ _ = js_ast.Stmt.Data.Store.toOwnedSlice();
- if (resolve_entry.value_ptr.loader == null) {
- resolve_entry.value_ptr.loader = bundler.options.loader(path.name.ext);
- resolve_entry.value_ptr.tree_shaking = task.tree_shaking;
- }
- } else |err| {
- // Disable failing packages from being printed.
- // This may cause broken code to be written.
- // However, doing this means we surface all of the resolve errors
- // rather than just the first one.
- import_record.path.is_disabled = true;
-
- switch (err) {
- error.ModuleNotFound => {
- const addError = Logger.Log.addResolveErrorWithTextDupe;
-
- if (!import_record.handles_import_errors) {
- last_error = err;
- if (isPackagePath(import_record.path.text)) {
- if (platform.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve Node.js builtin: \"{s}\".",
- .{import_record.path.text},
- import_record.kind,
- );
- } else {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?",
- .{import_record.path.text},
- import_record.kind,
- );
- }
- } else {
- try addError(
- log,
- &source,
- import_record.range,
- this.allocator,
- "Could not resolve: \"{s}\"",
- .{
- import_record.path.text,
- },
- import_record.kind,
- );
- }
- }
- },
- // assume other errors are already in the log
- else => {
- last_error = err;
- },
- }
- }
- }
+ // Never a React client component if RSC is not enabled.
+ std.debug.assert(use_directive == .none or bundler.options.react_server_components);
- if (last_error) |err| {
- debug("failed with error: {s}", .{@errorName(err)});
- return err;
- }
+ step.* = .resolve;
+ ast.platform = platform;
- // Allow the AST to outlive this call
- _ = js_ast.Expr.Data.Store.toOwnedSlice();
- _ = js_ast.Stmt.Data.Store.toOwnedSlice();
-
- // Never a React client component if RSC is not enabled.
- std.debug.assert(use_directive == .none or bundler.options.react_server_components);
-
- return Result.Success{
- .ast = ast,
- .source = source,
- .resolve_queue = resolve_queue,
- .log = log.*,
- .use_directive = use_directive,
- .watcher_data = .{
- .fd = if (task.contents_or_fd == .fd) task.contents_or_fd.fd.file else 0,
- .dir_fd = if (task.contents_or_fd == .fd) task.contents_or_fd.fd.dir else 0,
- },
- };
+ return Result.Success{
+ .ast = ast,
+ .source = source,
+ .resolve_queue = resolve_queue,
+ .log = log.*,
+ .use_directive = use_directive,
+ .watcher_data = .{
+ .fd = if (task.contents_or_fd == .fd) task.contents_or_fd.fd.file else 0,
+ .dir_fd = if (task.contents_or_fd == .fd) task.contents_or_fd.fd.dir else 0,
},
- else => return null,
- }
+ };
}
pub fn callback(this: *ThreadPoolLib.Task) void {
@@ -1417,6 +1453,7 @@ pub const Graph = struct {
source: Logger.Source,
loader: options.Loader = options.Loader.file,
side_effects: _resolver.SideEffects = _resolver.SideEffects.has_side_effects,
+ additional_files: BabyList(Index.Int) = .{},
pub const List = MultiArrayList(InputFile);
};
@@ -2435,6 +2472,167 @@ const LinkerContext = struct {
chunk.content.javascript.parts_in_chunk_in_order = parts_in_chunk_order;
}
+ pub fn generateNamedExportInFile(this: *LinkerContext, source_index: Index.Int, module_ref: Ref, name: []const u8, alias: []const u8) !struct { Ref, u32 } {
+ const ref = this.graph.generateNewSymbol(source_index, .other, name);
+ const part_index = this.graph.addPartToFile(source_index, .{
+ .declared_symbols = js_ast.DeclaredSymbol.List.fromSlice(
+ this.allocator,
+ &[_]js_ast.DeclaredSymbol{
+ .{ .ref = ref, .is_top_level = true },
+ },
+ ) catch unreachable,
+ .can_be_removed_if_unused = true,
+ }) catch unreachable;
+
+ try this.graph.generateSymbolImportAndUse(source_index, part_index, module_ref, 1, Index.init(source_index));
+ var top_level = &this.graph.meta.items(.top_level_symbol_to_parts_overlay)[source_index];
+ var parts_list = this.allocator.alloc(u32, 1) catch unreachable;
+ parts_list[0] = part_index;
+
+ top_level.put(this.allocator, ref, BabyList(u32).init(parts_list)) catch unreachable;
+
+ var resolved_exports = &this.graph.meta.items(.resolved_exports)[source_index];
+ resolved_exports.put(this.allocator, alias, ExportData{
+ .data = ImportTracker{
+ .source_index = Index.init(source_index),
+ .import_ref = ref,
+ },
+ }) catch unreachable;
+ return .{ ref, part_index };
+ }
+
+ fn generateCodeForLazyExport(this: *LinkerContext, source_index: Index.Int) !void {
+ const exports_kind = this.graph.ast.items(.exports_kind)[source_index];
+ var parts = &this.graph.ast.items(.parts)[source_index];
+
+ if (parts.len < 1) {
+ @panic("Internal error: expected at least one part for lazy export");
+ }
+
+ var part: *js_ast.Part = &parts.ptr[1];
+
+ if (part.stmts.len == 0) {
+ @panic("Internal error: expected at least one statement in the lazy export");
+ }
+
+ const stmt: Stmt = part.stmts[0];
+ if (stmt.data != .s_lazy_export) {
+ @panic("Internal error: expected top-level lazy export statement");
+ }
+
+ const expr = Expr{
+ .data = stmt.data.s_lazy_export,
+ .loc = stmt.loc,
+ };
+ const module_ref = this.graph.ast.items(.module_ref)[source_index].?;
+
+ switch (exports_kind) {
+ .cjs => {
+ part.stmts[0] = Stmt.assign(
+ Expr.init(
+ E.Dot,
+ E.Dot{
+ .target = Expr.initIdentifier(module_ref, stmt.loc),
+ .name = "exports",
+ .name_loc = stmt.loc,
+ },
+ stmt.loc,
+ ),
+ expr,
+ this.allocator,
+ );
+ try this.graph.generateSymbolImportAndUse(source_index, 0, module_ref, 1, Index.init(source_index));
+ },
+ else => {
+ // Otherwise, generate ES6 export statements. These are added as additional
+ // parts so they can be tree shaken individually.
+ part.stmts.len = 0;
+
+ if (expr.data == .e_object) {
+ for (expr.data.e_object.properties.slice()) |property_| {
+ const property: G.Property = property_;
+ if (property.key == null or property.key.?.data != .e_string or property.value == null or
+ property.key.?.data.e_string.eqlComptime("default") or property.key.?.data.e_string.eqlComptime("__esModule"))
+ {
+ continue;
+ }
+
+ const name = property.key.?.data.e_string.slice(this.allocator);
+
+ // TODO: support non-identifier names
+ if (!bun.js_lexer.isIdentifier(name))
+ continue;
+
+ // This initializes the generated variable with a copy of the property
+ // value, which is INCORRECT for values that are objects/arrays because
+ // they will have separate object identity. This is fixed up later in
+ // "generateCodeForFileInChunkJS" by changing the object literal to
+ // reference this generated variable instead.
+ //
+ // Changing the object literal is deferred until that point instead of
+ // doing it now because we only want to do this for top-level variables
+ // that actually end up being used, and we don't know which ones will
+ // end up actually being used at this point (since import binding hasn't
+ // happened yet). So we need to wait until after tree shaking happens.
+ const generated = try this.generateNamedExportInFile(source_index, module_ref, name, name);
+ parts.ptr[generated[1]].stmts = this.allocator.alloc(Stmt, 1) catch unreachable;
+ parts.ptr[generated[1]].stmts[0] = Stmt.alloc(
+ S.Local,
+ S.Local{
+ .is_export = true,
+ .decls = bun.fromSlice(
+ []js_ast.G.Decl,
+ this.allocator,
+ []const js_ast.G.Decl,
+ &.{
+ .{
+ .binding = Binding.alloc(
+ this.allocator,
+ B.Identifier{
+ .ref = generated[0],
+ },
+ expr.loc,
+ ),
+ .value = property.value.?,
+ },
+ },
+ ) catch unreachable,
+ },
+ property.key.?.loc,
+ );
+ }
+ }
+
+ {
+ const generated = try this.generateNamedExportInFile(
+ source_index,
+ module_ref,
+ std.fmt.allocPrint(
+ this.allocator,
+ "{}_default",
+ .{this.parse_graph.input_files.items(.source)[source_index].fmtIdentifier()},
+ ) catch unreachable,
+ "default",
+ );
+ parts.ptr[generated[1]].stmts = this.allocator.alloc(Stmt, 1) catch unreachable;
+ parts.ptr[generated[1]].stmts[0] = Stmt.alloc(
+ S.ExportDefault,
+ S.ExportDefault{
+ .default_name = .{
+ .ref = generated[0],
+ .loc = stmt.loc,
+ },
+ .value = .{
+ .expr = expr,
+ },
+ },
+ stmt.loc,
+ );
+ }
+ },
+ }
+ }
+
pub fn scanImportsAndExports(this: *LinkerContext) !void {
const reachable = this.graph.reachable_files;
const output_format = this.options.output_format;
@@ -2450,6 +2648,7 @@ const LinkerContext = struct {
var export_star_import_records: [][]u32 = this.graph.ast.items(.export_star_import_records);
var exports_refs: []Ref = this.graph.ast.items(.exports_ref);
var module_refs: []?Ref = this.graph.ast.items(.module_ref);
+ var lazy_exports: []bool = this.graph.ast.items(.has_lazy_export);
var symbols = &this.graph.symbols;
defer this.graph.symbols = symbols.*;
@@ -2494,7 +2693,7 @@ const LinkerContext = struct {
// In that case the module *is* considered a CommonJS module because
// the namespace object must be created.
if ((record.contains_import_star or record.contains_default_alias) and
- // TODO: hasLazyExport
+ !lazy_exports[other_file] and
exports_kind[other_file] == .none)
{
exports_kind[other_file] = .cjs;
@@ -2619,13 +2818,16 @@ const LinkerContext = struct {
var export_star_ctx: ?ExportStarContext = null;
var resolved_exports: []ResolvedExports = this.graph.meta.items(.resolved_exports);
var resolved_export_stars: []ExportData = this.graph.meta.items(.resolved_export_star);
+ var has_lazy_export: []bool = this.graph.ast.items(.has_lazy_export);
for (reachable) |source_index_| {
const source_index = source_index_.get();
const id = source_index;
// --
- // TODO: generateCodeForLazyExport here!
+ if (has_lazy_export[id]) {
+ try this.generateCodeForLazyExport(id);
+ }
// --
// Propagate exports for export star statements
@@ -2871,8 +3073,7 @@ const LinkerContext = struct {
if (named_imports[id].get(ref)) |named_import| {
for (named_import.local_parts_with_uses.slice()) |part_index| {
var part: *js_ast.Part = &parts[part_index];
- const parts_declaring_symbol: []u32 =
- this.graph.ast.items(.top_level_symbols_to_parts)[import_id].get(import.data.import_ref).?.slice();
+ const parts_declaring_symbol: []u32 = this.graph.topLevelSymbolToParts(import_id, import.data.import_ref);
part.dependencies.ensureUnusedCapacity(
this.allocator,
@@ -5692,6 +5893,14 @@ const LinkerContext = struct {
stmts.reset();
+ const part_index_for_lazy_default_export: u32 = if (ast.has_lazy_export) brk: {
+ if (c.graph.meta.items(.resolved_exports)[part_range.source_index.get()].get("default")) |default| {
+ break :brk c.graph.topLevelSymbolToParts(part_range.source_index.get(), default.data.import_ref)[0];
+ }
+
+ break :brk std.math.maxInt(u32);
+ } else std.math.maxInt(u32);
+
// TODO: handle directive
if (namespace_export_part_index >= part_range.part_index_begin and
namespace_export_part_index < part_range.part_index_end and
@@ -5720,8 +5929,6 @@ const LinkerContext = struct {
stmts.inside_wrapper_suffix.clearRetainingCapacity();
}
- // TODO: defaultLazyExport
-
// Add all other parts in this chunk
for (parts, 0..) |part, index_| {
const index = part_range.part_index_begin + @truncate(u32, index_);
@@ -5742,13 +5949,101 @@ const LinkerContext = struct {
continue;
}
- // TODO: lazy default export
+ var single_stmts_list = [1]Stmt{undefined};
+ var part_stmts = part.stmts;
+
+ // If this could be a JSON or TOML file that exports a top-level object literal, go
+ // over the non-default top-level properties that ended up being imported
+ // and substitute references to them into the main top-level object literal.
+ // So this JSON file:
+ //
+ // {
+ // "foo": [1, 2, 3],
+ // "bar": [4, 5, 6],
+ // }
+ //
+ // is initially compiled into this:
+ //
+ // export var foo = [1, 2, 3];
+ // export var bar = [4, 5, 6];
+ // export default {
+ // foo: [1, 2, 3],
+ // bar: [4, 5, 6],
+ // };
+ //
+ // But we turn it into this if both "foo" and "default" are imported:
+ //
+ // export var foo = [1, 2, 3];
+ // export default {
+ // foo,
+ // bar: [4, 5, 6],
+ // };
+ //
+ if (index == part_index_for_lazy_default_export) {
+ std.debug.assert(index != std.math.maxInt(u32));
+
+ const stmt = part_stmts[0];
+
+ if (stmt.data != .s_export_default)
+ @panic("expected Lazy default export to be an export default statement");
+
+ var default_export = stmt.data.s_export_default;
+ var default_expr = default_export.value.expr;
+
+ // Be careful: the top-level value in a JSON file is not necessarily an object
+ if (default_expr.data == .e_object) {
+ var new_properties = std.ArrayList(js_ast.G.Property).initCapacity(temp_allocator, default_expr.data.e_object.properties.len) catch unreachable;
+ var resolved_exports = c.graph.meta.items(.resolved_exports)[part_range.source_index.get()];
+
+ // If any top-level properties ended up being imported directly, change
+ // the property to just reference the corresponding variable instead
+ for (default_expr.data.e_object.properties.slice()) |prop| {
+ if (prop.key == null or prop.key.?.data != .e_string or prop.value == null) continue;
+ const name = prop.key.?.data.e_string.slice(temp_allocator);
+ if (strings.eqlComptime(name, "default") or
+ strings.eqlComptime(name, "__esModule") or
+ !bun.js_lexer.isIdentifier(name)) continue;
+
+ if (resolved_exports.get(name)) |export_data| {
+ const export_ref = export_data.data.import_ref;
+ const export_part = ast.parts.slice()[c.graph.topLevelSymbolToParts(part_range.source_index.get(), export_ref)[0]];
+ if (export_part.is_live) {
+ new_properties.appendAssumeCapacity(
+ .{
+ .key = prop.key,
+ .value = Expr.initIdentifier(export_ref, prop.value.?.loc),
+ },
+ );
+ }
+ }
+ }
+
+ default_expr = Expr.allocate(
+ temp_allocator,
+ E.Object,
+ E.Object{
+ .properties = BabyList(G.Property).init(new_properties.items),
+ },
+ default_expr.loc,
+ );
+ }
+
+ single_stmts_list[0] = Stmt.allocate(
+ temp_allocator,
+ S.ExportDefault,
+ .{
+ .default_name = default_export.default_name,
+ .value = .{ .expr = default_expr },
+ },
+ stmt.loc,
+ );
+ part_stmts = single_stmts_list[0..];
+ }
- // convert
c.convertStmtsForChunk(
part_range.source_index.get(),
stmts,
- part.stmts,
+ part_stmts,
chunk,
temp_allocator,
flags.wrap,
@@ -6930,7 +7225,7 @@ const LinkerContext = struct {
// Is this a named import of a file without any exports?
if (!named_import.alias_is_star and
- // TODO hasLazyExport
+ !c.parse_graph.ast.items(.has_lazy_export)[other_id] and
// CommonJS exports
c.graph.ast.items(.export_keyword)[other_id].len == 0 and !strings.eqlComptime(named_import.alias orelse "", "default") and
diff --git a/src/fs.zig b/src/fs.zig
index a3b50ec10..3f0303e32 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -1193,6 +1193,21 @@ pub const Path = struct {
is_disabled: bool = false,
is_symlink: bool = false,
+ pub fn loader(this: *const Path, loaders: *const bun.options.Loader.HashTable) ?bun.options.Loader {
+ if (this.isDataURL()) {
+ return bun.options.Loader.dataurl;
+ }
+
+ // without the leading .
+ const ext = strings.trimLeadingChar(this.name.ext, '.');
+
+ return loaders.get(ext) orelse bun.options.Loader.fromString(ext);
+ }
+
+ pub fn isDataURL(this: *const Path) bool {
+ return strings.eqlComptime(this.namespace, "dataurl");
+ }
+
pub fn isBun(this: *const Path) bool {
return strings.eqlComptime(this.namespace, "bun");
}
diff --git a/src/js_ast.zig b/src/js_ast.zig
index cb291dc0f..743110d0b 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -278,7 +278,13 @@ pub const AssignTarget = enum(u2) {
}
};
-pub const LocRef = struct { loc: logger.Loc = logger.Loc.Empty, ref: ?Ref = null };
+pub const LocRef = struct {
+ loc: logger.Loc = logger.Loc.Empty,
+
+ // TODO: remove this optional and make Ref a function getter
+ // That will make this struct 128 bits instead of 192 bits and we can remove some heap allocations
+ ref: ?Ref = null,
+};
pub const Flags = struct {
pub const JSXElement = enum {
@@ -2171,7 +2177,6 @@ pub const Stmt = struct {
S.Debugger => Stmt.comptime_init("s_debugger", S.Debugger, origData, loc),
S.Directive => Stmt.comptime_init("s_directive", S.Directive, origData, loc),
S.DoWhile => Stmt.comptime_init("s_do_while", S.DoWhile, origData, loc),
- S.Empty => Stmt.comptime_init("s_empty", S.Empty, origData, loc),
S.Enum => Stmt.comptime_init("s_enum", S.Enum, origData, loc),
S.ExportClause => Stmt.comptime_init("s_export_clause", S.ExportClause, origData, loc),
S.ExportDefault => Stmt.comptime_init("s_export_default", S.ExportDefault, origData, loc),
@@ -2186,7 +2191,6 @@ pub const Stmt = struct {
S.If => Stmt.comptime_init("s_if", S.If, origData, loc),
S.Import => Stmt.comptime_init("s_import", S.Import, origData, loc),
S.Label => Stmt.comptime_init("s_label", S.Label, origData, loc),
- S.LazyExport => Stmt.comptime_init("s_lazy_export", S.LazyExport, origData, loc),
S.Local => Stmt.comptime_init("s_local", S.Local, origData, loc),
S.Namespace => Stmt.comptime_init("s_namespace", S.Namespace, origData, loc),
S.Return => Stmt.comptime_init("s_return", S.Return, origData, loc),
@@ -2252,7 +2256,6 @@ pub const Stmt = struct {
S.If => Stmt.comptime_alloc("s_if", S.If, origData, loc),
S.Import => Stmt.comptime_alloc("s_import", S.Import, origData, loc),
S.Label => Stmt.comptime_alloc("s_label", S.Label, origData, loc),
- S.LazyExport => Stmt.comptime_alloc("s_lazy_export", S.LazyExport, origData, loc),
S.Local => Stmt.comptime_alloc("s_local", S.Local, origData, loc),
S.Namespace => Stmt.comptime_alloc("s_namespace", S.Namespace, origData, loc),
S.Return => Stmt.comptime_alloc("s_return", S.Return, origData, loc),
@@ -2299,7 +2302,6 @@ pub const Stmt = struct {
S.If => Stmt.allocateData(allocator, "s_if", S.If, origData, loc),
S.Import => Stmt.allocateData(allocator, "s_import", S.Import, origData, loc),
S.Label => Stmt.allocateData(allocator, "s_label", S.Label, origData, loc),
- S.LazyExport => Stmt.allocateData(allocator, "s_lazy_export", S.LazyExport, origData, loc),
S.Local => Stmt.allocateData(allocator, "s_local", S.Local, origData, loc),
S.Namespace => Stmt.allocateData(allocator, "s_namespace", S.Namespace, origData, loc),
S.Return => Stmt.allocateData(allocator, "s_return", S.Return, origData, loc),
@@ -2330,9 +2332,9 @@ pub const Stmt = struct {
s_export_from,
s_export_star,
s_expr,
+ s_for,
s_for_in,
s_for_of,
- s_for,
s_function,
s_if,
s_import,
@@ -2382,7 +2384,6 @@ pub const Stmt = struct {
s_if: *S.If,
s_import: *S.Import,
s_label: *S.Label,
- s_lazy_export: *S.LazyExport,
s_local: *S.Local,
s_namespace: *S.Namespace,
s_return: *S.Return,
@@ -2396,6 +2397,8 @@ pub const Stmt = struct {
s_empty: S.Empty, // special case, its a zero value type
s_debugger: S.Debugger,
+ s_lazy_export: Expr.Data,
+
pub const Store = struct {
const Union = [_]type{
S.Block,
@@ -2419,7 +2422,6 @@ pub const Stmt = struct {
S.If,
S.Import,
S.Label,
- S.LazyExport,
S.Local,
S.Namespace,
S.Return,
@@ -4779,10 +4781,6 @@ pub const S = struct {
// This is an "export = value;" statement in TypeScript
pub const ExportEquals = struct { value: ExprNodeIndex };
- // The decision of whether to export an expression using "module.exports" or
- // "export default" is deferred until linking using this statement kind
- pub const LazyExport = struct { value: ExprNodeIndex };
-
pub const Label = struct { name: LocRef, stmt: StmtNodeIndex };
// This is a stand-in for a TypeScript type declaration
@@ -5263,8 +5261,8 @@ pub const Ast = struct {
export_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax
top_level_await_keyword: logger.Range = logger.Range.None,
- // These are stored at the AST level instead of on individual AST nodes so
- // they can be manipulated efficiently without a full AST traversal
+ /// These are stored at the AST level instead of on individual AST nodes so
+ /// they can be manipulated efficiently without a full AST traversal
import_records: ImportRecord.List = .{},
hashbang: ?string = null,
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index d96942f95..f5e00d1ae 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -1952,9 +1952,9 @@ fn NewLexer_(
return lex;
}
- pub fn init(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) !LexerType {
+ pub fn initWithoutReading(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) LexerType {
var empty_string_literal: JavascriptString = &emptyJavaScriptString;
- var lex = LexerType{
+ return LexerType{
.log = log,
.source = source,
.string_literal = empty_string_literal,
@@ -1963,6 +1963,10 @@ fn NewLexer_(
.allocator = allocator,
.comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator),
};
+ }
+
+ pub fn init(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) !LexerType {
+ var lex = initWithoutReading(log, source, allocator);
lex.step();
try lex.next();
diff --git a/src/js_parser.zig b/src/js_parser.zig
index f06cea752..37d1765d9 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -852,7 +852,6 @@ pub const ImportScanner = struct {
if (p.import_items_for_namespace.get(st.namespace_ref)) |entry| {
if (entry.count() > 0) {
has_any = true;
- break;
}
}
@@ -924,7 +923,7 @@ pub const ImportScanner = struct {
}
const namespace_ref = st.namespace_ref;
- const convert_star_to_clause = !p.options.bundle or (!p.options.enable_legacy_bundling and !p.options.can_import_from_bundle and p.symbols.items[namespace_ref.innerIndex()].use_count_estimate == 0);
+ const convert_star_to_clause = !p.options.bundle and (!p.options.enable_legacy_bundling and !p.options.can_import_from_bundle and p.symbols.items[namespace_ref.innerIndex()].use_count_estimate == 0);
if (convert_star_to_clause and !keep_unused_imports) {
st.star_name_loc = null;
@@ -985,12 +984,22 @@ pub const ImportScanner = struct {
}
p.named_imports.ensureUnusedCapacity(
- st.items.len + @as(
- usize,
- @boolToInt(st.default_name != null),
- ),
+ st.items.len + @as(usize, @boolToInt(st.default_name != null)) + @as(usize, @boolToInt(st.star_name_loc != null)),
) catch unreachable;
+ if (st.star_name_loc) |loc| {
+ p.named_imports.putAssumeCapacity(
+ namespace_ref,
+ js_ast.NamedImport{
+ .alias_is_star = true,
+ .alias = "",
+ .alias_loc = loc,
+ .namespace_ref = Ref.None,
+ .import_record_index = st.import_record_index,
+ },
+ );
+ }
+
if (st.default_name) |default| {
p.named_imports.putAssumeCapacity(
default.ref.?,
@@ -2731,6 +2740,48 @@ pub const Parser = struct {
scan_pass.approximate_newline_count = p.lexer.approximate_newline_count;
}
+ pub fn toLazyExportAST(this: *Parser, expr: Expr, comptime runtime_api_call: []const u8) !js_ast.Result {
+ var p: JavaScriptParser = undefined;
+ try JavaScriptParser.init(this.allocator, this.log, this.source, this.define, this.lexer, this.options, &p);
+ p.should_fold_typescript_constant_expressions = this.options.features.should_fold_typescript_constant_expressions;
+ defer p.lexer.deinit();
+ var result: js_ast.Result = undefined;
+ try p.prepareForVisitPass();
+
+ var final_expr = expr;
+
+ // Optionally call a runtime API function to transform the expression
+ if (runtime_api_call.len > 0) {
+ var args = try p.allocator.alloc(Expr, 1);
+ args[0] = expr;
+ final_expr = try p.callRuntime(expr.loc, runtime_api_call, args);
+ }
+
+ var ns_export_part = js_ast.Part{
+ .can_be_removed_if_unused = true,
+ };
+
+ var stmts = try p.allocator.alloc(js_ast.Stmt, 1);
+ stmts[0] = Stmt{
+ .data = .{
+ .s_lazy_export = expr.data,
+ },
+ .loc = expr.loc,
+ };
+ var part = js_ast.Part{
+ .stmts = stmts,
+ .symbol_uses = p.symbol_uses,
+ };
+ p.symbol_uses = .{};
+ var parts = try p.allocator.alloc(js_ast.Part, 2);
+ parts[0..2].* = .{ ns_export_part, part };
+
+ result.ast = try p.toAST(parts, js_ast.ExportsKind.none, null);
+ result.ok = true;
+
+ return result;
+ }
+
pub fn parse(self: *Parser) !js_ast.Result {
if (comptime Environment.isWasm) {
self.options.ts = true;
@@ -20686,3 +20737,40 @@ const DeferredArrowArgErrors = struct {
invalid_expr_await: logger.Range = logger.Range.None,
invalid_expr_yield: logger.Range = logger.Range.None,
};
+
+pub fn newLazyExportAST(
+ allocator: std.mem.Allocator,
+ define: *Define,
+ opts: Parser.Options,
+ log_to_copy_into: *logger.Log,
+ expr: Expr,
+ source: *const logger.Source,
+ comptime runtime_api_call: []const u8,
+) anyerror!?js_ast.Ast {
+ var temp_log = logger.Log.init(allocator);
+ var log = &temp_log;
+ var parser = Parser{
+ .options = opts,
+ .allocator = allocator,
+ .lexer = js_lexer.Lexer.initWithoutReading(log, source.*, allocator),
+ .define = define,
+ .source = source,
+ .log = log,
+ };
+
+ var result = parser.toLazyExportAST(
+ expr,
+ runtime_api_call,
+ ) catch |err| {
+ if (temp_log.errors == 0) {
+ log_to_copy_into.addRangeError(source, parser.lexer.range(), @errorName(err)) catch unreachable;
+ }
+
+ temp_log.appendToMaybeRecycled(log_to_copy_into, source) catch {};
+ return null;
+ };
+
+ temp_log.appendToMaybeRecycled(log_to_copy_into, source) catch {};
+ result.ast.has_lazy_export = true;
+ return if (result.ok) result.ast else null;
+}
diff --git a/src/options.zig b/src/options.zig
index f1983fd6a..b0756a396 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -658,7 +658,7 @@ pub const Platform = enum {
};
};
-pub const Loader = enum(u4) {
+pub const Loader = enum {
jsx,
js,
ts,
@@ -669,6 +669,11 @@ pub const Loader = enum(u4) {
toml,
wasm,
napi,
+ base64,
+ dataurl,
+ text,
+
+ pub const HashTable = bun.StringArrayHashMap(Loader);
pub fn canHaveSourceMap(this: Loader) bool {
return switch (this) {
@@ -697,6 +702,7 @@ pub const Loader = enum(u4) {
map.set(Loader.toml, "input.toml");
map.set(Loader.wasm, "input.wasm");
map.set(Loader.napi, "input.node");
+ map.set(Loader.text, "input.txt");
break :brk map;
};
@@ -722,26 +728,28 @@ pub const Loader = enum(u4) {
};
}
+ pub const names = bun.ComptimeStringMap(Loader, .{
+ .{ "js", Loader.js },
+ .{ "jsx", Loader.jsx },
+ .{ "ts", Loader.ts },
+ .{ "tsx", Loader.tsx },
+ .{ "css", Loader.css },
+ .{ "file", Loader.file },
+ .{ "json", Loader.json },
+ .{ "toml", Loader.toml },
+ .{ "wasm", Loader.wasm },
+ .{ "node", Loader.napi },
+ .{ "dataurl", Loader.dataurl },
+ .{ "base64", Loader.base64 },
+ });
+
pub fn fromString(slice_: string) ?Loader {
- const LoaderMatcher = strings.ExactSizeMatcher(4);
var slice = slice_;
if (slice.len > 0 and slice[0] == '.') {
slice = slice[1..];
}
- return switch (LoaderMatcher.matchLower(slice)) {
- LoaderMatcher.case("js") => Loader.js,
- LoaderMatcher.case("jsx") => Loader.jsx,
- LoaderMatcher.case("ts") => Loader.ts,
- LoaderMatcher.case("tsx") => Loader.tsx,
- LoaderMatcher.case("css") => Loader.css,
- LoaderMatcher.case("file") => Loader.file,
- LoaderMatcher.case("json") => Loader.json,
- LoaderMatcher.case("toml") => Loader.toml,
- LoaderMatcher.case("wasm") => Loader.wasm,
- LoaderMatcher.case("node") => Loader.napi,
- else => null,
- };
+ return names.getWithEql(slice, strings.eqlCaseInsensitiveASCIIICheckLength);
}
pub fn supportsClientEntryPoint(this: Loader) bool {
@@ -798,6 +806,10 @@ pub const Loader = enum(u4) {
pub fn isJavaScriptLikeOrJSON(loader: Loader) bool {
return switch (loader) {
.jsx, .js, .ts, .tsx, .json => true,
+
+ // toml is included because we can serialize to the same AST as JSON
+ .toml => true,
+
else => false,
};
}
@@ -1251,7 +1263,7 @@ pub const BundleOptions = struct {
footer: string = "",
banner: string = "",
define: *defines.Define,
- loaders: bun.StringArrayHashMap(Loader),
+ loaders: Loader.HashTable,
resolve_dir: string = "/",
jsx: JSX.Pragma = JSX.Pragma{},
auto_import_jsx: bool = true,
diff --git a/src/resolver/data_url.zig b/src/resolver/data_url.zig
index 1ce6381fb..0ce5b511d 100644
--- a/src/resolver/data_url.zig
+++ b/src/resolver/data_url.zig
@@ -76,29 +76,6 @@ pub const PercentEncoding = struct {
}
};
-pub const MimeType = enum {
- Unsupported,
- TextCSS,
- TextJavaScript,
- ApplicationJSON,
-
- pub const Map = ComptimeStringMap(MimeType, .{
- .{ "text/css", MimeType.TextCSS },
- .{ "text/javascript", MimeType.TextJavaScript },
- .{ "application/json", MimeType.ApplicationJSON },
- });
-
- pub fn decode(str: string) MimeType {
- // Remove things like ";charset=utf-8"
- var mime_type = str;
- if (strings.indexOfChar(mime_type, ';')) |semicolon| {
- mime_type = mime_type[0..semicolon];
- }
-
- return Map.get(mime_type) orelse MimeType.Unsupported;
- }
-};
-
pub const DataURL = struct {
mime_type: string,
data: string,
@@ -124,7 +101,7 @@ pub const DataURL = struct {
return parsed;
}
- pub fn decode_mime_type(d: DataURL) MimeType {
- return MimeType.decode(d.mime_type);
+ pub fn decodeMimeType(d: DataURL) bun.HTTP.MimeType {
+ return bun.HTTP.MimeType.init(d.mime_type);
}
};
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 4ef2c4819..21d09ecbb 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -798,7 +798,8 @@ pub const Resolver = struct {
const data_url: DataURL = _data_url;
// "import 'data:text/javascript,console.log(123)';"
// "@import 'data:text/css,body{background:white}';"
- if (data_url.decode_mime_type() != .Unsupported) {
+ const mime = data_url.decodeMimeType();
+ if (mime.category == .javascript or mime.category == .css or mime.category == .json or mime.category == .text) {
if (r.debug_logs) |*debug| {
debug.addNote("Putting this path in the \"dataurl\" namespace");
r.flushDebugLogs(.success) catch {};
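With the resolver classifying data: imports by bun.HTTP.MimeType category
instead of the removed local MimeType enum, JavaScript, CSS, JSON, and
plain-text data URLs all land in the "dataurl" namespace. A sketch of imports
the resolver now accepts (the JSON form is newly covered by the .json category
check; payloads must be percent-encoded):

    import "data:text/javascript,console.log(123)";
    import data from "data:application/json,%7B%22x%22%3A1%7D"; // {"x":1}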
diff --git a/test/bundler/esbuild/loader.test.ts b/test/bundler/esbuild/loader.test.ts
index eb8e38f92..648464d6e 100644
--- a/test/bundler/esbuild/loader.test.ts
+++ b/test/bundler/esbuild/loader.test.ts
@@ -7,6 +7,62 @@ var { describe, test, expect } = testForFile(import.meta.path);
// For debug, all files are written to $TEMP/bun-bundle-tests/loader
describe("bundler", () => {
+ itBundled("loader/LoaderJSONCommonJSAndES6", {
+ // GENERATED
+ files: {
+ "/entry.js": /* js */ `
+ const x_json = require('./x.json')
+ import y_json from './y.json'
+ import {small, if as fi} from './z.json'
+ console.log(JSON.stringify(x_json), JSON.stringify(y_json), small, fi)
+ `,
+ "/x.json": `{"x": true}`,
+ "/y.json": `{"y1": true, "y2": false}`,
+ "/z.json": /* json */ `
+ {
+ "big": "this is a big long line of text that should be discarded",
+ "small": "some small text",
+ "if": "test keyword imports"
+ }
+ `,
+ },
+ run: {
+ stdout: '{"x":true} {} some small text test keyword imports',
+ },
+ });
+
+ itBundled("loader/LoaderJSONSharedWithMultipleEntriesIssue413", {
+ // GENERATED
+ files: {
+ "/a.js": /* js */ `
+ import data from './data.json'
+ import {test} from './data.json';
+ import * as NSData from './data.json';
+
+ console.log('a:', JSON.stringify(data), data.test, test === data.test, NSData.test === data.test, NSData.default === data, NSData.default.test === data.test, JSON.stringify(NSData))
+ `,
+ "/b.js": /* js */ `
+ import data from './data.json'
+ import {test} from './data.json';
+ import * as NSData from './data.json';
+ console.log('b:', JSON.stringify(data), data.test, test === data.test, NSData.test === data.test, NSData.default === data, NSData.default.test === data.test, JSON.stringify(NSData))
+ `,
+ "/data.json": `{"test": 123}`,
+ },
+ entryPoints: ["/a.js", "/b.js"],
+ format: "esm",
+ run: [
+ {
+ file: "/out/a.js",
+ stdout: 'a: {"test":123} 123 true true true true {"test":123,"default":{"test":123}}',
+ },
+ {
+ file: "/out/b.js",
+ stdout: 'b: {"test":123} 123 true true true true {"test":123,"default":{"test":123}}',
+ },
+ ],
+ });
+
return;
itBundled("loader/LoaderFile", {
// GENERATED
@@ -105,26 +161,6 @@ describe("bundler", () => {
"/test.svg": `a\x00b\x80c\xFFd`,
},
});
- itBundled("loader/LoaderJSONCommonJSAndES6", {
- // GENERATED
- files: {
- "/entry.js": /* js */ `
- const x_json = require('./x.json')
- import y_json from './y.json'
- import {small, if as fi} from './z.json'
- console.log(x_json, y_json, small, fi)
- `,
- "/x.json": `{"x": true}`,
- "/y.json": `{"y1": true, "y2": false}`,
- "/z.json": /* json */ `
- {
- "big": "this is a big long line of text that should be discarded",
- "small": "some small text",
- "if": "test keyword imports"
- }
- `,
- },
- });
itBundled("loader/LoaderJSONInvalidIdentifierES6", {
// GENERATED
files: {
@@ -377,22 +413,6 @@ describe("bundler", () => {
format: "iife",
mode: "convertformat",
});
- itBundled("loader/LoaderJSONSharedWithMultipleEntriesIssue413", {
- // GENERATED
- files: {
- "/a.js": /* js */ `
- import data from './data.json'
- console.log('a:', data)
- `,
- "/b.js": /* js */ `
- import data from './data.json'
- console.log('b:', data)
- `,
- "/data.json": `{"test": 123}`,
- },
- entryPoints: ["/a.js", "/b.js"],
- format: "esm",
- });
itBundled("loader/LoaderFileWithQueryParameter", {
// GENERATED
files: {