author     2021-09-30 18:58:39 -0700
committer  2021-09-30 18:58:39 -0700
commit     fdda1078f8f4b6fa6c24338c3fc538831f356a10 (patch)
tree       6354e37a837dca606959f377f19b7b9c6a954161
parent     88e7e12a50075515af3be6aed9231880ae5df7b5 (diff)
Support remapping macro paths
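
This change lets a package.json remap imports of a package path to a macro file. Under the "bun" key, a new "macros" object maps a package path (for example "react-relay") to an object whose keys are import names and whose values are paths to the macro source. When the parser sees a matching import, it rewrites the import record into the macro namespace, so the macro runs at bundle time instead of the original module being loaded. The mapping added to packages/bun-macro-relay/package.json in this patch looks like this:

    {
      "bun": {
        "macros": {
          "react-relay": {
            "graphql": "../bun-macro-relay.tsx"
          }
        }
      }
    }

Resolved package.json macro maps are threaded through Bundler.ParseOptions, the dev server watchlist, and the JavaScript VM module loader so that rebuilds and runtime transpilation pick up the same remappings.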
-rw-r--r-- | packages/bun-macro-relay/__generated__/FooOperation.ts | 3
-rw-r--r-- | packages/bun-macro-relay/bun-macro-relay.tsx | 27
-rw-r--r-- | packages/bun-macro-relay/package.json | 10
-rw-r--r-- | packages/bun-macro-relay/test/foo.tsx | 11
-rw-r--r-- | packages/bun-macro-relay/tsconfig.json | 6
-rw-r--r-- | src/bundler.zig | 55
-rw-r--r-- | src/css_scanner.zig | 2
-rw-r--r-- | src/http.zig | 52
-rw-r--r-- | src/javascript/jsc/javascript.zig | 43
-rw-r--r-- | src/js_ast.zig | 131
-rw-r--r-- | src/js_parser/js_parser.zig | 59
-rw-r--r-- | src/resolver/package_json.zig | 66
-rw-r--r-- | src/resolver/resolver.zig | 7
-rw-r--r-- | src/watcher.zig | 11
14 files changed, 392 insertions, 91 deletions
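
For reference, the test added in packages/bun-macro-relay/test/foo.tsx exercises the remapping from ordinary application code; the "react-relay" import below is redirected to bun-macro-relay.tsx at bundle time:

    import { graphql } from "react-relay";

    export const Foo = () => {
      const definition = graphql`
        query FooOperation {
          foo
        }
      `;

      return <div>{definition.operation.name}</div>;
    };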
diff --git a/packages/bun-macro-relay/__generated__/FooOperation.ts b/packages/bun-macro-relay/__generated__/FooOperation.ts new file mode 100644 index 000000000..4c83371c9 --- /dev/null +++ b/packages/bun-macro-relay/__generated__/FooOperation.ts @@ -0,0 +1,3 @@ +export class FooOperation {} + +export default FooOperation; diff --git a/packages/bun-macro-relay/bun-macro-relay.tsx b/packages/bun-macro-relay/bun-macro-relay.tsx index eaeca062a..018f8f7f5 100644 --- a/packages/bun-macro-relay/bun-macro-relay.tsx +++ b/packages/bun-macro-relay/bun-macro-relay.tsx @@ -18,9 +18,14 @@ artifactDirectory = artifactDirectory.startsWith("/") : Bun.cwd + artifactDirectory; export function graphql(node) { - const [templateLiteral] = node.arguments; + let query; + + if (node instanceof <call />) { + query = node.arguments[0].toString(); + } else if (node instanceof <template />) { + query = node.toString(); + } - const query = templateLiteral?.toString(); if (typeof query !== "string" || query.length === 0) { throw new Error("BunMacroRelay: Unexpected empty graphql string."); } @@ -56,10 +61,16 @@ export function graphql(node) { /> ); - return ( - <> - <inject>{importStmt}</inject>, - <id to={importStmt.symbols.default} pure />, - </> - ); + try { + const ret = ( + <> + <inject>{importStmt}</inject> + <id to={importStmt.namespace[definitionName]} pure /> + </> + ); + return ret; + } catch (exception) { + console.error(exception); + } + return null; } diff --git a/packages/bun-macro-relay/package.json b/packages/bun-macro-relay/package.json index f811e4177..968a52065 100644 --- a/packages/bun-macro-relay/package.json +++ b/packages/bun-macro-relay/package.json @@ -5,5 +5,15 @@ "license": "MIT", "dependencies": { "graphql": "^15.6.0" + }, + "files": [ + "bun-macro-relay.tsx" + ], + "bun": { + "macros": { + "react-relay": { + "graphql": "../bun-macro-relay.tsx" + } + } } } diff --git a/packages/bun-macro-relay/test/foo.tsx b/packages/bun-macro-relay/test/foo.tsx new file mode 100644 index 000000000..fbb54f551 --- /dev/null +++ b/packages/bun-macro-relay/test/foo.tsx @@ -0,0 +1,11 @@ +import { graphql } from "react-relay"; + +export const Foo = () => { + const definition = graphql` + query FooOperation { + foo + } + `; + + return <div>{definition.operation.name}</div>; +}; diff --git a/packages/bun-macro-relay/tsconfig.json b/packages/bun-macro-relay/tsconfig.json new file mode 100644 index 000000000..19d4ac2e6 --- /dev/null +++ b/packages/bun-macro-relay/tsconfig.json @@ -0,0 +1,6 @@ +{ + "compilerOptions": { + "baseUrl": ".", + "paths": {} + } +} diff --git a/src/bundler.zig b/src/bundler.zig index 64982c822..4b7f82ec4 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -27,6 +27,7 @@ const runtime = @import("./runtime.zig"); const Timer = @import("./timer.zig"); const hash_map = @import("hash_map.zig"); const PackageJSON = @import("./resolver/package_json.zig").PackageJSON; +const MacroRemap = @import("./resolver/package_json.zig").MacroMap; const DebugLogs = _resolver.DebugLogs; const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle; const Router = @import("./router.zig"); @@ -2087,12 +2088,15 @@ pub const Bundler = struct { }, else => { var result = bundler.parse( - allocator, - file_path, - loader, - resolve_result.dirname_fd, - file_descriptor, - filepath_hash, + ParseOptions{ + .allocator = allocator, + .path = file_path, + .loader = loader, + .dirname_fd = resolve_result.dirname_fd, + .file_descriptor = file_descriptor, + .file_hash = filepath_hash, + .macro_remappings = 
resolve_result.getMacroRemappings(), + }, client_entry_point, ) orelse { bundler.resetStore(); @@ -2175,12 +2179,15 @@ pub const Bundler = struct { switch (loader) { .jsx, .tsx, .js, .ts, .json => { var result = bundler.parse( - bundler.allocator, - file_path, - loader, - resolve_result.dirname_fd, - null, - null, + ParseOptions{ + .allocator = bundler.allocator, + .path = file_path, + .loader = loader, + .dirname_fd = resolve_result.dirname_fd, + .file_descriptor = null, + .file_hash = null, + .macro_remappings = resolve_result.getMacroRemappings(), + }, client_entry_point_, ) orelse { return null; @@ -2333,17 +2340,28 @@ pub const Bundler = struct { }; } - pub fn parse( - bundler: *ThisBundler, + pub const ParseOptions = struct { allocator: *std.mem.Allocator, + dirname_fd: StoredFileDescriptorType, + file_descriptor: ?StoredFileDescriptorType = null, + file_hash: ?u32 = null, path: Fs.Path, loader: options.Loader, - // only used when file_descriptor is null - dirname_fd: StoredFileDescriptorType, - file_descriptor: ?StoredFileDescriptorType, - file_hash: ?u32, + macro_remappings: MacroRemap, + }; + + pub fn parse( + bundler: *ThisBundler, + this_parse: ParseOptions, client_entry_point_: anytype, ) ?ParseResult { + var allocator = this_parse.allocator; + const dirname_fd = this_parse.dirname_fd; + const file_descriptor = this_parse.file_descriptor; + const file_hash = this_parse.file_hash; + const path = this_parse.path; + const loader = this_parse.loader; + if (FeatureFlags.tracing) { bundler.timer.start(); } @@ -2421,6 +2439,7 @@ pub const Bundler = struct { } opts.macro_context = &bundler.macro_context.?; + opts.macro_context.remap = this_parse.macro_remappings; opts.features.is_macro_runtime = bundler.options.platform == .bun_macro; const value = (bundler.resolver.caches.js.parse( diff --git a/src/css_scanner.zig b/src/css_scanner.zig index 4dc6fa106..0adcd9917 100644 --- a/src/css_scanner.zig +++ b/src/css_scanner.zig @@ -1263,7 +1263,7 @@ pub fn NewBundler( if (watcher_index == null) { var file = try std.fs.openFileAbsolute(absolute_path, .{ .read = true }); - try this.watcher.appendFile(file.handle, absolute_path, hash, .css, 0, true); + try this.watcher.appendFile(file.handle, absolute_path, hash, .css, 0, null, true); } try this.import_queue.writeItem(hash); diff --git a/src/http.zig b/src/http.zig index b46151fda..0ddc02540 100644 --- a/src/http.zig +++ b/src/http.zig @@ -20,6 +20,7 @@ const resolve_path = @import("./resolver/resolve_path.zig"); const OutputFile = Options.OutputFile; const DotEnv = @import("./env_loader.zig"); const mimalloc = @import("./allocators/mimalloc.zig"); +const MacroMap = @import("./resolver/package_json.zig").MacroMap; pub fn constStrToU8(s: string) []u8 { return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len]; } @@ -195,13 +196,17 @@ pub const RequestContext = struct { // The answer, however, is yes. // What if you're importing a fallback that's in node_modules? 
try fallback_entry_point.generate(bundler_.options.framework.?.fallback.path, Bundler, bundler_); + + const bundler_parse_options = Bundler.ParseOptions{ + .allocator = default_allocator, + .path = fallback_entry_point.source.path, + .loader = .js, + .macro_remappings = .{}, + .dirname_fd = 0, + }; + if (bundler_.parse( - default_allocator, - fallback_entry_point.source.path, - .js, - 0, - null, - null, + bundler_parse_options, @as(?*bundler.FallbackEntryPoint, &fallback_entry_point), )) |*result| { try bundler_.linker.link(fallback_entry_point.source.path, result, .absolute_url, false); @@ -697,7 +702,9 @@ pub const RequestContext = struct { var log = logger.Log.init(allocator); - const index = std.mem.indexOfScalar(u32, this.watcher.watchlist.items(.hash), id) orelse { + var watchlist_slice = this.watcher.watchlist.slice(); + + const index = std.mem.indexOfScalar(u32, watchlist_slice.items(.hash), id) orelse { // log.addErrorFmt(null, logger.Loc.Empty, this, "File missing from watchlist: {d}. Please refresh :(", .{hash}) catch unreachable; return WatchBuildResult{ @@ -708,9 +715,17 @@ pub const RequestContext = struct { }; }; - const file_path_str = this.watcher.watchlist.items(.file_path)[index]; - const fd = this.watcher.watchlist.items(.fd)[index]; - const loader = this.watcher.watchlist.items(.loader)[index]; + const file_path_str = watchlist_slice.items(.file_path)[index]; + const fd = watchlist_slice.items(.fd)[index]; + const loader = watchlist_slice.items(.loader)[index]; + const macro_remappings = brk: { + if (watchlist_slice.items(.package_json)[index]) |package_json| { + break :brk package_json.macros; + } + + break :brk MacroMap{}; + }; + const path = Fs.Path.init(file_path_str); var old_log = this.bundler.log; this.bundler.setLog(&log); @@ -731,12 +746,15 @@ pub const RequestContext = struct { this.bundler.resetStore(); var parse_result = this.bundler.parse( - allocator, - path, - loader, - 0, - fd, - id, + Bundler.ParseOptions{ + .allocator = allocator, + .path = path, + .loader = loader, + .dirname_fd = 0, + .file_descriptor = fd, + .file_hash = id, + .macro_remappings = macro_remappings, + }, null, ) orelse { return WatchBuildResult{ @@ -1935,6 +1953,7 @@ pub const RequestContext = struct { hash, loader, resolve_result.dirname_fd, + resolve_result.package_json, true, ); @@ -1995,6 +2014,7 @@ pub const RequestContext = struct { Watcher.getHash(result.file.input.text), result.file.loader, file.dir, + null, true, )) { if (ctx.watcher.watchloop_handle == null) { diff --git a/src/javascript/jsc/javascript.zig b/src/javascript/jsc/javascript.zig index 9b1d22a05..b37fd7125 100644 --- a/src/javascript/jsc/javascript.zig +++ b/src/javascript/jsc/javascript.zig @@ -27,6 +27,8 @@ const Router = @import("./api/router.zig"); const ImportRecord = ast.ImportRecord; const DotEnv = @import("../../env_loader.zig"); const ParseResult = @import("../../bundler.zig").ParseResult; +const PackageJSON = @import("../../resolver/package_json.zig").PackageJSON; +const MacroRemap = @import("../../resolver/package_json.zig").MacroMap; pub const GlobalClasses = [_]type{ Request.Class, Response.Class, @@ -567,6 +569,8 @@ pub const VirtualMachine = struct { macro_entry_points: std.AutoArrayHashMap(i32, *MacroEntryPoint), macro_mode: bool = false, + has_any_macro_remappings: bool = false, + pub const MacroMap = std.AutoArrayHashMap(i32, js.JSObjectRef); pub threadlocal var vm_loaded = false; @@ -807,10 +811,12 @@ pub const VirtualMachine = struct { var allocator = if (vm.has_loaded) &vm.arena.allocator 
else vm.allocator; var fd: ?StoredFileDescriptorType = null; + var package_json: ?*PackageJSON = null; if (vm.watcher) |watcher| { if (watcher.indexOf(hash)) |index| { fd = watcher.watchlist.items(.fd)[index]; + package_json = watcher.watchlist.items(.package_json)[index]; } } @@ -825,13 +831,31 @@ pub const VirtualMachine = struct { vm.bundler.resolver.log = old; } + const macro_remappings = if (vm.macro_mode or !vm.has_any_macro_remappings) + MacroRemap{} + else brk: { + if (package_json) |pkg| { + break :brk pkg.macros; + } + + // TODO: find a way to pass the package_json through the resolve + const resolve_result = vm.bundler.resolver.resolve(vm.bundler.fs.top_level_dir, specifier, .stmt) catch break :brk MacroRemap{}; + + break :brk resolve_result.getMacroRemappings(); + }; + + var parse_options = Bundler.ParseOptions{ + .allocator = allocator, + .path = path, + .loader = loader, + .dirname_fd = 0, + .file_descriptor = fd, + .file_hash = hash, + .macro_remappings = macro_remappings, + }; + var parse_result = vm.bundler.parse( - allocator, - path, - loader, - 0, - fd, - hash, + parse_options, null, ) orelse { return error.ParseError; @@ -916,6 +940,13 @@ pub const VirtualMachine = struct { .stmt, ); + if (!vm.macro_mode) { + vm.has_any_macro_remappings = vm.has_any_macro_remappings or brk: { + if (result.package_json == null) break :brk false; + + break :brk result.package_json.?.macros.count() > 0; + }; + } ret.result = result; const result_path = result.pathConst() orelse return error.ModuleNotFound; vm.resolved_count += 1; diff --git a/src/js_ast.zig b/src/js_ast.zig index c81b338e8..c14493987 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -3993,6 +3993,8 @@ pub const Macro = struct { const Zig = @import("./javascript/jsc/bindings/exports.zig"); const Bundler = @import("./bundler.zig").Bundler; const MacroEntryPoint = @import("./bundler.zig").MacroEntryPoint; + const MacroRemap = @import("./resolver/package_json.zig").MacroMap; + const MacroRemapEntry = @import("./resolver/package_json.zig").MacroImportReplacementMap; pub const namespace: string = "macro"; pub const namespaceWithColon: string = namespace ++ ":"; @@ -4007,12 +4009,18 @@ pub const Macro = struct { resolver: *Resolver, env: *DotEnv.Loader, macros: MacroMap, + remap: MacroRemap, + + pub fn getRemap(this: MacroContext, path: string) ?MacroRemapEntry { + return this.remap.get(path); + } pub fn init(bundler: *Bundler) MacroContext { return MacroContext{ .macros = MacroMap.init(default_allocator), .resolver = &bundler.resolver, .env = bundler.env, + .remap = MacroRemap{}, }; } @@ -4034,9 +4042,14 @@ pub const Macro = struct { defer Expr.Data.Store.disable_reset = false; defer Stmt.Data.Store.disable_reset = false; // const is_package_path = isPackagePath(specifier); - std.debug.assert(isMacroPath(import_record_path)); + const import_record_path_without_macro_prefix = if (isMacroPath(import_record_path)) + import_record_path[namespaceWithColon.len..] 
+ else + import_record_path; + + std.debug.assert(!isMacroPath(import_record_path_without_macro_prefix)); - const resolve_result = this.resolver.resolve(source_dir, import_record_path[namespaceWithColon.len..], .stmt) catch |err| { + const resolve_result = this.resolver.resolve(source_dir, import_record_path_without_macro_prefix, .stmt) catch |err| { switch (err) { error.ModuleNotFound => { log.addResolveError( @@ -4706,7 +4719,13 @@ pub const Macro = struct { .e_undefined => |value| { return JSNode{ .loc = this.loc, .data = .{ .e_undefined = value } }; }, + .inline_identifier => |value| { + return JSNode{ .loc = this.loc, .data = .{ .inline_identifier = value } }; + }, else => { + if (comptime isDebug) { + Output.prettyWarnln("initExpr fail: {s}", .{@tagName(this.data)}); + } return JSNode{ .loc = this.loc, .data = .{ .e_missing = .{} } }; }, } @@ -4819,6 +4838,9 @@ pub const Macro = struct { .e_undefined => |value| { return Expr{ .loc = this.loc, .data = .{ .e_undefined = value } }; }, + .inline_identifier => |value| { + return Expr{ .loc = this.loc, .data = .{ .inline_identifier = value } }; + }, .fragment => |fragment| { if (fragment.len == 0) return Expr{ .loc = this.loc, .data = .{ .e_missing = E.Missing{} } }; @@ -5372,13 +5394,13 @@ pub const Macro = struct { if (!self.writeElement(el.*)) return false; }, // TODO: handle when simplification changes the expr type - .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { const visited_expr = self.p.visitExpr(child); switch (visited_expr.data) { .e_jsx_element => |el| { if (!self.writeElement(el.*)) return false; }, - .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { self.args.append(visited_expr) catch unreachable; }, else => { @@ -5411,13 +5433,13 @@ pub const Macro = struct { .e_jsx_element => |el| { if (!self.writeElementWithValidTagList(el.*, comptime Tag.Validator.valid_object_tags)) return false; }, - .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { const visited = self.p.visitExpr(child); switch (visited.data) { .e_jsx_element => |el| { if (!self.writeElementWithValidTagList(el.*, comptime Tag.Validator.valid_object_tags)) return false; }, - .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { self.args.append(visited) catch unreachable; }, else => { @@ -5455,7 +5477,7 @@ pub const Macro = struct { .e_jsx_element => |el| { if (!self.writeElement(el.*)) return false; }, - .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { 
self.args.append(self.p.visitExpr(prop)) catch unreachable; }, else => { @@ -5473,7 +5495,7 @@ pub const Macro = struct { if (!self.writeElement(el.*)) return false; }, - .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { self.args.append(self.p.visitExpr(prop)) catch unreachable; }, else => { @@ -5493,7 +5515,7 @@ pub const Macro = struct { .e_string => |str| { self.args.append(prop) catch unreachable; }, - .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { self.args.append(self.p.visitExpr(prop)) catch unreachable; }, else => { @@ -5538,7 +5560,7 @@ pub const Macro = struct { self.args.appendAssumeCapacity(Expr{ .loc = value.loc, .data = .{ .e_string = if (boolean.value) &E.String.@"true" else &E.String.@"false" } }); }, // these ones are not statically analyzable so we just leave them in as-is - .e_if, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { self.args.appendAssumeCapacity(self.p.visitExpr(value)); }, // everything else is invalid @@ -5601,15 +5623,7 @@ pub const Macro = struct { .e_jsx_element => if (c.data.e_jsx_element.tag != null) 1 else brk: { break :brk c.data.e_jsx_element.children.len; }, - .e_identifier => 1, - else => brk: { - self.log.addError( - self.p.source, - c.loc, - "<inject> children must be JSX AST nodes", - ) catch unreachable; - break :brk 0; - }, + else => 1, }; } self.args.ensureUnusedCapacity(2 + count) catch unreachable; @@ -5625,10 +5639,9 @@ pub const Macro = struct { .e_jsx_element => |el| { if (!self.writeElementWithValidTagList(el.*, comptime Tag.Validator.valid_inject_tags)) return false; }, - .e_spread, .e_if, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { - self.args.append(child) catch unreachable; + else => { + self.args.append(self.p.visitExpr(child)) catch unreachable; }, - else => {}, } } @@ -5697,7 +5710,7 @@ pub const Macro = struct { ) catch unreachable; return false; }, - .e_if, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { self.args.appendAssumeCapacity(p.visitExpr(path_property)); }, else => { @@ -5765,7 +5778,7 @@ pub const Macro = struct { ) catch unreachable; return false; }, - .e_if, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { + .e_template, .e_if, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { self.args.appendAssumeCapacity(p.visitExpr(default)); }, else => { @@ -5801,6 +5814,7 @@ pub const Macro = struct { .e_jsx_element => |el| { if (!self.writeElement(el.*)) return false; }, + .e_if, .e_spread, .e_identifier, .e_import_identifier, .e_index, .e_call, .e_private_identifier, .e_dot, .e_unary, .e_binary => { const visited 
= self.p.visitExpr(child); switch (visited.data) { @@ -5903,6 +5917,28 @@ pub const Macro = struct { }, else => Global.panic("Not implemented yet top-level jsx element: {s}", .{@tagName(tag_expr.data)}), } + } else { + const loc = logger.Loc.Empty; + self.p.recordUsage(self.bun_jsx_ref); + _ = self.writeNodeType(JSNode.Tag.fragment, element.properties, element.children, loc); + var call_args = self.p.allocator.alloc(Expr, 1) catch unreachable; + call_args[0] = Expr.alloc(self.p.allocator, E.Array, E.Array{ .items = self.args.items }, loc); + + return Expr.alloc( + self.p.allocator, + E.Call, + E.Call{ + .target = Expr{ + .data = .{ + .e_identifier = self.bun_identifier, + }, + .loc = loc, + }, + .can_be_unwrapped_if_unused = true, + .args = call_args, + }, + loc, + ); } return Expr{ .data = .{ .e_missing = .{} }, .loc = logger.Loc.Empty }; @@ -6228,21 +6264,26 @@ pub const Macro = struct { if (!JSLexer.isIdentifier(alias)) throwTypeError(writer.ctx, "import alias must be an identifier", writer.exception); import.import.items[import_item_i] = ClauseItem{ - .alias = alias, - .original_name = name, + .alias = name, + .original_name = alias, .name = .{ .loc = writer.loc, .ref = Ref.None }, .alias_loc = writer.loc, }; import_item_i += 1; } + } else { + import.import.items = writer.allocator.alloc( + ClauseItem, + @intCast(u32, @boolToInt(has_default)), + ) catch return false; } if (has_default) { import.import.items[import_item_i] = ClauseItem{ - .alias = import_default_name, + .alias = "default", .name = .{ .loc = writer.loc, .ref = Ref.None }, - .original_name = "default", + .original_name = import_default_name, .alias_loc = writer.loc, }; import_item_i += 1; @@ -6472,13 +6513,8 @@ pub const Macro = struct { const next_value = (writer.eatArg() orelse return null); const next_value_ref = next_value.asRef(); if (js.JSValueIsArray(writer.ctx, next_value_ref)) { - const array = next_value; - const array_len = JSC.JSValue.getLengthOfArray(next_value, JavaScript.VirtualMachine.vm.global); - - var array_i: u32 = 0; - while (array_i < array_len) : (array_i += 1) { - var current_value = JSC.JSObject.getIndex(array, JavaScript.VirtualMachine.vm.global, i); - + var iter = JSC.JSArrayIterator.init(next_value, JavaScript.VirtualMachine.vm.global); + while (iter.next()) |current_value| { switch (TagOrJSNode.fromJSValueRef(writer, writer.ctx, current_value.asRef())) { .node => |node| { if (node.data != .s_import) { @@ -6497,11 +6533,28 @@ pub const Macro = struct { } i += 1; continue; + } else { + switch (TagOrJSNode.fromJSValueRef(writer, writer.ctx, next_value_ref)) { + .tag => |tag2| { + if (!writer.writeFromJSWithTagInNode(tag2)) return null; + }, + TagOrJSNode.node => |node| { + writer.inject.append(node) catch unreachable; + }, + TagOrJSNode.invalid => { + return null; + }, + } } } return JSNode{ .data = .{ .inline_inject = writer.inject.toOwnedSlice() }, .loc = writer.loc }; } + if (tag == Tag.s_import) { + if (!writer.writeFromJSWithTagInNode(tag)) return null; + return writer.inject.items[0]; + } + if (tag == Tag.fragment) { const count: u32 = (writer.eatArg() orelse return null).toU32(); // collapse single-item fragments @@ -6794,8 +6847,8 @@ pub const Macro = struct { var value_node: JSNode = undefined; for (properties) |property| { - if (strings.eql(property.alias, property_slice)) { - return JSC.JSValue.jsNumberFromInt32(JSNode.SymbolMap.generateImportHash(property.alias, this.import_data.path)).asRef(); + if (strings.eql(property.original_name, property_slice)) { + return 
JSC.JSValue.jsNumberFromInt32(JSNode.SymbolMap.generateImportHash(property.original_name, this.import_data.path)).asRef(); } } @@ -6815,7 +6868,7 @@ pub const Macro = struct { var property_slice = ptr[0..len]; for (properties) |property| { - if (strings.eql(property.alias, property_slice)) return true; + if (strings.eql(property.original_name, property_slice)) return true; } return false; @@ -6831,7 +6884,7 @@ pub const Macro = struct { const items = this.import_data.import.items; for (items) |clause| { - const str = clause.alias; + const str = clause.original_name; js.JSPropertyNameAccumulatorAddName(props, js.JSStringCreateStatic(str.ptr, str.len)); } } diff --git a/src/js_parser/js_parser.zig b/src/js_parser/js_parser.zig index 04ad74286..cb3e0ff0c 100644 --- a/src/js_parser/js_parser.zig +++ b/src/js_parser/js_parser.zig @@ -6216,6 +6216,12 @@ pub fn NewParser( } } + const macro_remap = if (FeatureFlags.is_macro_enabled and !is_macro and jsx_transform_type != .macro) + p.options.macro_context.getRemap(path.text) + else + null; + + var remap_count: u16 = 0; if (stmt.star_name_loc) |star| { const name = p.loadNameFromRef(stmt.namespace_ref); stmt.namespace_ref = try p.declareSymbol(.import, star, name); @@ -6248,6 +6254,10 @@ pub fn NewParser( var item_refs = ImportItemForNamespaceMap.init(p.allocator); + const total_count = @intCast(u16, stmt.items.len) + + @intCast(u16, @boolToInt(stmt.default_name != null)) + + @intCast(u16, @boolToInt(stmt.star_name_loc != null)); + // Link the default item to the namespace if (stmt.default_name) |*name_loc| { const name = p.loadNameFromRef(name_loc.ref orelse unreachable); @@ -6266,6 +6276,20 @@ pub fn NewParser( if (is_macro) { try p.macro.refs.put(ref, stmt.import_record_index); } + + if (macro_remap) |remap| { + if (remap.get("default")) |remapped_path| { + const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path); + try p.macro.refs.put(ref, new_import_id); + stmt.default_name = null; + p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace; + if (comptime only_scan_imports_and_do_not_visit) { + p.import_records.items[new_import_id].is_internal = true; + p.import_records.items[new_import_id].path.is_disabled = true; + } + remap_count += 1; + } + } } if (stmt.items.len > 0) { @@ -6290,6 +6314,33 @@ pub fn NewParser( if (is_macro) { try p.macro.refs.put(ref, stmt.import_record_index); } + + if (macro_remap) |remap| { + if (remap.get(item.alias)) |remapped_path| { + const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path); + p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace; + if (comptime only_scan_imports_and_do_not_visit) { + p.import_records.items[new_import_id].is_internal = true; + p.import_records.items[new_import_id].path.is_disabled = true; + } + try p.macro.refs.put(ref, new_import_id); + remap_count += 1; + + continue; + } + } + } + } + + // If we remapped the entire import away + // i.e. 
import {graphql} "react-relay" + + if (remap_count > 0 and remap_count == total_count) { + p.import_records.items[stmt.import_record_index].path.namespace = js_ast.Macro.namespace; + + if (comptime only_scan_imports_and_do_not_visit) { + p.import_records.items[stmt.import_record_index].path.is_disabled = true; + p.import_records.items[stmt.import_record_index].is_internal = true; } } @@ -9618,16 +9669,19 @@ pub fn NewParser( ) catch unreachable; for (import.items) |*clause| { - if (strings.eqlComptime(clause.original_name, "default")) { + const import_hash_name = clause.original_name; + + if (strings.eqlComptime(clause.alias, "default") and strings.eqlComptime(clause.original_name, "default")) { var non_unique_name = record.path.name.nonUniqueNameString(p.allocator) catch unreachable; clause.original_name = std.fmt.allocPrint(p.allocator, "{s}_default", .{non_unique_name}) catch unreachable; } const name_ref = p.declareSymbol(.import, this.loc, clause.original_name) catch unreachable; clause.name = LocRef{ .loc = this.loc, .ref = name_ref }; - p.macro.imports.putAssumeCapacity(js_ast.Macro.JSNode.SymbolMap.generateImportHash(clause.alias, import_data.path), name_ref); p.is_import_item.put(name_ref, true) catch unreachable; + p.macro.imports.putAssumeCapacity(js_ast.Macro.JSNode.SymbolMap.generateImportHash(import_hash_name, import_data.path), name_ref); + // Ensure we don't accidentally think this is an export from clause.original_name = ""; } @@ -10350,6 +10404,7 @@ pub fn NewParser( return JSXTag{ .range = logger.Range{ .loc = loc, .len = 0 }, .data = Data{ .fragment = 1 }, + .name = "", }; } diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index bff445bf2..4949fe49e 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -15,6 +15,8 @@ const resolve_path = @import("./resolve_path.zig"); const MainFieldMap = std.StringArrayHashMap(string); pub const BrowserMap = std.StringArrayHashMap(string); threadlocal var hashy: [2048]u8 = undefined; +pub const MacroImportReplacementMap = std.StringArrayHashMap(string); +pub const MacroMap = std.StringArrayHashMapUnmanaged(MacroImportReplacementMap); pub const PackageJSON = struct { pub const LoadFramework = enum { @@ -56,6 +58,7 @@ pub const PackageJSON = struct { hash: u32 = 0xDEADBEEF, always_bundle: []string = &.{}, + macros: MacroMap = MacroMap{}, // Present if the "browser" field is present. This field is intended to be // used by bundlers and lets you redirect the paths of certain 3rd-party @@ -480,6 +483,7 @@ pub const PackageJSON = struct { .hash = 0xDEADBEEF, .source = json_source, .module_type = .unknown, + .macros = MacroMap{}, .browser_map = BrowserMap.init(r.allocator), .main_fields = MainFieldMap.init(r.allocator), }; @@ -548,6 +552,68 @@ pub const PackageJSON = struct { // for (var i = 0; i < bundle_.expr.data.e_array.len; i++) { } } + + if (bun_json.expr.asProperty("macros")) |macros| { + if (macros.expr.data == .e_object) { + var always_bundle_count: u16 = 0; + const properties = macros.expr.data.e_object.properties; + + for (properties) |property| { + const key = property.key.?.asString(r.allocator) orelse continue; + if (!resolver.isPackagePath(key)) { + r.log.addRangeWarningFmt( + &json_source, + json_source.rangeOfString(property.key.?.loc), + r.allocator, + "\"{s}\" is not a package path. \"macros\" remaps package paths to macros. 
Skipping.", + .{key}, + ) catch unreachable; + continue; + } + + const value = property.value.?; + if (value.data != .e_object) { + r.log.addWarningFmt( + &json_source, + value.loc, + r.allocator, + "Invalid macro remapping in \"{s}\": expected object where the keys are import names and the value is a string path to replace", + .{key}, + ) catch unreachable; + continue; + } + + const remap_properties = value.data.e_object.properties; + if (remap_properties.len == 0) continue; + + var map = MacroImportReplacementMap.init(r.allocator); + map.ensureUnusedCapacity(remap_properties.len) catch unreachable; + for (remap_properties) |remap| { + const import_name = remap.key.?.asString(r.allocator) orelse continue; + const remap_value = remap.value.?; + if (remap_value.data != .e_string or remap_value.data.e_string.utf8.len == 0) { + r.log.addWarningFmt( + &json_source, + remap_value.loc, + r.allocator, + "Invalid macro remapping for import \"{s}\": expected string to remap to. e.g. \"graphql\": \"bun-macro-relay\" ", + .{import_name}, + ) catch unreachable; + continue; + } + + const remap_value_str = remap_value.data.e_string.utf8; + + map.putAssumeCapacityNoClobber(import_name, remap_value_str); + } + + if (map.count() > 0) { + package_json.macros.put(r.allocator, key, map) catch unreachable; + } + } + // for (var i = 0; i < bundle_.expr.data.e_array.len; i++) { + } + } } // Read the "main" fields diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index fade021a7..a1066dfb7 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -8,6 +8,7 @@ const cache = @import("../cache.zig"); const sync = @import("../sync.zig"); const TSConfigJSON = @import("./tsconfig_json.zig").TSConfigJSON; const PackageJSON = @import("./package_json.zig").PackageJSON; +const MacroRemap = @import("./package_json.zig").MacroMap; const ESModule = @import("./package_json.zig").ESModule; const BrowserMap = @import("./package_json.zig").BrowserMap; const CacheSet = cache.Set; @@ -106,6 +107,12 @@ pub const Result = struct { file_fd: StoredFileDescriptorType = 0, import_kind: ast.ImportKind = undefined, + pub fn getMacroRemappings(this: *const Result) MacroRemap { + const pkg = this.package_json orelse return MacroRemap{}; + + return pkg.macros; + } + pub fn path(this: *Result) ?*Path { if (!this.path_pair.primary.is_disabled) return &this.path_pair.primary; diff --git a/src/watcher.zig b/src/watcher.zig index 9948a01da..105c29b12 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -10,6 +10,8 @@ const KEvent = std.os.Kevent; const Mutex = @import("./lock.zig").Lock; const WatchItemIndex = u16; const NoWatchItem: WatchItemIndex = std.math.maxInt(WatchItemIndex); +const PackageJSON = @import("./resolver/package_json.zig").PackageJSON; + pub const WatchItem = struct { file_path: string, // filepath hash for quick comparison @@ -20,6 +22,7 @@ pub const WatchItem = struct { count: u32, parent_hash: u32, kind: Kind, + package_json: ?*PackageJSON, pub const Kind = enum { file, directory }; }; @@ -240,13 +243,14 @@ pub fn NewWatcher(comptime ContextType: type) type { hash: HashType, loader: options.Loader, dir_fd: StoredFileDescriptorType, + package_json: ?*PackageJSON, comptime copy_file_path: bool, ) !void { if (this.indexOf(hash) != null) { return; } - try this.appendFile(fd, file_path, hash, loader, dir_fd, copy_file_path); + try this.appendFile(fd, file_path, hash, loader, dir_fd, package_json, copy_file_path); } fn appendFileAssumeCapacity( @@ -256,6 +260,7 @@ pub fn NewWatcher(comptime 
ContextType: type) type { hash: HashType, loader: options.Loader, parent_hash: HashType, + package_json: ?*PackageJSON, comptime copy_file_path: bool, ) !void { const index = this.eventlist_used; @@ -309,6 +314,7 @@ pub fn NewWatcher(comptime ContextType: type) type { .eventlist_index = @truncate(u32, index), .loader = loader, .parent_hash = parent_hash, + .package_json = package_json, .kind = .file, }); } @@ -374,6 +380,7 @@ pub fn NewWatcher(comptime ContextType: type) type { .loader = options.Loader.file, .parent_hash = parent_hash, .kind = .directory, + .package_json = null, }); return @truncate(WatchItemIndex, this.watchlist.len - 1); } @@ -408,6 +415,7 @@ pub fn NewWatcher(comptime ContextType: type) type { hash: HashType, loader: options.Loader, dir_fd: StoredFileDescriptorType, + package_json: ?*PackageJSON, comptime copy_file_path: bool, ) !void { this.mutex.lock(); @@ -449,6 +457,7 @@ pub fn NewWatcher(comptime ContextType: type) type { hash, loader, parent_dir_hash, + package_json, copy_file_path, ); |