| author | 2022-04-16 09:29:10 -0700 |
|---|---|
| committer | 2022-04-16 09:29:10 -0700 |
| commit | 0137e5cf94a2cfd510f70d8881f67e8066e0d098 |
| tree | c16b792c1e75b010f5e8a9b0cc7f924f69e3e236 |
| parent | 89ca887ea0c0c673f1c1c22cb5913f09435feeb6 |
[JS Parser] Support explicitly removing exports
| -rw-r--r-- | integration/bunjs-only-snippets/transpiler.test.js | 63 |
| -rw-r--r-- | src/api/bundle_v2.peechy | 75 |
| -rw-r--r-- | src/bundler.zig | 14 |
| -rw-r--r-- | src/javascript/jsc/api/server.zig | 139 |
| -rw-r--r-- | src/javascript/jsc/api/transpiler.zig | 243 |
| -rw-r--r-- | src/javascript/jsc/bindings/bindings.zig | 9 |
| -rw-r--r-- | src/options.zig | 2 |
| -rw-r--r-- | src/runtime.zig | 17 |
| -rw-r--r-- | src/string_immutable.zig | 2 |
| -rw-r--r-- | src/toml/toml_lexer.zig | 4 |
10 files changed, 480 insertions, 88 deletions
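
The user-facing surface of this commit is a set of new `Bun.Transpiler` options: `exports.replace`, `exports.eliminate`, `treeShaking`, and `trimUnusedImports`. A minimal usage sketch, assuming only the option names exercised by the test added below; the input module and the expected substrings are illustrative, not verified output:

```js
// Option names come from the test in this commit; expectations are hedged guesses.
const transpiler = new Bun.Transpiler({
  exports: {
    // Rewrite matching top-level exports: a bare value swaps the export's value,
    // a [name, value] pair renames the export and assigns it the value.
    replace: {
      foo: "bar",
      getStaticProps: ["__N_SSG", true],
    },
    // Remove these top-level exports even if something else in the file still uses them.
    eliminate: ["loader"],
  },
  treeShaking: true,       // per-file only for now, per the comment in the test
  trimUnusedImports: true, // default for TypeScript; opt-in for JavaScript
});

const out = transpiler.transformSync(`
  import fs from 'fs';
  export function loader() { return fs.readFileSync('/etc/passwd'); }
  export const getStaticProps = async () => ({ props: {} });
  export default function Page() { return null; }
`);

console.log(out.includes("loader"));  // expected false: the export was eliminated
console.log(out.includes("fs"));      // expected false: its only user is gone
console.log(out.includes("__N_SSG")); // expected true: getStaticProps was replaced
```
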
diff --git a/integration/bunjs-only-snippets/transpiler.test.js b/integration/bunjs-only-snippets/transpiler.test.js
index c0646f578..4759b17c9 100644
--- a/integration/bunjs-only-snippets/transpiler.test.js
+++ b/integration/bunjs-only-snippets/transpiler.test.js
@@ -1,27 +1,67 @@
 import { expect, it, describe } from "bun:test";
 
 describe("Bun.Transpiler", () => {
-  describe("replaceExports", () => {
+  describe("exports.replace", () => {
     const transpiler = new Bun.Transpiler({
       exports: {
         replace: {
-          // Next.js does this
+          // export var foo = function() { }
+          // =>
+          // export var foo = "bar";
+          foo: "bar",
+
+          // export const getStaticProps = /* code */
+          // =>
+          // export var __N_SSG = true;
           getStaticProps: ["__N_SSG", true],
-          localVarToReplace: 2,
+          getStaticPaths: ["__N_SSG", true],
+          // export function getStaticProps(ctx) { /* code */ }
+          // =>
+          // export var __N_SSP = true;
+          getServerSideProps: ["__N_SSP", true],
         },
-        // Remix could possibly do this when building for browsers
-        // to automatically remove imports only referenced within the loader
-        // For Remix, it probably is less impactful due to .client and .server conventions in place
-        eliminate: ["loader", "localVarToRemove"],
+
+        // Explicitly remove the top-level export, even if it is in use by
+        // another part of the file
+        eliminate: ["loader"],
       },
+      /* only per-file for now, so this isn't good yet */
       treeShaking: true,
+
+      // remove non-bare unused exports, even if they may have side effects
+      // Consistent with tsc & esbuild, this is enabled by default for TypeScript files
+      // this flag lets you enable it for JavaScript files
+      // this already existed, just wasn't exposed in the API
       trimUnusedImports: true,
     });
+
+    it("a deletes dead exports and any imports only referenced in dead regions", () => {
+      console.log("b");
+
+      const out = transpiler.transformSync(`
+        import {getUserById} from './my-database';
+
+        export async function getStaticProps(ctx){
+          return { props: { user: await getUserById(ctx.params.id) } };
+        }
+
+        export default function MyComponent({user}) {
+          getStaticProps();
+          return <div id='user'>{user.name}</div>;
+        }
+      `);
+
+      // when all three flags are set, it means
+      console.log(out);
+    });
+
     it("deletes dead exports and any imports only referenced in dead regions", () => {
       const output = transpiler.transformSync(`
         import deadFS from 'fs';
         import liveFS from 'fs';
 
+        export var deleteMe = 100;
+
         export function loader() {
           deadFS.readFileSync("/etc/passwd");
           liveFS.readFileSync("/etc/passwd");
@@ -29,10 +69,11 @@ describe("Bun.Transpiler", () => {
 
         export function action() {
           require("foo");
-          liveFS.readFileSync("/etc/passwd");
+          liveFS.readFileSync("/etc/passwd")
+          deleteMe = 101;
         }
 
-        export default function() {
+        export function baz() {
           require("bar");
         }
       `);
@@ -58,14 +99,16 @@ describe("Bun.Transpiler", () => {
 
         export {getStaticProps}
 
-        export default function() {
+        export function baz() {
           liveFS.readFileSync("/etc/passwd");
           require("bar");
         }
       `);
+      console.log(output);
 
       expect(output.includes("loader")).toBe(false);
       expect(output.includes("react")).toBe(false);
       expect(output.includes("deadFS")).toBe(false);
+      expect(output.includes("default")).toBe(false);
       expect(output.includes("anotherDeadFS")).toBe(false);
       expect(output.includes("liveFS")).toBe(true);
       expect(output.includes("__N_SSG")).toBe(true);
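
For the `["__N_SSG", true]` tuple form shown in the test above, the comments in the test describe the intended rewrite (`export const getStaticProps = ...` becoming `export var __N_SSG = true;`). A self-contained sketch of that before/after shape; the `./my-database` module path and the exact printed output are illustrative:

```js
// Hypothetical Next.js-style page; './my-database' is a placeholder module.
const page = `
  import { getUserById } from './my-database';

  export async function getStaticProps(ctx) {
    return { props: { user: await getUserById(ctx.params.id) } };
  }

  export default function MyComponent({ user }) {
    return user.name;
  }
`;

const t = new Bun.Transpiler({
  exports: { replace: { getStaticProps: ["__N_SSG", true] } },
  treeShaking: true,
  trimUnusedImports: true,
});

const out = t.transformSync(page);
// Per the comments in the test, the export is expected to come out roughly as:
//   export var __N_SSG = true;
// and the './my-database' import, now only referenced by dead code, should be trimmed.
console.log(out.includes("__N_SSG"));     // expected true
console.log(out.includes("my-database")); // expected false
```
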
diff --git a/src/api/bundle_v2.peechy b/src/api/bundle_v2.peechy
new file mode 100644
index 000000000..db83d257b
--- /dev/null
+++ b/src/api/bundle_v2.peechy
@@ -0,0 +1,75 @@
+struct Export {
+  uint32 part_id;
+  StringPointer name;
+}
+
+
+struct JavascriptBundledPart {
+  StringPointer code;
+
+  uint32 dependencies_offset;
+  uint32 dependencies_length;
+
+  uint32 exports_offset;
+  uint32 exports_length;
+
+  uint32 from_module;
+
+  // The ESM export is this id ("$" + number.toString(16))
+  uint32 id;
+}
+
+struct JavascriptBundledModule {
+  // package-relative path including file extension
+  StringPointer path;
+
+  uint32 parts_offset;
+  uint32 parts_length;
+
+  uint32 exports_offset;
+  uint32 exports_length;
+
+  // index into JavascriptBundle.packages
+  uint32 package_id;
+
+  // This lets us efficiently compare strings ignoring the extension
+  byte path_extname_length;
+}
+
+struct JavascriptBundledPackage {
+  StringPointer name;
+  StringPointer version;
+  uint32 hash;
+
+  uint32 modules_offset;
+  uint32 modules_length;
+}
+
+struct JavascriptBundle {
+  // These are sorted alphabetically so you can do binary search
+  JavascriptBundledModule[] modules;
+  JavascriptBundledPackage[] packages;
+
+  // This is ASCII-encoded so you can send it directly over HTTP
+  byte[] etag;
+
+  uint32 generated_at;
+
+  byte[] import_from_name;
+
+  // This is what StringPointer refers to
+  byte[] manifest_string;
+}
+
+message JavascriptBundleContainer {
+  uint32 bundle_format_version = 1;
+
+  // These go first so if we change JavaScriptBundle we can still read these
+  LoadedRouteConfig routes = 3;
+  LoadedFramework framework = 2;
+
+  JavascriptBundle bundle = 4;
+
+  // Don't technically need to store this, but it may be helpful as a sanity check
+  uint32 code_length = 5;
+}
diff --git a/src/bundler.zig b/src/bundler.zig
index 307ab3c97..957c4af5b 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -207,7 +207,7 @@ pub const Bundler = struct {
             existing_bundle,
         );
 
-        var env_loader = env_loader_ orelse brk: {
+        var env_loader = env_loader_ orelse DotEnv.instance orelse brk: {
             var map = try allocator.create(DotEnv.Map);
             map.* = DotEnv.Map.init(allocator);
 
@@ -215,7 +215,10 @@ pub const Bundler = struct {
            loader.* = DotEnv.Loader.init(map, allocator);
            break :brk loader;
        };
-        DotEnv.instance = env_loader;
+
+        if (DotEnv.instance == null) {
+            DotEnv.instance = env_loader;
+        }
        // var pool = try allocator.create(ThreadPool);
        // try pool.init(ThreadPool.InitConfig{
        //     .allocator = allocator,
@@ -1717,7 +1720,8 @@ pub const Bundler = struct {
                 opts.warn_about_unbundled_modules = false;
                 opts.macro_context = &worker.data.macro_context;
                 opts.features.auto_import_jsx = jsx.parse;
-
+                opts.features.trim_unused_imports = this.bundler.options.trim_unused_imports orelse loader.isTypeScript();
+                opts.tree_shaking = this.bundler.options.tree_shaking;
                 ast = (bundler.resolver.caches.js.parse(
                     bundler.allocator,
                     opts,
@@ -2128,6 +2132,7 @@ pub const Bundler = struct {
             jsx.parse = loader.isJSX();
             var opts = js_parser.Parser.Options.init(jsx, loader);
             opts.macro_context = &worker.data.macro_context;
+            opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript();
 
             try bundler.resolver.caches.js.scan(
                 bundler.allocator,
@@ -2819,6 +2824,7 @@ pub const Bundler = struct {
         jsx: options.JSX.Pragma,
         macro_remappings: MacroRemap,
         virtual_source: ?*const logger.Source = null,
+        replace_exports: runtime.Runtime.Features.ReplaceableExport.Map = .{},
     };
 
     pub fn parse(
@@ -2905,6 +2911,7 @@ pub const Bundler = struct {
                 opts.enable_bundling = false;
                 opts.transform_require_to_import = bundler.options.allow_runtime;
                 opts.features.allow_runtime = bundler.options.allow_runtime;
+                opts.features.trim_unused_imports = bundler.options.trim_unused_imports orelse loader.isTypeScript();
 
                 opts.can_import_from_bundle = bundler.options.node_modules_bundle != null;
 
@@ -2936,6 +2943,7 @@ pub const Bundler = struct {
                 opts.macro_context = &bundler.macro_context.?;
 
                 opts.features.is_macro_runtime = bundler.options.platform == .bun_macro;
+                opts.features.replace_exports = this_parse.replace_exports;
 
                 const value = (bundler.resolver.caches.js.parse(
                     allocator,
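
In the wiring above, `trim_unused_imports` falls back to `loader.isTypeScript()` when the option is unset, so TypeScript sources get unused-import trimming by default (consistent with tsc and esbuild) while plain JavaScript keeps unused imports unless you opt in. A hedged sketch of that difference through the JS API; the `loader` constructor option is assumed here for illustration:

```js
const code = `import { unused } from "some-pkg"; export const x = 1;`;

// TypeScript loader: trimming is expected by default, since trimUnusedImports is unset.
const ts = new Bun.Transpiler({ loader: "ts" });
console.log(ts.transformSync(code).includes("some-pkg")); // expected: false

// Plain JavaScript: the unused import is expected to survive unless you opt in.
const js = new Bun.Transpiler({ loader: "js" });
console.log(js.transformSync(code).includes("some-pkg")); // expected: true

const jsTrimmed = new Bun.Transpiler({ loader: "js", trimUnusedImports: true });
console.log(jsTrimmed.transformSync(code).includes("some-pkg")); // expected: false
```
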
diff --git a/src/javascript/jsc/api/server.zig b/src/javascript/jsc/api/server.zig
index 9f38045f0..a76ddf9ab 100644
--- a/src/javascript/jsc/api/server.zig
+++ b/src/javascript/jsc/api/server.zig
@@ -254,7 +254,7 @@ pub const ServerConfig = struct {
             args.development = false;
         }
 
-        const PORT_ENV = .{ "PORT", "BUN_PORT" };
+        const PORT_ENV = .{ "PORT", "BUN_PORT", "NODE_PORT" };
 
         inline for (PORT_ENV) |PORT| {
             if (env.get(PORT)) |port| {
@@ -428,6 +428,74 @@ pub const ServerConfig = struct {
     }
 };
 
+pub fn NewRequestContextStackAllocator(comptime RequestContext: type, comptime count: usize) type {
+    // Pre-allocate up to 2048 requests
+    // use a bitset to track which ones are used
+    return struct {
+        buf: [count]RequestContext = undefined,
+        unused: Set = undefined,
+        fallback_allocator: std.mem.Allocator = undefined,
+
+        pub const Set = std.bit_set.ArrayBitSet(usize, count);
+
+        pub fn get(this: *@This()) std.mem.Allocator {
+            this.unused = Set.initFull();
+            return std.mem.Allocator.init(this, alloc, resize, free);
+        }
+
+        fn alloc(self: *@This(), a: usize, b: u29, c: u29, d: usize) ![]u8 {
+            if (self.unused.findFirstSet()) |i| {
+                self.unused.unset(i);
+                return std.mem.asBytes(&self.buf[i]);
+            }
+
+            return try self.fallback_allocator.rawAlloc(a, b, c, d);
+        }
+
+        fn resize(
+            _: *@This(),
+            _: []u8,
+            _: u29,
+            _: usize,
+            _: u29,
+            _: usize,
+        ) ?usize {
+            unreachable;
+        }
+
+        fn sliceContainsSlice(container: []u8, slice: []u8) bool {
+            return @ptrToInt(slice.ptr) >= @ptrToInt(container.ptr) and
+                (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(container.ptr) + container.len);
+        }
+
+        fn free(
+            self: *@This(),
+            buf: []u8,
+            buf_align: u29,
+            return_address: usize,
+        ) void {
+            _ = buf_align;
+            _ = return_address;
+            const bytes = std.mem.asBytes(&self.buf);
+            if (sliceContainsSlice(bytes, buf)) {
+                const index = if (bytes[0..buf.len].ptr != buf.ptr)
+                    (@ptrToInt(buf.ptr) - @ptrToInt(bytes)) / @sizeOf(RequestContext)
+                else
+                    @as(usize, 0);
+
+                if (comptime Environment.allow_assert) {
+                    std.debug.assert(@intToPtr(*RequestContext, @ptrToInt(buf.ptr)) == &self.buf[index]);
+                    std.debug.assert(!self.unused.isSet(index));
+                }
+
+                self.unused.set(index);
+            } else {
+                self.fallback_allocator.rawFree(buf, buf_align, return_address);
+            }
+        }
+    };
+}
+
 // This is defined separately partially to work-around an LLVM debugger bug.
 fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comptime ThisServer: type) type {
     return struct {
@@ -436,6 +504,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
         pub threadlocal var pool: ?*RequestContext.RequestContextStackAllocator = null;
         pub threadlocal var pool_allocator: std.mem.Allocator = undefined;
 
+        pub const RequestContextStackAllocator = NewRequestContextStackAllocator(RequestContext, 2048);
+
         server: *ThisServer,
         resp: *App.Response,
         /// thread-local default heap allocator
@@ -459,76 +529,11 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
         sendfile: SendfileContext = undefined,
         request_js_object: JSC.C.JSObjectRef = null,
         request_body_buf: std.ArrayListUnmanaged(u8) = .{},
+        /// Used either for temporary blob data or fallback
         /// When the response body is a temporary value
         response_buf_owned: std.ArrayListUnmanaged(u8) = .{},
 
-        // Pre-allocate up to 2048 requests
-        // use a bitset to track which ones are used
-        pub const RequestContextStackAllocator = struct {
-            buf: [2048]RequestContext = undefined,
-            unused: Set = undefined,
-            fallback_allocator: std.mem.Allocator = undefined,
-
-            pub const Set = std.bit_set.ArrayBitSet(usize, 2048);
-
-            pub fn get(this: *@This()) std.mem.Allocator {
-                this.unused = Set.initFull();
-                return std.mem.Allocator.init(this, alloc, resize, free);
-            }
-
-            fn alloc(self: *@This(), a: usize, b: u29, c: u29, d: usize) ![]u8 {
-                if (self.unused.findFirstSet()) |i| {
-                    self.unused.unset(i);
-                    return std.mem.asBytes(&self.buf[i]);
-                }
-
-                return try self.fallback_allocator.rawAlloc(a, b, c, d);
-            }
-
-            fn resize(
-                _: *@This(),
-                _: []u8,
-                _: u29,
-                _: usize,
-                _: u29,
-                _: usize,
-            ) ?usize {
-                unreachable;
-            }
-
-            fn sliceContainsSlice(container: []u8, slice: []u8) bool {
-                return @ptrToInt(slice.ptr) >= @ptrToInt(container.ptr) and
-                    (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(container.ptr) + container.len);
-            }
-
-            fn free(
-                self: *@This(),
-                buf: []u8,
-                buf_align: u29,
-                return_address: usize,
-            ) void {
-                _ = buf_align;
-                _ = return_address;
-                const bytes = std.mem.asBytes(&self.buf);
-                if (sliceContainsSlice(bytes, buf)) {
-                    const index = if (bytes[0..buf.len].ptr != buf.ptr)
-                        (@ptrToInt(buf.ptr) - @ptrToInt(bytes)) / @sizeOf(RequestContext)
-                    else
-                        @as(usize, 0);
-
-                    if (comptime Environment.allow_assert) {
-                        std.debug.assert(@intToPtr(*RequestContext, @ptrToInt(buf.ptr)) == &self.buf[index]);
-                        std.debug.assert(!self.unused.isSet(index));
-                    }
-
-                    self.unused.set(index);
-                } else {
-                    self.fallback_allocator.rawFree(buf, buf_align, return_address);
-                }
-            }
-        };
-
         // TODO: support builtin compression
         const can_sendfile = !ssl_enabled;
diff --git a/src/javascript/jsc/api/transpiler.zig b/src/javascript/jsc/api/transpiler.zig
index e7c78ac78..9b4fce4de 100644
--- a/src/javascript/jsc/api/transpiler.zig
+++ b/src/javascript/jsc/api/transpiler.zig
@@ -41,6 +41,8 @@ const JSPrinter = @import("../../../js_printer.zig");
 const ScanPassResult = JSParser.ScanPassResult;
 const Mimalloc = @import("../../../mimalloc_arena.zig");
 const Runtime = @import("../../../runtime.zig").Runtime;
+const JSLexer = @import("../../../js_lexer.zig");
+const Expr = JSAst.Expr;
 
 bundler: Bundler.Bundler,
 arena: std.heap.ArenaAllocator,
@@ -64,6 +66,12 @@ pub const Class = NewClass(
         .transformSync = .{
             .rfn = transformSync,
         },
+        // .resolve = .{
+        //     .rfn = resolve,
+        // },
+        // .buildSync = .{
+        //     .rfn = buildSync,
+        // },
         .finalize = finalize,
     },
     .{},
@@ -82,7 +90,6 @@ const default_transform_options: Api.TransformOptions = brk: {
     opts.disable_hmr = true;
     opts.platform = Api.Platform.browser;
     opts.serve = false;
-
     break :brk opts;
 };
 
@@ -94,8 +101,9 @@ const TranspilerOptions = struct {
     tsconfig_buf: []const u8 = "",
     macros_buf: []const u8 = "",
     log: logger.Log,
-    pending_tasks: u32 = 0,
     runtime: Runtime.Features = Runtime.Features{ .top_level_await = true },
+    tree_shaking: bool = false,
+    trim_unused_imports: ?bool = null,
 };
 
 // Mimalloc gets unstable if we try to move this to a different thread
@@ -114,6 +122,7 @@ pub const TransformTask = struct {
     tsconfig: ?*TSConfigJSON = null,
     loader: Loader,
     global: *JSGlobalObject,
+    replace_exports: Runtime.Features.ReplaceableExport.Map = .{},
 
     pub const AsyncTransformTask = JSC.ConcurrentPromiseTask(TransformTask);
     pub const AsyncTransformEventLoopTask = AsyncTransformTask.EventLoopTask;
@@ -129,6 +138,7 @@ pub const TransformTask = struct {
             .tsconfig = transpiler.transpiler_options.tsconfig,
             .log = logger.Log.init(bun.default_allocator),
             .loader = loader,
+            .replace_exports = transpiler.transpiler_options.runtime.replace_exports,
         };
         transform_task.bundler = transpiler.bundler;
         transform_task.bundler.linker.resolver = &transform_task.bundler.resolver;
@@ -170,6 +180,7 @@ pub const TransformTask = struct {
             .jsx = jsx,
             .path = source.path,
             .virtual_source = &source,
+            .replace_exports = this.replace_exports,
             // .allocator = this.
         };
 
@@ -265,7 +276,63 @@ pub const TransformTask = struct {
     }
 };
 
-fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allocator, args: *JSC.Node.ArgumentsSlice, exception: JSC.C.ExceptionRef) TranspilerOptions {
+fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject) ?JSAst.Expr {
+    if (value.isBoolean()) {
+        return Expr{
+            .data = .{
+                .e_boolean = .{
+                    .value = value.toBoolean(),
+                },
+            },
+            .loc = logger.Loc.Empty,
+        };
+    }
+
+    if (value.isNumber()) {
+        return Expr{
+            .data = .{
+                .e_number = .{ .value = value.asNumber() },
+            },
+            .loc = logger.Loc.Empty,
+        };
+    }
+
+    if (value.isNull()) {
+        return Expr{
+            .data = .{
+                .e_null = .{},
+            },
+            .loc = logger.Loc.Empty,
+        };
+    }
+
+    if (value.isUndefined()) {
+        return Expr{
+            .data = .{
+                .e_undefined = .{},
+            },
+            .loc = logger.Loc.Empty,
+        };
+    }
+
+    if (value.isString()) {
+        var str = JSAst.E.String{
+            .utf8 = std.fmt.allocPrint(bun.default_allocator, "{}", .{value.getZigString(globalThis)}) catch unreachable,
+        };
+        var out = bun.default_allocator.create(JSAst.E.String) catch unreachable;
+        out.* = str;
+        return Expr{
+            .data = .{
+                .e_string = out,
+            },
+            .loc = logger.Loc.Empty,
+        };
+    }
+
+    return null;
+}
+
+fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allocator, args: *JSC.Node.ArgumentsSlice, exception: JSC.C.ExceptionRef) !TranspilerOptions {
     var globalThis = ctx.ptr();
     const object = args.next() orelse return TranspilerOptions{ .log = logger.Log.init(temp_allocator) };
     if (object.isUndefinedOrNull()) return TranspilerOptions{ .log = logger.Log.init(temp_allocator) };
@@ -498,6 +565,168 @@ fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allo
         }
     }
 
+    var tree_shaking: ?bool = null;
+    if (object.get(globalThis, "treeShaking")) |treeShaking| {
+        tree_shaking = treeShaking.toBoolean();
+    }
+
+    var trim_unused_imports: ?bool = null;
+    if (object.get(globalThis, "trimUnusedImports")) |trimUnusedImports| {
+        trim_unused_imports = trimUnusedImports.toBoolean();
+    }
+
+    if (object.getTruthy(globalThis, "exports")) |exports| {
+        if (!exports.isObject()) {
+            JSC.throwInvalidArguments("exports must be an object", .{}, ctx, exception);
+            return transpiler;
+        }
+
+        var replacements = Runtime.Features.ReplaceableExport.Map{};
+        errdefer replacements.clearAndFree(bun.default_allocator);
+
+        if (exports.getTruthy(globalThis, "eliminate")) |eliminate| {
+            if (!eliminate.jsType().isArray()) {
+                JSC.throwInvalidArguments("exports.eliminate must be an array", .{}, ctx, exception);
+                return transpiler;
+            }
+
+            var total_name_buf_len: u32 = 0;
+            var string_count: u32 = 0;
+            var iter = JSC.JSArrayIterator.init(eliminate, globalThis);
+            {
+                var length_iter = iter;
+                while (length_iter.next()) |value| {
+                    if (value.isString()) {
+                        const length = value.getLengthOfArray(globalThis);
+                        string_count += @as(u32, @boolToInt(length > 0));
+                        total_name_buf_len += length;
+                    }
+                }
+            }
+
+            if (total_name_buf_len > 0) {
+                var buf = try std.ArrayListUnmanaged(u8).initCapacity(bun.default_allocator, total_name_buf_len);
+                try replacements.ensureUnusedCapacity(bun.default_allocator, string_count);
+                {
+                    var length_iter = iter;
+                    while (length_iter.next()) |value| {
+                        if (!value.isString()) continue;
+                        var str = value.getZigString(globalThis);
+                        if (str.len == 0) continue;
+                        const name = std.fmt.bufPrint(buf.items.ptr[buf.items.len..buf.capacity], "{}", .{str}) catch {
+                            JSC.throwInvalidArguments("Error reading exports.eliminate. TODO: utf-16", .{}, ctx, exception);
+                            return transpiler;
+                        };
+                        buf.items.len += name.len;
+                        if (name.len > 0) {
+                            replacements.putAssumeCapacity(name, .{ .delete = .{} });
+                        }
+                    }
+                }
+            }
+        }
+
+        if (exports.getTruthy(globalThis, "replace")) |replace| {
+            if (!replace.isObject()) {
+                JSC.throwInvalidArguments("replace must be an object", .{}, ctx, exception);
+                return transpiler;
+            }
+
+            var total_name_buf_len: usize = 0;
+
+            var array = js.JSObjectCopyPropertyNames(ctx, replace.asObjectRef());
+            defer js.JSPropertyNameArrayRelease(array);
+            const property_names_count = @intCast(u32, js.JSPropertyNameArrayGetCount(array));
+            var iter = JSC.JSPropertyNameIterator{
+                .array = array,
+                .count = @intCast(u32, property_names_count),
+            };
+
+            {
+                var key_iter = iter;
+                while (key_iter.next()) |item| {
+                    total_name_buf_len += JSC.C.JSStringGetLength(item);
+                }
+            }
+
+            if (total_name_buf_len > 0) {
+                var total_name_buf = try std.ArrayList(u8).initCapacity(bun.default_allocator, total_name_buf_len);
+                errdefer total_name_buf.clearAndFree();
+
+                try replacements.ensureUnusedCapacity(bun.default_allocator, property_names_count);
+                defer {
+                    if (exception.* != null) {
+                        total_name_buf.clearAndFree();
+                        replacements.clearAndFree(bun.default_allocator);
+                    }
+                }
+
+                while (iter.next()) |item| {
+                    const start = total_name_buf.items.len;
+                    total_name_buf.items.len += @maximum(
+                        // this returns a null terminated string
+                        JSC.C.JSStringGetUTF8CString(item, total_name_buf.items.ptr + start, total_name_buf.capacity - start),
+                        1,
+                    ) - 1;
+                    JSC.C.JSStringRelease(item);
+                    const key = total_name_buf.items[start..total_name_buf.items.len];
+                    // if somehow the string is empty, skip it
+                    if (key.len == 0)
+                        continue;
+
+                    const value = replace.get(globalThis, key).?;
+                    if (value.isEmpty()) continue;
+
+                    if (!JSLexer.isIdentifier(key)) {
+                        JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{key}, ctx, exception);
+                        total_name_buf.deinit();
+                        return transpiler;
+                    }
+
+                    var entry = replacements.getOrPutAssumeCapacity(key);
+
+                    if (exportReplacementValue(value, globalThis)) |expr| {
+                        entry.value_ptr.* = .{ .replace = expr };
+                        continue;
+                    }
+
+                    if (value.isObject() and value.getLengthOfArray(ctx.ptr()) == 2) {
+                        const replacementValue = JSC.JSObject.getIndex(value, globalThis, 1);
+                        if (exportReplacementValue(replacementValue, globalThis)) |to_replace| {
+                            const replacementKey = JSC.JSObject.getIndex(value, globalThis, 0);
+                            var slice = (try replacementKey.toSlice(globalThis, bun.default_allocator).cloneIfNeeded());
+                            var replacement_name = slice.slice();
+
+                            if (!JSLexer.isIdentifier(replacement_name)) {
+                                JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{replacement_name}, ctx, exception);
+                                total_name_buf.deinit();
+                                slice.deinit();
+                                return transpiler;
+                            }
+
+                            entry.value_ptr.* = .{
+                                .inject = .{
+                                    .name = replacement_name,
+                                    .value = to_replace,
+                                },
+                            };
+                            continue;
+                        }
+                    }
+
+                    JSC.throwInvalidArguments("exports.replace values can only be string, null, undefined, number or boolean", .{}, ctx, exception);
+                    return transpiler;
+                }
+            }
+        }
+
+        tree_shaking = tree_shaking orelse (replacements.count() > 0);
+        transpiler.runtime.replace_exports = replacements;
+    }
+
+    transpiler.tree_shaking = tree_shaking orelse false;
+    transpiler.trim_unused_imports = trim_unused_imports orelse transpiler.tree_shaking;
+    return transpiler;
 }
 
@@ -511,7 +740,10 @@ pub fn constructor(
     var args = JSC.Node.ArgumentsSlice.init(@ptrCast([*]const JSC.JSValue, arguments.ptr)[0..arguments.len]);
     defer temp.deinit();
     const transpiler_options: TranspilerOptions = if (arguments.len > 0)
-        transformOptionsFromJSC(ctx, temp.allocator(), &args, exception)
+        transformOptionsFromJSC(ctx, temp.allocator(), &args, exception) catch {
+            JSC.throwInvalidArguments("Failed to create transpiler", .{}, ctx, exception);
+            return null;
+        }
     else
         TranspilerOptions{ .log = logger.Log.init(getAllocator(ctx)) };
 
@@ -561,6 +793,8 @@ pub fn constructor(
         bundler.options.macro_remap = transpiler_options.macro_map;
     }
 
+    bundler.options.tree_shaking = transpiler_options.tree_shaking;
+    bundler.options.trim_unused_imports = transpiler_options.trim_unused_imports;
     bundler.options.allow_runtime = transpiler_options.runtime.allow_runtime;
     bundler.options.auto_import_jsx = transpiler_options.runtime.auto_import_jsx;
     bundler.options.hot_module_reloading = transpiler_options.runtime.hot_module_reloading;
@@ -612,6 +846,7 @@ fn getParseResult(this: *Transpiler, allocator: std.mem.Allocator, code: []const
         .jsx = jsx,
         .path = source.path,
         .virtual_source = &source,
+        .replace_exports = this.transpiler_options.runtime.replace_exports,
         // .allocator = this.
     };
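
The validation above pins down what `exports.replace` accepts: keys (and the replacement name in the tuple form) must be valid ECMAScript identifiers, and values may only be string, number, boolean, null, undefined, or a two-element `[name, value]` array. A sketch of shapes that should pass and shapes that should throw, based on those error paths; the try/catch harness is illustrative:

```js
// Accepted shapes, per the parsing code above:
new Bun.Transpiler({
  exports: {
    replace: {
      version: "1.2.3",                  // primitive: keep the name, swap the value
      getStaticProps: ["__N_SSG", true], // tuple: rename the export, assign the value
      debug: null,                       // null/undefined/number/boolean also allowed
    },
  },
});

// Expected to throw, per the error paths above:
for (const bad of [
  { "not-an-identifier": 1 }, // key is not a valid ECMAScript identifier
  { foo: {} },                // object that is not a two-element [name, value] array
  { foo: ["bad-name", 1] },   // tuple name is not a valid identifier
]) {
  try {
    new Bun.Transpiler({ exports: { replace: bad } });
  } catch (e) {
    console.log("rejected:", e.message);
  }
}
```
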
diff --git a/src/javascript/jsc/bindings/bindings.zig b/src/javascript/jsc/bindings/bindings.zig
index 35f565bc2..3cec43447 100644
--- a/src/javascript/jsc/bindings/bindings.zig
+++ b/src/javascript/jsc/bindings/bindings.zig
@@ -119,6 +119,15 @@ pub const ZigString = extern struct {
             return Slice{ .allocator = allocator, .ptr = duped.ptr, .len = this.len, .allocated = true };
         }
 
+        pub fn cloneIfNeeded(this: Slice) !Slice {
+            if (this.allocated) {
+                return this;
+            }
+
+            var duped = try this.allocator.dupe(u8, this.ptr[0..this.len]);
+            return Slice{ .allocator = this.allocator, .ptr = duped.ptr, .len = this.len, .allocated = true };
+        }
+
         pub fn cloneZ(this: Slice, allocator: std.mem.Allocator) !Slice {
             if (this.allocated or this.len == 0) {
                 return this;
diff --git a/src/options.zig b/src/options.zig
index e4b423098..99a82c7cb 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -1120,6 +1120,8 @@ pub const BundleOptions = struct {
     auto_import_jsx: bool = true,
     allow_runtime: bool = true,
+    trim_unused_imports: ?bool = null,
+
     hot_module_reloading: bool = false,
     inject: ?[]string = null,
     origin: URL = URL{},
diff --git a/src/runtime.zig b/src/runtime.zig
index 08d23ac7e..4b4a5a3e2 100644
--- a/src/runtime.zig
+++ b/src/runtime.zig
@@ -14,7 +14,7 @@ const resolve_path = @import("./resolver/resolve_path.zig");
 const Fs = @import("./fs.zig");
 const Schema = @import("./api/schema.zig");
 const Ref = @import("ast/base.zig").Ref;
-
+const JSAst = @import("./js_ast.zig");
 // packages/bun-cli-*/bun
 const BUN_ROOT = "../../";
 
@@ -273,6 +273,21 @@ pub const Runtime = struct {
         top_level_await: bool = false,
         auto_import_jsx: bool = false,
         allow_runtime: bool = true,
+
+        trim_unused_imports: bool = false,
+
+        replace_exports: ReplaceableExport.Map = .{},
+
+        pub const ReplaceableExport = union(enum) {
+            delete: void,
+            replace: JSAst.Expr,
+            inject: struct {
+                name: string,
+                value: JSAst.Expr,
+            },
+
+            pub const Map = std.StringArrayHashMapUnmanaged(ReplaceableExport);
+        };
     };
 
     pub const Names = struct {
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index 1483729fa..95bd8ee4d 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -1335,7 +1335,7 @@ pub inline fn wtf8ByteSequenceLengthWithInvalid(first_byte: u8) u3 {
         else if ((first_byte & 0xF8) == 0xF0)
             @as(u3, 4)
         else
-            @as(u3, 0),
+            @as(u3, 1),
     };
 }
 
diff --git a/src/toml/toml_lexer.zig b/src/toml/toml_lexer.zig
index 71e6378fb..fac1016d6 100644
--- a/src/toml/toml_lexer.zig
+++ b/src/toml/toml_lexer.zig
@@ -139,12 +139,12 @@ pub const Lexer = struct {
     }
 
     inline fn nextCodepointSlice(it: *Lexer) []const u8 {
-        const cp_len = strings.wtf8ByteSequenceLength(it.source.contents.ptr[it.current]);
+        const cp_len = strings.wtf8ByteSequenceLengthWithInvalid(it.source.contents.ptr[it.current]);
         return if (!(cp_len + it.current > it.source.contents.len)) it.source.contents[it.current .. cp_len + it.current] else "";
     }
 
     inline fn nextCodepoint(it: *Lexer) CodePoint {
-        const cp_len = strings.wtf8ByteSequenceLength(it.source.contents.ptr[it.current]);
+        const cp_len = strings.wtf8ByteSequenceLengthWithInvalid(it.source.contents.ptr[it.current]);
         const slice = if (!(cp_len + it.current > it.source.contents.len)) it.source.contents[it.current .. cp_len + it.current] else "";
 
         const code_point = switch (slice.len) {