about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-01-27 01:25:09 -0800
committerGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-01-27 01:25:09 -0800
commit32733210dc1de752e297ad4b2d4893d7a22ae4f6 (patch)
treeb0d06b7b4106355d0bebacc7bec970ac7822ca7c /src
parent9f5a1705a51b92cdeabce177dcef3a2459ef0c8a (diff)
downloadbun-32733210dc1de752e297ad4b2d4893d7a22ae4f6.tar.gz
bun-32733210dc1de752e297ad4b2d4893d7a22ae4f6.tar.zst
bun-32733210dc1de752e297ad4b2d4893d7a22ae4f6.zip
[bunfig] Implement config file format
Diffstat (limited to 'src')
-rw-r--r--src/analytics/analytics_thread.zig2
-rw-r--r--src/api/schema.d.ts8
-rw-r--r--src/api/schema.js23
-rw-r--r--src/api/schema.peechy5
-rw-r--r--src/api/schema.zig13
-rw-r--r--src/bun_js.zig5
-rw-r--r--src/bundler.zig55
-rw-r--r--src/bunfig.zig280
-rw-r--r--src/cli.zig274
-rw-r--r--src/cli/bun_command.zig11
-rw-r--r--src/defines.zig4
-rw-r--r--src/http.zig13
-rw-r--r--src/install/install.zig4
-rw-r--r--src/javascript/jsc/api/transpiler.zig3
-rw-r--r--src/javascript/jsc/javascript.zig18
-rw-r--r--src/javascript/jsc/test/jest.zig2
-rw-r--r--src/js_ast.zig53
-rw-r--r--src/logger.zig19
-rw-r--r--src/options.zig25
-rw-r--r--src/resolver/resolver.zig6
20 files changed, 667 insertions, 156 deletions
diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig
index 43d7e2050..1481dd94e 100644
--- a/src/analytics/analytics_thread.zig
+++ b/src/analytics/analytics_thread.zig
@@ -51,6 +51,7 @@ pub const Features = struct {
pub var origin = false;
pub var external = false;
pub var fetch = false;
+ pub var bunfig = false;
pub fn formatter() Formatter {
return Formatter{};
@@ -77,6 +78,7 @@ pub const Features = struct {
"origin",
"external",
"fetch",
+ "bunfig",
};
inline for (fields) |field| {
if (@field(Features, field)) {
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index 490a74421..f0437d19c 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -202,7 +202,8 @@ export enum MessageLevel {
err = 1,
warn = 2,
note = 3,
- debug = 4,
+ info = 4,
+ debug = 5,
}
export const MessageLevelKeys = {
1: "err",
@@ -211,7 +212,9 @@ export const MessageLevelKeys = {
warn: "warn",
3: "note",
note: "note",
- 4: "debug",
+ 4: "info",
+ info: "info",
+ 5: "debug",
debug: "debug",
};
export enum Reloader {
@@ -482,6 +485,7 @@ export interface TransformOptions {
no_summary?: boolean;
disable_hmr?: boolean;
port?: uint16;
+ logLevel?: MessageLevel;
}
export interface FileHandle {
diff --git a/src/api/schema.js b/src/api/schema.js
index 8339484da..0405b4457 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -1706,6 +1706,10 @@ function decodeTransformOptions(bb) {
result["port"] = bb.readUint16();
break;
+ case 26:
+ result["logLevel"] = MessageLevel[bb.readVarUint()];
+ break;
+
default:
throw new Error("Attempted to parse invalid message");
}
@@ -1902,6 +1906,17 @@ function encodeTransformOptions(message, bb) {
bb.writeByte(25);
bb.writeUint16(value);
}
+
+ var value = message["logLevel"];
+ if (value != null) {
+ bb.writeByte(26);
+ var encoded = MessageLevel[value];
+ if (encoded === void 0)
+ throw new Error(
+ "Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"'
+ );
+ bb.writeVarUint(encoded);
+ }
bb.writeByte(0);
}
@@ -2104,19 +2119,23 @@ const MessageLevel = {
2: 2,
3: 3,
4: 4,
+ 5: 5,
err: 1,
warn: 2,
note: 3,
- debug: 4,
+ info: 4,
+ debug: 5,
};
const MessageLevelKeys = {
1: "err",
2: "warn",
3: "note",
- 4: "debug",
+ 4: "info",
+ 5: "debug",
err: "err",
warn: "warn",
note: "note",
+ info: "info",
debug: "debug",
};
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index cd5134299..5dea057a9 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -342,6 +342,8 @@ message TransformOptions {
bool disable_hmr = 24;
uint16 port = 25;
+ MessageLevel logLevel = 26;
+
}
struct FileHandle {
@@ -379,7 +381,8 @@ enum MessageLevel {
err = 1;
warn =2;
note = 3;
- debug = 4;
+ info = 4;
+ debug = 5;
}
struct Location {
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 4fe8d8acb..4bca9cd1f 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -1735,6 +1735,9 @@ pub const Api = struct {
/// port
port: ?u16 = null,
+ /// logLevel
+ log_level: ?MessageLevel = null,
+
pub fn decode(reader: anytype) anyerror!TransformOptions {
var this = std.mem.zeroes(TransformOptions);
@@ -1819,6 +1822,9 @@ pub const Api = struct {
25 => {
this.port = try reader.readValue(u16);
},
+ 26 => {
+ this.log_level = try reader.readValue(MessageLevel);
+ },
else => {
return error.InvalidMessage;
},
@@ -1928,6 +1934,10 @@ pub const Api = struct {
try writer.writeFieldID(25);
try writer.writeInt(port);
}
+ if (this.log_level) |log_level| {
+ try writer.writeFieldID(26);
+ try writer.writeEnum(log_level);
+ }
try writer.endMessage();
}
};
@@ -2104,6 +2114,9 @@ pub const Api = struct {
/// note
note,
+ /// info
+ info,
+
/// debug
debug,
diff --git a/src/bun_js.zig b/src/bun_js.zig
index 0915cc55c..1caa77e72 100644
--- a/src/bun_js.zig
+++ b/src/bun_js.zig
@@ -38,6 +38,7 @@ pub const Run = struct {
ctx: Command.Context,
vm: *VirtualMachine,
entry_path: string,
+
pub fn boot(ctx: Command.Context, file: std.fs.File, entry_path: string) !void {
@import("javascript/jsc/javascript_core_c_api.zig").JSCInitialize();
@@ -53,6 +54,10 @@ pub const Run = struct {
run.vm.argv = ctx.positionals;
+ if (ctx.debug.macros) |macros| {
+ run.vm.bundler.options.macro_remap = macros;
+ }
+
run.vm.bundler.configureRouter(false) catch {
if (Output.enable_ansi_colors_stderr) {
run.vm.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
diff --git a/src/bundler.zig b/src/bundler.zig
index a4633421e..bcbcf910e 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -21,6 +21,7 @@ const js_ast = @import("js_ast.zig");
const linker = @import("linker.zig");
const Ref = @import("ast/base.zig").Ref;
const Define = @import("defines.zig").Define;
+const DebugOptions = @import("./cli.zig").Command.DebugOptions;
const panicky = @import("panic_handler.zig");
const Fs = @import("fs.zig");
@@ -692,6 +693,7 @@ pub const Bundler = struct {
always_bundled_package_hashes: []u32 = &[_]u32{},
always_bundled_package_jsons: []*const PackageJSON = &.{},
+ package_bundle_map: options.BundlePackage.Map = options.BundlePackage.Map{},
const U32Map = std.AutoHashMap(u32, u32);
pub const current_version: u32 = 1;
@@ -777,6 +779,7 @@ pub const Bundler = struct {
route_config: ?Api.LoadedRouteConfig,
destination: [*:0]const u8,
estimated_input_lines_of_code: *usize,
+ package_bundle_map: options.BundlePackage.Map,
) !?Api.JavascriptBundleContainer {
_ = try bundler.fs.fs.openTmpDir();
var tmpname_buf: [64]u8 = undefined;
@@ -814,6 +817,7 @@ pub const Bundler = struct {
.package_list_map = std.AutoHashMap(u64, u32).init(allocator),
.pool = undefined,
.write_lock = Lock.init(),
+ .package_bundle_map = package_bundle_map,
};
// dist/index.js appears more common than /index.js
// but this means we can store both "dist/index.js" and "index.js" in one.
@@ -850,15 +854,26 @@ pub const Bundler = struct {
break :brk read_dir.package_json.?;
};
Analytics.setProjectID(std.fs.path.dirname(root_package_json.source.path.text) orelse "/", root_package_json.name);
- Analytics.Features.macros = Analytics.Features.macros or root_package_json.macros.count() > 0;
+ if (bundler.macro_context) |macro_ctx| {
+ Analytics.Features.macros = macro_ctx.remap.count() > 0;
+ }
+
+ const bundle_keys = package_bundle_map.keys();
+ const do_always_bundle = package_bundle_map.values();
+ var always_bundle_count: u32 = 0;
+ for (do_always_bundle) |always| {
+ always_bundle_count += @as(u32, @boolToInt(always == .always));
+ }
- if (root_package_json.always_bundle.len > 0) {
+ if (always_bundle_count > 0) {
Analytics.Features.always_bundle = true;
- var always_bundled_package_jsons = bundler.allocator.alloc(*PackageJSON, root_package_json.always_bundle.len) catch unreachable;
- var always_bundled_package_hashes = bundler.allocator.alloc(u32, root_package_json.always_bundle.len) catch unreachable;
+ var always_bundled_package_jsons = bundler.allocator.alloc(*PackageJSON, always_bundle_count) catch unreachable;
+ var always_bundled_package_hashes = bundler.allocator.alloc(u32, always_bundle_count) catch unreachable;
var i: u16 = 0;
- inner: for (root_package_json.always_bundle) |name| {
+ inner: for (bundle_keys) |name, k| {
+ if (do_always_bundle[k] != .always) continue;
+
std.mem.copy(u8, &tmp_buildfile_buf, name);
std.mem.copy(u8, tmp_buildfile_buf[name.len..], "/package.json");
const package_json_import = tmp_buildfile_buf[0 .. name.len + "/package.json".len];
@@ -1294,6 +1309,10 @@ pub const Bundler = struct {
if (resolve_result.package_json) |pkg_| {
var pkg: *const PackageJSON = pkg_;
+ if (this.package_bundle_map.get(pkg.name)) |result| {
+ if (result == .never) return null;
+ }
+
if (std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash)) |pkg_i| {
pkg = this.always_bundled_package_jsons[pkg_i];
const key_path_source_dir = pkg.source.key_path.sourceDir();
@@ -1507,14 +1526,18 @@ pub const Bundler = struct {
var shared_buffer = &worker.data.shared_buffer;
var scan_pass_result = &worker.data.scan_pass_result;
+ var file_path = (resolve.pathConst() orelse unreachable).*;
- const is_from_node_modules = resolve.isLikelyNodeModule() or brk: {
- if (resolve.package_json) |pkg| {
- break :brk std.mem.indexOfScalar(u32, this.always_bundled_package_hashes, pkg.hash) != null;
+ const add_to_bundle = brk: {
+ if (resolve.package_json) |package_json| {
+ if (this.package_bundle_map.get(package_json.name)) |result| {
+ break :brk result == .always;
+ }
}
- break :brk false;
+
+ break :brk resolve.isLikelyNodeModule();
};
- var file_path = (resolve.pathConst() orelse unreachable).*;
+
const source_dir = file_path.sourceDir();
const loader = bundler.options.loader(file_path.name.ext);
const platform = bundler.options.platform;
@@ -1525,7 +1548,7 @@ pub const Bundler = struct {
var log = worker.data.log;
// If we're in a node_module, build that almost normally
- if (is_from_node_modules) {
+ if (add_to_bundle) {
var code_offset: u32 = 0;
const module_data = BundledModuleData.getForceBundleForMain(this, &resolve) orelse {
@@ -1625,7 +1648,7 @@ pub const Bundler = struct {
opts.enable_bundling = true;
opts.warn_about_unbundled_modules = false;
opts.macro_context = &worker.data.macro_context;
- opts.macro_context.remap = package.macros;
+
ast = (bundler.resolver.caches.js.parse(
bundler.allocator,
opts,
@@ -2024,7 +2047,6 @@ pub const Bundler = struct {
jsx.parse = loader.isJSX();
var opts = js_parser.Parser.Options.init(jsx, loader);
opts.macro_context = &worker.data.macro_context;
- opts.macro_context.remap = resolve.getMacroRemappings();
try bundler.resolver.caches.js.scan(
bundler.allocator,
@@ -2303,7 +2325,7 @@ pub const Bundler = struct {
.dirname_fd = resolve_result.dirname_fd,
.file_descriptor = file_descriptor,
.file_hash = filepath_hash,
- .macro_remappings = resolve_result.getMacroRemappings(),
+ .macro_remappings = bundler.options.macro_remap,
.jsx = resolve_result.jsx,
},
client_entry_point,
@@ -2416,7 +2438,7 @@ pub const Bundler = struct {
.dirname_fd = resolve_result.dirname_fd,
.file_descriptor = null,
.file_hash = null,
- .macro_remappings = resolve_result.getMacroRemappings(),
+ .macro_remappings = bundler.options.macro_remap,
.jsx = resolve_result.jsx,
},
client_entry_point_,
@@ -2720,7 +2742,7 @@ pub const Bundler = struct {
opts.features.top_level_await = true;
opts.macro_context = &bundler.macro_context.?;
- opts.macro_context.remap = this_parse.macro_remappings;
+
opts.features.is_macro_runtime = bundler.options.platform == .bun_macro;
const value = (bundler.resolver.caches.js.parse(
@@ -2942,6 +2964,7 @@ pub const Bundler = struct {
bundler.configureLinker();
try bundler.configureRouter(false);
try bundler.configureDefines();
+ bundler.macro_context = js_ast.Macro.MacroContext.init(&bundler);
var skip_normalize = false;
var load_from_routes = false;
diff --git a/src/bunfig.zig b/src/bunfig.zig
new file mode 100644
index 000000000..0dd9a7d3f
--- /dev/null
+++ b/src/bunfig.zig
@@ -0,0 +1,280 @@
+const std = @import("std");
+const _global = @import("./global.zig");
+const string = _global.string;
+const Output = _global.Output;
+const Global = _global.Global;
+const Environment = _global.Environment;
+const strings = _global.strings;
+const MutableString = _global.MutableString;
+const stringZ = _global.stringZ;
+const default_allocator = _global.default_allocator;
+const URL = @import("./query_string_map.zig").URL;
+const C = _global.C;
+const options = @import("./options.zig");
+const logger = @import("./logger.zig");
+const cache = @import("./cache.zig");
+const js_ast = @import("./js_ast.zig");
+const js_lexer = @import("./js_lexer.zig");
+const Defines = @import("./defines.zig");
+const ConditionsMap = @import("./resolver/package_json.zig").ESModule.ConditionsMap;
+const Api = @import("./api/schema.zig").Api;
+const Npm = @import("./install/npm.zig");
+const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
+const resolver = @import("./resolver/resolver.zig");
+pub const MacroImportReplacementMap = std.StringArrayHashMap(string);
+pub const MacroMap = std.StringArrayHashMapUnmanaged(MacroImportReplacementMap);
+pub const BundlePackageOverride = std.StringArrayHashMapUnmanaged(options.BundleOverride);
+const LoaderMap = std.StringArrayHashMapUnmanaged(options.Loader);
+const Analytics = @import("./analytics.zig");
+const JSONParser = @import("./json_parser.zig");
+const Command = @import("cli.zig").Command;
+
+pub const Bunfig = struct {
+ const Parser = struct {
+ json: js_ast.Expr,
+ source: *const logger.Source,
+ log: *logger.Log,
+ allocator: std.mem.Allocator,
+ bunfig: *Api.TransformOptions,
+ ctx: *Command.Context,
+
+ fn addError(this: *Parser, loc: logger.Loc, comptime text: string) !void {
+ this.log.addError(this.source, loc, text) catch unreachable;
+ return error.@"Invalid Bunfig";
+ }
+
+ pub fn parse(this: *Parser, comptime cmd: Command.Tag) !void {
+ const json = this.json;
+ var allocator = this.allocator;
+
+ if (json.data != .e_object) {
+ try this.addError(json.loc, "bunfig expects an object { } at the root");
+ }
+
+ if (json.get("define")) |expr| {
+ try this.expect(expr, .e_object);
+ var valid_count: usize = 0;
+ const properties = expr.data.e_object.properties;
+ for (properties) |prop| {
+ if (prop.value.?.data != .e_string) continue;
+ valid_count += 1;
+ }
+
+ var buffer = allocator.alloc([]const u8, valid_count * 2) catch unreachable;
+ var keys = buffer[0..valid_count];
+ var values = buffer[valid_count..];
+ var i: usize = 0;
+ for (properties) |prop| {
+ if (prop.value.?.data != .e_string) continue;
+ keys[i] = prop.key.?.data.e_string.string(allocator) catch unreachable;
+ values[i] = prop.value.?.data.e_string.string(allocator) catch unreachable;
+ i += 1;
+ }
+ this.bunfig.define = Api.StringMap{
+ .keys = keys,
+ .values = values,
+ };
+ }
+
+ if (json.get("origin")) |expr| {
+ try this.expect(expr, .e_string);
+ this.bunfig.origin = try expr.data.e_string.string(allocator);
+ }
+
+ if (comptime cmd == .DevCommand or cmd == .AutoCommand) {
+ if (json.get("dev")) |expr| {
+ if (expr.get("disableBunJS")) |disable| {
+ this.ctx.debug.fallback_only = disable.asBool() orelse false;
+ }
+
+ if (expr.get("port")) |port| {
+ try this.expect(port, .e_number);
+ this.bunfig.port = port.data.e_number.toU16();
+ if (this.bunfig.port.? == 0) {
+ this.bunfig.port = 3000;
+ }
+ }
+
+ if (expr.get("port")) |port| {
+ try this.expect(port, .e_number);
+ this.bunfig.port = port.data.e_number.toU16();
+ if (this.bunfig.port.? == 0) {
+ this.bunfig.port = 3000;
+ }
+ }
+ }
+ }
+
+ if (json.get("bundle")) |bun| {
+ if (comptime cmd == .DevCommand or cmd == .BuildCommand or cmd == .RunCommand or cmd == .AutoCommand or cmd == .BunCommand) {
+ if (bun.get("saveTo")) |file| {
+ try this.expect(file, .e_string);
+ this.bunfig.node_modules_bundle_path = try file.data.e_string.string(allocator);
+ }
+ }
+
+ if (comptime cmd == .BunCommand) {
+ if (bun.get("entryPoints")) |entryPoints| {
+ try this.expect(entryPoints, .e_array);
+ const items = entryPoints.data.e_array.items;
+ var names = try this.allocator.alloc(string, items.len);
+ for (items) |item, i| {
+ try this.expect(item, .e_string);
+ names[i] = try item.data.e_string.string(allocator);
+ }
+ this.bunfig.entry_points = names;
+ }
+
+ if (bun.get("packages")) |expr| {
+ try this.expect(expr, .e_object);
+ var valid_count: usize = 0;
+ Analytics.Features.always_bundle = true;
+
+ const object = expr.data.e_object;
+ for (object.properties) |prop| {
+ if (prop.value.?.data != .e_boolean) continue;
+ valid_count += 1;
+ }
+
+ try this.ctx.debug.package_bundle_map.ensureTotalCapacity(allocator, valid_count);
+
+ for (object.properties) |prop| {
+ if (prop.value.?.data != .e_boolean) continue;
+
+ const path = try prop.key.?.data.e_string.string(allocator);
+
+ if (!resolver.isPackagePath(path)) {
+ try this.addError(prop.key.?.loc, "Expected package name");
+ }
+
+ this.ctx.debug.package_bundle_map.putAssumeCapacity(path, switch (prop.value.?.asBool() orelse false) {
+ true => options.BundlePackage.always,
+ false => options.BundlePackage.never,
+ });
+ }
+ }
+ }
+ }
+
+ switch (comptime cmd) {
+ .AutoCommand, .DevCommand, .BuildCommand, .BunCommand => {
+ if (json.get("publicDir")) |public_dir| {
+ try this.expect(public_dir, .e_string);
+ this.bunfig.router = Api.RouteConfig{ .extensions = &.{}, .dir = &.{}, .static_dir = try public_dir.data.e_string.string(allocator) };
+ }
+ },
+ else => {},
+ }
+
+ if (json.get("macros")) |expr| {
+ // technical debt
+ this.ctx.debug.macros = PackageJSON.parseMacrosJSON(allocator, expr, this.log, this.source);
+ Analytics.Features.macros = true;
+ }
+
+ if (json.get("external")) |expr| {
+ switch (expr.data) {
+ .e_string => |str| {
+ var externals = try allocator.alloc(string, 1);
+ externals[0] = try str.string(allocator);
+ this.bunfig.external = externals;
+ },
+ .e_array => |array| {
+ var externals = try allocator.alloc(string, array.items.len);
+
+ for (array.items) |item, i| {
+ try this.expect(item, .e_string);
+ externals[i] = try item.data.e_string.string(allocator);
+ }
+
+ this.bunfig.external = externals;
+ },
+ else => try this.addError(expr.loc, "Expected string or array"),
+ }
+ }
+
+ if (json.get("framework")) |expr| {
+ try this.expect(expr, .e_string);
+ this.bunfig.framework = Api.FrameworkConfig{
+ .package = expr.asString(allocator).?,
+ };
+ }
+
+ if (json.get("loader")) |expr| {
+ try this.expect(expr, .e_object);
+ const properties = expr.data.e_object.properties;
+ var loader_names = try this.allocator.alloc(string, properties.len);
+ var loader_values = try this.allocator.alloc(Api.Loader, properties.len);
+
+ for (properties) |item, i| {
+ var key = item.key.?.asString(allocator).?;
+ if (key.len == 0) continue;
+ if (key[0] != '.') {
+ try this.addError(item.key.?.loc, "file extension must start with a dot");
+ }
+ var value = item.value.?;
+ try this.expect(value, .e_string);
+
+ const loader = options.Loader.fromString(value.asString(allocator).?) orelse {
+ try this.addError(value.loc, "Invalid loader");
+ unreachable;
+ };
+
+ loader_names[i] = key;
+ loader_values[i] = loader.toAPI();
+ }
+ this.bunfig.loaders = Api.LoaderMap{
+ .extensions = loader_names,
+ .loaders = loader_values,
+ };
+ }
+
+ if (json.get("logLevel")) |expr| {
+ try this.expect(expr, .e_string);
+ const Matcher = strings.ExactSizeMatcher(8);
+
+ this.bunfig.log_level = switch (Matcher.match(expr.asString(allocator).?)) {
+ Matcher.case("debug") => Api.MessageLevel.debug,
+ Matcher.case("error") => Api.MessageLevel.err,
+ Matcher.case("warn") => Api.MessageLevel.warn,
+ else => {
+ try this.addError(expr.loc, "Invalid log level, must be one of debug, error, or warn");
+ unreachable;
+ },
+ };
+ }
+
+ Analytics.Features.bunfig = true;
+ }
+
+ pub fn expect(this: *Parser, expr: js_ast.Expr, token: js_ast.Expr.Tag) !void {
+ if (@as(js_ast.Expr.Tag, expr.data) != token) {
+ this.log.addErrorFmt(this.source, expr.loc, this.allocator, "expected {} but received {}", .{
+ token,
+ @as(js_ast.Expr.Tag, expr.data),
+ }) catch unreachable;
+ return error.@"Invalid Bunfig";
+ }
+ }
+ };
+
+ pub fn parse(allocator: std.mem.Allocator, source: logger.Source, ctx: *Command.Context, comptime cmd: Command.Tag) !void {
+ const log_count = ctx.log.errors + ctx.log.warnings;
+ var expr = JSONParser.ParseTSConfig(&source, ctx.log, allocator) catch |err| {
+ if (ctx.log.errors + ctx.log.warnings == log_count) {
+ ctx.log.addErrorFmt(&source, logger.Loc.Empty, allocator, "Failed to parse", .{}) catch unreachable;
+ }
+ return err;
+ };
+
+ var parser = Parser{
+ .json = expr,
+ .log = ctx.log,
+ .allocator = allocator,
+ .source = &source,
+ .bunfig = &ctx.args,
+ .ctx = ctx,
+ };
+ try parser.parse(cmd);
+ }
+};
diff --git a/src/cli.zig b/src/cli.zig
index 8974704db..a223324a8 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -52,8 +52,11 @@ const ShellCompletions = @import("./cli/shell_completions.zig");
const TestCommand = @import("./cli/test_command.zig").TestCommand;
const UpgradeCommand = @import("./cli/upgrade_command.zig").UpgradeCommand;
+const MacroMap = @import("./resolver/package_json.zig").MacroMap;
+
const Reporter = @import("./report.zig");
var start_time: i128 = undefined;
+const Bunfig = @import("./bunfig.zig").Bunfig;
pub const Cli = struct {
var wait_group: sync.WaitGroup = undefined;
@@ -160,6 +163,7 @@ pub const Arguments = struct {
clap.parseParam("--bunfile <STR> Use a .bun file (default: node_modules.bun)") catch unreachable,
clap.parseParam("--server-bunfile <STR> Use a .server.bun file (default: node_modules.server.bun)") catch unreachable,
clap.parseParam("--cwd <STR> Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable,
+ clap.parseParam("-c, --config <PATH>? Config file to load bun from (e.g. -c bunfig.json") catch unreachable,
clap.parseParam("--disable-react-fast-refresh Disable React Fast Refresh") catch unreachable,
clap.parseParam("--disable-hmr Disable Hot Module Reloading (disables fast refresh too)") catch unreachable,
clap.parseParam("--extension-order <STR>... defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable,
@@ -181,7 +185,7 @@ pub const Arguments = struct {
clap.parseParam("-i, --inject <STR>... Inject module at the top of every file") catch unreachable,
clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx, ts, css") catch unreachable,
clap.parseParam("-u, --origin <STR> Rewrite import URLs to start with --origin. Default: \"\"") catch unreachable,
- clap.parseParam("-p, --port <STR> Port to serve bun's dev server on. Default: \"3000\"") catch unreachable,
+ clap.parseParam("-p, --port <STR> Port to serve bun's dev server on. Default: \"3000\"") catch unreachable,
clap.parseParam("--silent Don't repeat the command for bun run") catch unreachable,
clap.parseParam("<POS>... ") catch unreachable,
};
@@ -225,40 +229,112 @@ pub const Arguments = struct {
cwd = try std.process.getCwdAlloc(allocator);
}
+ var opts: Api.TransformOptions = ctx.args;
+ opts.absolute_working_dir = cwd;
+
+ if (comptime Command.Tag.loads_config.get(cmd)) {
+ if (args.option("--config")) |config_path__| {
+ var config_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ var config_path_ = config_path__;
+ if (config_path_.len == 0) {
+ config_path_ = "bunfig.json";
+ }
+ var config_path: [:0]u8 = undefined;
+ if (config_path_[0] == '/') {
+ @memcpy(&config_buf, config_path_.ptr, config_path_.len);
+ config_buf[config_path_.len] = 0;
+ config_path = config_buf[0..config_path_.len :0];
+ } else {
+ var parts = [_]string{ cwd, config_path_ };
+ config_path_ = resolve_path.joinAbsStringBuf(
+ cwd,
+ &config_buf,
+ &parts,
+ .auto,
+ );
+ config_buf[config_path_.len] = 0;
+ config_path = config_buf[0..config_path_.len :0];
+ }
+
+ var config_file = std.fs.openFileAbsoluteZ(config_path, .{ .read = true }) catch |err| {
+ Output.prettyErrorln("<r><red>error<r>: {s} opening config \"{s}\"", .{
+ @errorName(err),
+ std.mem.span(config_path),
+ });
+ Output.flush();
+ std.os.exit(1);
+ };
+ var contents = config_file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| {
+ Output.prettyErrorln("<r><red>error<r>: {s} reading config \"{s}\"", .{
+ @errorName(err),
+ std.mem.span(config_path),
+ });
+ Output.flush();
+ std.os.exit(1);
+ };
+
+ js_ast.Stmt.Data.Store.create(allocator);
+ js_ast.Expr.Data.Store.create(allocator);
+ defer {
+ js_ast.Stmt.Data.Store.reset();
+ js_ast.Expr.Data.Store.reset();
+ }
+ var original_level = ctx.log.level;
+ defer {
+ ctx.log.level = original_level;
+ }
+ ctx.log.level = logger.Log.Level.warn;
+ try Bunfig.parse(allocator, logger.Source.initPathString(std.mem.span(config_path), contents), ctx, cmd);
+ opts = ctx.args;
+ }
+ }
+
var defines_tuple = try DefineColonList.resolve(allocator, args.options("--define"));
+
+ if (defines_tuple.keys.len > 0) {
+ opts.define = .{
+ .keys = defines_tuple.keys,
+ .values = defines_tuple.values,
+ };
+ }
+
var loader_tuple = try LoaderColonList.resolve(allocator, args.options("--loader"));
- var externals = std.mem.zeroes([][]u8);
+
+ if (loader_tuple.keys.len > 0) {
+ opts.loaders = .{
+ .extensions = loader_tuple.keys,
+ .loaders = loader_tuple.values,
+ };
+ }
+
if (args.options("--external").len > 0) {
- externals = try allocator.alloc([]u8, args.options("--external").len);
+ var externals = try allocator.alloc([]u8, args.options("--external").len);
for (args.options("--external")) |external, i| {
externals[i] = constStrToU8(external);
}
+ opts.external = externals;
}
- var opts = Api.TransformOptions{
- .tsconfig_override = if (args.option("--tsconfig-override")) |ts| (Arguments.readFile(allocator, cwd, ts) catch |err| fileReadError(err, Output.errorStream(), ts, "tsconfig.json")) else null,
- .external = externals,
- .absolute_working_dir = cwd,
- .origin = args.option("--origin"),
- .define = .{
- .keys = defines_tuple.keys,
- .values = defines_tuple.values,
- },
- .loaders = .{
- .extensions = loader_tuple.keys,
- .loaders = loader_tuple.values,
- },
- .port = if (args.option("--port")) |port_str| std.fmt.parseInt(u16, port_str, 10) catch return error.InvalidPort else null,
-
- .serve = cmd == .DevCommand or (FeatureFlags.dev_only and cmd == .AutoCommand),
- .main_fields = args.options("--main-fields"),
- .generate_node_module_bundle = cmd == .BunCommand,
- .inject = args.options("--inject"),
- .extension_order = args.options("--extension-order"),
- .entry_points = undefined,
- .no_summary = args.flag("--no-summary"),
- .disable_hmr = args.flag("--disable-hmr"),
- };
+ opts.tsconfig_override = if (args.option("--tsconfig-override")) |ts|
+ (Arguments.readFile(allocator, cwd, ts) catch |err| fileReadError(err, Output.errorStream(), ts, "tsconfig.json"))
+ else
+ null;
+
+ if (args.option("--origin")) |origin| {
+ opts.origin = origin;
+ }
+
+ if (args.option("--port")) |port_str| {
+ opts.port = std.fmt.parseInt(u16, port_str, 10) catch return error.InvalidPort;
+ }
+ opts.serve = cmd == .DevCommand or (FeatureFlags.dev_only and cmd == .AutoCommand);
+ opts.main_fields = args.options("--main-fields");
+ opts.generate_node_module_bundle = cmd == .BunCommand;
+ opts.inject = args.options("--inject");
+ opts.extension_order = args.options("--extension-order");
+
+ opts.no_summary = args.flag("--no-summary");
+ opts.disable_hmr = args.flag("--disable-hmr");
ctx.positionals = args.positionals();
ctx.debug.silent = args.flag("--silent");
@@ -277,70 +353,70 @@ pub const Arguments = struct {
}
ctx.debug.dump_environment_variables = args.flag("--dump-environment-variables");
- ctx.debug.fallback_only = args.flag("--disable-bun.js");
+ ctx.debug.fallback_only = ctx.debug.fallback_only or args.flag("--disable-bun.js");
ctx.debug.dump_limits = args.flag("--dump-limits");
// var output_dir = args.option("--outdir");
var output_dir: ?string = null;
+ const production = false;
+
+ if (opts.entry_points.len == 0) {
+ var entry_points = args.positionals();
+
+ switch (comptime cmd) {
+ .BunCommand => {
+ if (entry_points.len > 0 and (strings.eqlComptime(
+ entry_points[0],
+ "bun",
+ ))) {
+ entry_points = entry_points[1..];
+ }
+ },
+ .DevCommand => {
+ if (entry_points.len > 0 and (strings.eqlComptime(
+ entry_points[0],
+ "dev",
+ ) or strings.eqlComptime(
+ entry_points[0],
+ "d",
+ ))) {
+ entry_points = entry_points[1..];
+ }
+ },
+ .BuildCommand => {
+ if (entry_points.len > 0 and (strings.eqlComptime(
+ entry_points[0],
+ "build",
+ ) or strings.eqlComptime(
+ entry_points[0],
+ "b",
+ ))) {
+ entry_points = entry_points[1..];
+ }
- var entry_points = args.positionals();
-
- switch (comptime cmd) {
- .BunCommand => {
- if (entry_points.len > 0 and (strings.eqlComptime(
- entry_points[0],
- "bun",
- ))) {
- entry_points = entry_points[1..];
- }
- },
- .DevCommand => {
- if (entry_points.len > 0 and (strings.eqlComptime(
- entry_points[0],
- "dev",
- ) or strings.eqlComptime(
- entry_points[0],
- "d",
- ))) {
- entry_points = entry_points[1..];
- }
- },
- .BuildCommand => {
- if (entry_points.len > 0 and (strings.eqlComptime(
- entry_points[0],
- "build",
- ) or strings.eqlComptime(
- entry_points[0],
- "b",
- ))) {
- entry_points = entry_points[1..];
- }
- },
- .RunCommand => {
- if (entry_points.len > 0 and (strings.eqlComptime(
- entry_points[0],
- "run",
- ) or strings.eqlComptime(
- entry_points[0],
- "r",
- ))) {
- entry_points = entry_points[1..];
- }
- },
- else => {},
- }
-
- const production = false; //args.flag("--production");
- if (comptime cmd == .BuildCommand) {
- var write = entry_points.len > 1 or output_dir != null;
- if (write and output_dir == null) {
- var _paths = [_]string{ cwd, "out" };
- output_dir = try std.fs.path.resolve(allocator, &_paths);
+ var write = entry_points.len > 1 or output_dir != null;
+ if (write and output_dir == null) {
+ var _paths = [_]string{ cwd, "out" };
+ output_dir = try std.fs.path.resolve(allocator, &_paths);
+ }
+ opts.write = write;
+ },
+ .RunCommand => {
+ if (entry_points.len > 0 and (strings.eqlComptime(
+ entry_points[0],
+ "run",
+ ) or strings.eqlComptime(
+ entry_points[0],
+ "r",
+ ))) {
+ entry_points = entry_points[1..];
+ }
+ },
+ else => {},
}
- opts.write = write;
- }
- opts.entry_points = entry_points;
+ opts.entry_points = entry_points;
+ }
var jsx_factory = args.option("--jsx-factory");
var jsx_fragment = args.option("--jsx-fragment");
@@ -353,13 +429,13 @@ pub const Arguments = struct {
};
if (comptime Command.Tag.cares_about_bun_file.get(cmd)) {
- opts.node_modules_bundle_path = args.option("--bunfile") orelse brk: {
+ opts.node_modules_bundle_path = args.option("--bunfile") orelse opts.node_modules_bundle_path orelse brk: {
const node_modules_bundle_path_absolute = resolve_path.joinAbs(cwd, .auto, "node_modules.bun");
break :brk std.fs.realpathAlloc(allocator, node_modules_bundle_path_absolute) catch null;
};
- opts.node_modules_bundle_path_server = args.option("--server-bunfile") orelse brk: {
+ opts.node_modules_bundle_path_server = args.option("--server-bunfile") orelse opts.node_modules_bundle_path_server orelse brk: {
const node_modules_bundle_path_absolute = resolve_path.joinAbs(cwd, .auto, "node_modules.server.bun");
break :brk std.fs.realpathAlloc(allocator, node_modules_bundle_path_absolute) catch null;
@@ -412,10 +488,11 @@ pub const Arguments = struct {
const PlatformMatcher = strings.ExactSizeMatcher(8);
if (args.option("--platform")) |_platform| {
- opts.platform = switch (PlatformMatcher.match(_platform)) {
+ opts.platform = opts.platform orelse switch (PlatformMatcher.match(_platform)) {
PlatformMatcher.case("browser") => Api.Platform.browser,
PlatformMatcher.case("node") => Api.Platform.node,
- PlatformMatcher.case("macro"), PlatformMatcher.case("bun") => Api.Platform.bun,
+ PlatformMatcher.case("macro") => if (cmd == .BuildCommand) Api.Platform.bun_macro else Api.Platform.bun,
+ PlatformMatcher.case("bun") => Api.Platform.bun,
else => invalidPlatform(&diag, _platform),
};
}
@@ -447,11 +524,21 @@ pub const Arguments = struct {
}
if (cmd == .BunCommand or !FeatureFlags.dev_only) {
- if (entry_points.len == 0 and opts.framework == null and opts.node_modules_bundle_path == null) {
+ if (opts.entry_points.len == 0 and opts.framework == null and opts.node_modules_bundle_path == null) {
return error.MissingEntryPoint;
}
}
+ if (opts.log_level) |log_level| {
+ logger.Log.default_log_level = switch (log_level) {
+ .debug => logger.Log.Level.debug,
+ .err => logger.Log.Level.err,
+ .warn => logger.Log.Level.warn,
+ else => logger.Log.Level.err,
+ };
+ ctx.log.level = logger.Log.default_log_level;
+ }
+
opts.output_dir = output_dir;
return opts;
}
@@ -587,6 +674,10 @@ pub const Command = struct {
dump_limits: bool = false,
fallback_only: bool = false,
silent: bool = false,
+
+ // technical debt
+ macros: ?MacroMap = null,
+ package_bundle_map: std.StringArrayHashMapUnmanaged(options.BundlePackage) = std.StringArrayHashMapUnmanaged(options.BundlePackage){},
};
pub const Context = struct {
@@ -1029,8 +1120,13 @@ pub const Command = struct {
.DevCommand = true,
.RunCommand = true,
.TestCommand = true,
+ .InstallCommand = true,
+ .AddCommand = true,
+ .RemoveCommand = true,
});
+ pub const loads_config = cares_about_bun_file;
+
pub const uses_global_options: std.EnumArray(Tag, bool) = std.EnumArray(Tag, bool).initDefault(true, .{
.CreateCommand = false,
.InstallCommand = false,
diff --git a/src/cli/bun_command.zig b/src/cli/bun_command.zig
index a0f7da655..454a9bf61 100644
--- a/src/cli/bun_command.zig
+++ b/src/cli/bun_command.zig
@@ -52,7 +52,6 @@ const ServerBundleGeneratorThread = struct {
env_loader_,
);
server_bundler.configureLinker();
-
server_bundler.options.jsx.supports_fast_refresh = false;
server_bundler.router = router;
@@ -61,6 +60,10 @@ const ServerBundleGeneratorThread = struct {
return err;
};
+ if (ctx.debug.macros) |macros| {
+ server_bundler.options.macro_remap = macros;
+ }
+
var estimated_input_lines_of_code: usize = 0;
_ = try bundler.Bundler.GenerateNodeModuleBundle.generate(
&server_bundler,
@@ -69,6 +72,7 @@ const ServerBundleGeneratorThread = struct {
route_conf_,
_filepath,
&estimated_input_lines_of_code,
+ ctx.debug.package_bundle_map,
);
std.mem.doNotOptimizeAway(&server_bundler);
}
@@ -118,6 +122,10 @@ pub const BunCommand = struct {
this_bundler.options.node_modules_bundle_url = "";
};
+ if (ctx.debug.macros) |macros| {
+ this_bundler.macro_context.?.remap = macros;
+ }
+
var loaded_route_config: ?Api.LoadedRouteConfig = brk: {
if (this_bundler.options.routes.routes_enabled) {
break :brk this_bundler.options.routes.toAPI();
@@ -176,6 +184,7 @@ pub const BunCommand = struct {
loaded_route_config,
filepath,
&estimated_input_lines_of_code_,
+ ctx.debug.package_bundle_map,
);
const estimated_input_lines_of_code = estimated_input_lines_of_code_;
diff --git a/src/defines.zig b/src/defines.zig
index 7259f41d2..84eda9c56 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -83,9 +83,9 @@ pub const DefineData = struct {
while (splitter.next()) |part| {
if (!js_lexer.isIdentifier(part)) {
if (strings.eql(part, entry.key_ptr)) {
- try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key_ptr.*});
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{s}\" must be a valid identifier", .{entry.key_ptr.*});
} else {
- try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.value_ptr.* });
+ try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.value_ptr.* });
}
break;
}
diff --git a/src/http.zig b/src/http.zig
index 59d90cacd..abcd60bdb 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -837,14 +837,7 @@ pub const RequestContext = struct {
const file_path_str = watchlist_slice.items(.file_path)[index];
const fd = watchlist_slice.items(.fd)[index];
const loader = watchlist_slice.items(.loader)[index];
- const macro_remappings = brk: {
- if (watchlist_slice.items(.package_json)[index]) |package_json| {
- break :brk package_json.macros;
- }
-
- break :brk MacroMap{};
- };
-
+ const macro_remappings = this.bundler.options.macro_remap;
const path = Fs.Path.init(file_path_str);
var old_log = this.bundler.log;
this.bundler.setLog(&log);
@@ -3348,8 +3341,6 @@ pub const Server = struct {
const dir_info = (this.bundler.resolver.readDirInfo(this.bundler.fs.top_level_dir) catch return) orelse return;
if (dir_info.package_json) |pkg| {
- Analytics.Features.macros = Analytics.Features.macros or pkg.macros.count() > 0;
- Analytics.Features.always_bundle = pkg.always_bundle.len > 0;
Analytics.setProjectID(dir_info.abs_path, pkg.name);
} else {
Analytics.setProjectID(dir_info.abs_path, "");
@@ -3391,6 +3382,8 @@ pub const Server = struct {
return;
}
+ server.bundler.options.macro_remap = debug.macros orelse .{};
+
if (debug.fallback_only or server.bundler.env.map.get("BUN_DISABLE_BUN_JS") != null) {
RequestContext.fallback_only = true;
RequestContext.JavaScriptHandler.javascript_disabled = true;
diff --git a/src/install/install.zig b/src/install/install.zig
index 595d62135..7620356b6 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -5448,9 +5448,9 @@ pub const PackageManager = struct {
clap.parseParam("-p, --production Don't install devDependencies") catch unreachable,
clap.parseParam("--no-save Don't save a lockfile") catch unreachable,
clap.parseParam("--dry-run Don't install anything") catch unreachable,
- clap.parseParam("--lockfile <STR> Store & load a lockfile at a specific filepath") catch unreachable,
+ clap.parseParam("--lockfile <PATH> Store & load a lockfile at a specific filepath") catch unreachable,
clap.parseParam("-f, --force Always request the latest versions from the registry & reinstall all dependenices") catch unreachable,
- clap.parseParam("--cache-dir <STR> Store & load cached data from a specific directory path") catch unreachable,
+ clap.parseParam("--cache-dir <PATH> Store & load cached data from a specific directory path") catch unreachable,
clap.parseParam("--no-cache Ignore manifest cache entirely") catch unreachable,
clap.parseParam("--silent Don't log anything") catch unreachable,
clap.parseParam("--verbose Excessively verbose logging") catch unreachable,
diff --git a/src/javascript/jsc/api/transpiler.zig b/src/javascript/jsc/api/transpiler.zig
index c301311b3..894287b7c 100644
--- a/src/javascript/jsc/api/transpiler.zig
+++ b/src/javascript/jsc/api/transpiler.zig
@@ -538,9 +538,8 @@ pub fn constructor(
.scan_pass_result = ScanPassResult.init(getAllocator(ctx)),
};
- transpiler.bundler.macro_context = JSAst.Macro.MacroContext.init(&transpiler.bundler);
if (transpiler_options.macro_map.count() > 0) {
- transpiler.bundler.macro_context.?.remap = transpiler_options.macro_map;
+ bundler.options.macro_remap = transpiler_options.macro_map;
}
return Class.make(ctx, transpiler);
diff --git a/src/javascript/jsc/javascript.zig b/src/javascript/jsc/javascript.zig
index fe8aeb0c5..639736f91 100644
--- a/src/javascript/jsc/javascript.zig
+++ b/src/javascript/jsc/javascript.zig
@@ -1517,16 +1517,8 @@ pub const VirtualMachine = struct {
const macro_remappings = if (vm.macro_mode or !vm.has_any_macro_remappings or is_node_override)
MacroRemap{}
- else brk: {
- if (package_json) |pkg| {
- break :brk pkg.macros;
- }
-
- // TODO: find a way to pass the package_json through the resolve
- const resolve_result = vm.bundler.resolver.resolve(vm.bundler.fs.top_level_dir, specifier, .stmt) catch break :brk MacroRemap{};
-
- break :brk resolve_result.getMacroRemappings();
- };
+ else
+ vm.bundler.options.macro_remap;
var fallback_source: logger.Source = undefined;
@@ -1646,11 +1638,7 @@ pub const VirtualMachine = struct {
);
if (!vm.macro_mode) {
- vm.has_any_macro_remappings = vm.has_any_macro_remappings or brk: {
- if (result.package_json == null) break :brk false;
-
- break :brk result.package_json.?.macros.count() > 0;
- };
+ vm.has_any_macro_remappings = vm.has_any_macro_remappings or vm.bundler.options.macro_remap.count() > 0;
}
ret.result = result;
const result_path = result.pathConst() orelse return error.ModuleNotFound;
diff --git a/src/javascript/jsc/test/jest.zig b/src/javascript/jsc/test/jest.zig
index 2ffc8bd01..0c7392623 100644
--- a/src/javascript/jsc/test/jest.zig
+++ b/src/javascript/jsc/test/jest.zig
@@ -117,7 +117,7 @@ pub const TestRunner = struct {
pub const File = struct {
source: logger.Source = logger.Source.initEmptyFile(""),
- log: logger.Log = logger.Log.init(default_allocator),
+ log: logger.Log = logger.Log.initComptime(default_allocator),
module_scope: *DescribeScope = undefined,
pub const List = std.MultiArrayList(File);
diff --git a/src/js_ast.zig b/src/js_ast.zig
index c5dd9c56f..28f0dad85 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -1107,6 +1107,11 @@ pub const E = struct {
return @floatToInt(u32, @maximum(@trunc(self.value), 0));
}
+ pub inline fn toU16(self: Number) u16 {
+ @setRuntimeSafety(false);
+ return @floatToInt(u16, @maximum(@trunc(self.value), 0));
+ }
+
pub fn jsonStringify(self: *const Number, opts: anytype, o: anytype) !void {
return try std.json.stringify(self.value, opts, o);
}
@@ -1727,6 +1732,10 @@ pub const Expr = struct {
return false;
}
+ pub fn get(expr: *const Expr, name: string) ?Expr {
+ return if (asProperty(expr, name)) |query| query.expr else null;
+ }
+
// Making this comptime bloats the binary and doesn't seem to impact runtime performance.
pub fn asProperty(expr: *const Expr, name: string) ?Query {
if (std.meta.activeTag(expr.data) != .e_object) return null;
@@ -2310,6 +2319,48 @@ pub const Expr = struct {
// This should never make it to the printer
inline_identifier,
+ pub fn format(tag: Tag, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+ try switch (tag) {
+ .e_string => writer.writeAll("string"),
+ .e_array => writer.writeAll("array"),
+ .e_unary => writer.writeAll("unary"),
+ .e_binary => writer.writeAll("binary"),
+ .e_boolean => writer.writeAll("boolean"),
+ .e_super => writer.writeAll("super"),
+ .e_null => writer.writeAll("null"),
+ .e_undefined => writer.writeAll("undefined"),
+ .e_new => writer.writeAll("new"),
+ .e_function => writer.writeAll("function"),
+ .e_new_target => writer.writeAll("new target"),
+ .e_import_meta => writer.writeAll("import.meta"),
+ .e_call => writer.writeAll("call"),
+ .e_dot => writer.writeAll("dot"),
+ .e_index => writer.writeAll("index"),
+ .e_arrow => writer.writeAll("arrow"),
+ .e_identifier => writer.writeAll("identifier"),
+ .e_import_identifier => writer.writeAll("import identifier"),
+ .e_private_identifier => writer.writeAll("#privateIdentifier"),
+ .e_jsx_element => writer.writeAll("<jsx>"),
+ .e_missing => writer.writeAll("<missing>"),
+ .e_number => writer.writeAll("number"),
+ .e_big_int => writer.writeAll("BigInt"),
+ .e_object => writer.writeAll("object"),
+ .e_spread => writer.writeAll("..."),
+ .e_template_part => writer.writeAll("template_part"),
+ .e_template => writer.writeAll("template"),
+ .e_reg_exp => writer.writeAll("regexp"),
+ .e_await => writer.writeAll("await"),
+ .e_yield => writer.writeAll("yield"),
+ .e_if => writer.writeAll("if"),
+ .e_require_or_require_resolve => writer.writeAll("require_or_require_resolve"),
+ .e_import => writer.writeAll("import"),
+ .e_this => writer.writeAll("this"),
+ .e_class => writer.writeAll("class"),
+ .e_require => writer.writeAll("require"),
+ else => writer.writeAll(@tagName(tag)),
+ };
+ }
+
pub fn jsonStringify(self: @This(), opts: anytype, o: anytype) !void {
return try std.json.stringify(@tagName(self), opts, o);
}
@@ -3822,7 +3873,7 @@ pub const Macro = struct {
.macros = MacroMap.init(default_allocator),
.resolver = &bundler.resolver,
.env = bundler.env,
- .remap = MacroRemap{},
+ .remap = bundler.options.macro_remap,
};
}
diff --git a/src/logger.zig b/src/logger.zig
index eb986c842..2ae8a8d8a 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -485,6 +485,8 @@ pub const Log = struct {
msgs: ArrayList(Msg),
level: Level = if (Environment.isDebug) Level.info else Level.warn,
+ pub var default_log_level = if (Environment.isDebug) Level.info else Level.warn;
+
pub fn hasAny(this: *const Log) bool {
return (this.warnings + this.errors) > 0;
}
@@ -510,11 +512,28 @@ pub const Log = struct {
info,
warn,
err,
+
+ pub const label: std.EnumArray(Level, string) = brk: {
+ var map = std.EnumArray(Level, string).initFill("");
+ map.set(Level.verbose, "verbose");
+ map.set(Level.debug, "debug");
+ map.set(Level.info, "info");
+ map.set(Level.warn, "warn");
+ map.set(Level.err, "error");
+ break :brk map;
+ };
};
pub fn init(allocator: std.mem.Allocator) Log {
return Log{
.msgs = ArrayList(Msg).init(allocator),
+ .level = default_log_level,
+ };
+ }
+
+ pub fn initComptime(allocator: std.mem.Allocator) Log {
+ return Log{
+ .msgs = ArrayList(Msg).init(allocator),
};
}
diff --git a/src/options.zig b/src/options.zig
index 2e2f684b6..4702506e1 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -27,7 +27,7 @@ const StoredFileDescriptorType = _global.StoredFileDescriptorType;
const JSC = @import("./jsc.zig");
const Analytics = @import("./analytics/analytics_thread.zig");
-
+const MacroRemap = @import("./resolver/package_json.zig").MacroMap;
const DotEnv = @import("./env_loader.zig");
const assert = std.debug.assert;
@@ -350,6 +350,13 @@ pub const ExternalModules = struct {
});
};
+pub const BundlePackage = enum {
+ always,
+ never,
+
+ pub const Map = std.StringArrayHashMapUnmanaged(BundlePackage);
+};
+
pub const ModuleType = enum {
unknown,
cjs,
@@ -651,8 +658,15 @@ pub const Loader = enum(u3) {
loader.toZigString(&zig_str, global);
if (zig_str.len == 0) return null;
+ return fromString(zig_str.slice()) orelse {
+ JSC.throwInvalidArguments("invalid loader – must be js, jsx, tsx, ts, css, file, or json", .{}, global.ref(), exception);
+ return null;
+ };
+ }
+
+ pub fn fromString(slice_: string) ?Loader {
const LoaderMatcher = strings.ExactSizeMatcher(4);
- var slice = zig_str.slice();
+ var slice = slice_;
if (slice.len > 0 and slice[0] == '.') {
slice = slice[1..];
}
@@ -665,10 +679,7 @@ pub const Loader = enum(u3) {
LoaderMatcher.case("css") => Loader.css,
LoaderMatcher.case("file") => Loader.file,
LoaderMatcher.case("json") => Loader.json,
- else => {
- JSC.throwInvalidArguments("invalid loader – must be js, jsx, tsx, ts, css, file, or json", .{}, global.ref(), exception);
- return null;
- },
+ else => null,
};
}
@@ -1086,6 +1097,8 @@ pub const BundleOptions = struct {
transform_options: Api.TransformOptions,
polyfill_node_globals: bool = true,
+ macro_remap: MacroRemap = MacroRemap{},
+
conditions: ESMConditions = undefined,
pub inline fn cssImportBehavior(this: *const BundleOptions) Api.CssInJsBehavior {
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index bbb697d92..ef56d3faa 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -119,12 +119,6 @@ pub const Result = struct {
file_fd: StoredFileDescriptorType = 0,
import_kind: ast.ImportKind = undefined,
- pub fn getMacroRemappings(this: *const Result) MacroRemap {
- const pkg = this.package_json orelse return MacroRemap{};
-
- return pkg.macros;
- }
-
pub fn path(this: *Result) ?*Path {
if (!this.path_pair.primary.is_disabled)
return &this.path_pair.primary;