-rw-r--r--  demos/css-stress-test/.env                 |   1
-rw-r--r--  demos/css-stress-test/.env.development     |   1
-rw-r--r--  demos/css-stress-test/.env.local           |   1
-rw-r--r--  demos/css-stress-test/dotenv-load-check.ts |   6
-rw-r--r--  demos/css-stress-test/nexty/package.json   |  23
-rw-r--r--  src/api/schema.d.ts                        |  38
-rw-r--r--  src/api/schema.js                          | 153
-rw-r--r--  src/api/schema.peechy                      |  28
-rw-r--r--  src/api/schema.zig                         | 150
-rw-r--r--  src/bundler.zig                            | 146
-rw-r--r--  src/cli.zig                                |  22
-rw-r--r--  src/defines.zig                            |  13
-rw-r--r--  src/env_loader.zig                         | 393
-rw-r--r--  src/fs.zig                                 |  16
-rw-r--r--  src/global.zig                             |   3
-rw-r--r--  src/http.zig                               |  19
-rw-r--r--  src/javascript/jsc/javascript.zig          |   5
-rw-r--r--  src/options.zig                            | 245
-rw-r--r--  src/resolver/package_json.zig              | 142
-rw-r--r--  src/resolver/resolver.zig                  |   3
20 files changed, 1151 insertions(+), 257 deletions(-)
diff --git a/demos/css-stress-test/.env b/demos/css-stress-test/.env
new file mode 100644
index 000000000..0b3ef03c1
--- /dev/null
+++ b/demos/css-stress-test/.env
@@ -0,0 +1 @@
+NEXT_PUBLIC_TEST=1
\ No newline at end of file
diff --git a/demos/css-stress-test/.env.development b/demos/css-stress-test/.env.development
new file mode 100644
index 000000000..81dc6216e
--- /dev/null
+++ b/demos/css-stress-test/.env.development
@@ -0,0 +1 @@
+SO_MANY_DOT_ENVS=true
diff --git a/demos/css-stress-test/.env.local b/demos/css-stress-test/.env.local
new file mode 100644
index 000000000..1723445f0
--- /dev/null
+++ b/demos/css-stress-test/.env.local
@@ -0,0 +1 @@
+NEXT_PUBLIC_TEST=100
\ No newline at end of file
diff --git a/demos/css-stress-test/dotenv-load-check.ts b/demos/css-stress-test/dotenv-load-check.ts
new file mode 100644
index 000000000..fb035ee07
--- /dev/null
+++ b/demos/css-stress-test/dotenv-load-check.ts
@@ -0,0 +1,6 @@
+console.log(
+ "process.env.__NEXT_TRAILING_SLASH is set to",
+ process.env.__NEXT_TRAILING_SLASH ? true : false
+);
+
+console.log("SO_MANY_DOT_ENVS is:", SO_MANY_DOT_ENVS);
diff --git a/demos/css-stress-test/nexty/package.json b/demos/css-stress-test/nexty/package.json
index e2394eec5..d18211113 100644
--- a/demos/css-stress-test/nexty/package.json
+++ b/demos/css-stress-test/nexty/package.json
@@ -3,6 +3,8 @@
"version": "1.0.0",
"description": "",
"framework": {
+ "static": "public",
+ "assetPrefix": "_next/",
"router": {
"dir": "pages",
"extensions": [
@@ -11,11 +13,26 @@
".tsx"
]
},
- "static": "public",
- "assetPrefix": "_next/",
"development": {
"client": "client.development.tsx",
- "server": "server.development.tsx"
+ "server": "server.development.tsx",
+ "define": {
+ "client": {
+ ".env": "NEXT_PUBLIC_",
+ "defaults": {
+ "process.env.__NEXT_TRAILING_SLASH": "false"
+ }
+ },
+ "server": {
+ ".env": "*",
+ "defaults": {
+ "process.env.__NEXT_TRAILING_SLASH": "false",
+ "process.env.__NEXT_OPTIMIZE_FONTS": "false",
+ "process.env.__NEXT_OPTIMIZE_IMAGES": "false",
+ "process.env.__NEXT_OPTIMIZE_CSS": "false"
+ }
+ }
+ }
},
"production": {
"client": "client.production.tsx",
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index 1077a34f2..0b80700c5 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -96,6 +96,19 @@ type uint32 = number;
2: "require",
require: "require"
}
+ export enum DotEnvBehavior {
+ disable = 1,
+ prefix = 2,
+ load_all = 3
+ }
+ export const DotEnvBehaviorKeys = {
+ 1: "disable",
+ disable: "disable",
+ 2: "prefix",
+ prefix: "prefix",
+ 3: "load_all",
+ load_all: "load_all"
+ }
export enum TransformResponseStatus {
success = 1,
fail = 2
@@ -236,15 +249,24 @@ type uint32 = number;
loaders: Loader[];
}
+ export interface EnvConfig {
+ prefix?: string;
+ defaults?: StringMap;
+ }
+
+ export interface LoadedEnvConfig {
+ dotenv: DotEnvBehavior;
+ defaults: StringMap;
+ prefix: string;
+ }
+
export interface FrameworkConfig {
package?: string;
client?: string;
server?: string;
development?: boolean;
- client_defines?: StringMap;
- server_defines?: StringMap;
- client_defines_prefix?: string;
- server_defines_prefix?: string;
+ client_env?: EnvConfig;
+ server_env?: EnvConfig;
}
export interface LoadedFramework {
@@ -252,9 +274,7 @@ type uint32 = number;
package: string;
development: boolean;
client: boolean;
- define_defaults: StringMap;
- define_prefix: string;
- has_define_prefix: boolean;
+ env: LoadedEnvConfig;
}
export interface LoadedRouteConfig {
@@ -446,6 +466,10 @@ type uint32 = number;
export declare function decodeStringMap(buffer: ByteBuffer): StringMap;
export declare function encodeLoaderMap(message: LoaderMap, bb: ByteBuffer): void;
export declare function decodeLoaderMap(buffer: ByteBuffer): LoaderMap;
+ export declare function encodeEnvConfig(message: EnvConfig, bb: ByteBuffer): void;
+ export declare function decodeEnvConfig(buffer: ByteBuffer): EnvConfig;
+ export declare function encodeLoadedEnvConfig(message: LoadedEnvConfig, bb: ByteBuffer): void;
+ export declare function decodeLoadedEnvConfig(buffer: ByteBuffer): LoadedEnvConfig;
export declare function encodeFrameworkConfig(message: FrameworkConfig, bb: ByteBuffer): void;
export declare function decodeFrameworkConfig(buffer: ByteBuffer): FrameworkConfig;
export declare function encodeLoadedFramework(message: LoadedFramework, bb: ByteBuffer): void;
diff --git a/src/api/schema.js b/src/api/schema.js
index 178d95662..3a76caedf 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -588,6 +588,97 @@ bb.writeByte(encoded);
}
}
+const DotEnvBehavior = {
+ "1": 1,
+ "2": 2,
+ "3": 3,
+ "disable": 1,
+ "prefix": 2,
+ "load_all": 3
+};
+const DotEnvBehaviorKeys = {
+ "1": "disable",
+ "2": "prefix",
+ "3": "load_all",
+ "disable": "disable",
+ "prefix": "prefix",
+ "load_all": "load_all"
+};
+
+function decodeEnvConfig(bb) {
+ var result = {};
+
+ while (true) {
+ switch (bb.readByte()) {
+ case 0:
+ return result;
+
+ case 1:
+ result["prefix"] = bb.readString();
+ break;
+
+ case 2:
+ result["defaults"] = decodeStringMap(bb);
+ break;
+
+ default:
+ throw new Error("Attempted to parse invalid message");
+ }
+ }
+}
+
+function encodeEnvConfig(message, bb) {
+
+ var value = message["prefix"];
+ if (value != null) {
+ bb.writeByte(1);
+ bb.writeString(value);
+ }
+
+ var value = message["defaults"];
+ if (value != null) {
+ bb.writeByte(2);
+ encodeStringMap(value, bb);
+ }
+ bb.writeByte(0);
+
+}
+
+function decodeLoadedEnvConfig(bb) {
+ var result = {};
+
+ result["dotenv"] = DotEnvBehavior[bb.readVarUint()];
+ result["defaults"] = decodeStringMap(bb);
+ result["prefix"] = bb.readString();
+ return result;
+}
+
+function encodeLoadedEnvConfig(message, bb) {
+
+ var value = message["dotenv"];
+ if (value != null) {
+ var encoded = DotEnvBehavior[value];
+if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + " for enum \"DotEnvBehavior\"");
+bb.writeVarUint(encoded);
+ } else {
+ throw new Error("Missing required field \"dotenv\"");
+ }
+
+ var value = message["defaults"];
+ if (value != null) {
+ encodeStringMap(value, bb);
+ } else {
+ throw new Error("Missing required field \"defaults\"");
+ }
+
+ var value = message["prefix"];
+ if (value != null) {
+ bb.writeString(value);
+ } else {
+ throw new Error("Missing required field \"prefix\"");
+ }
+
+}
function decodeFrameworkConfig(bb) {
var result = {};
@@ -614,19 +705,11 @@ function decodeFrameworkConfig(bb) {
break;
case 5:
- result["client_defines"] = decodeStringMap(bb);
+ result["client_env"] = decodeEnvConfig(bb);
break;
case 6:
- result["server_defines"] = decodeStringMap(bb);
- break;
-
- case 7:
- result["client_defines_prefix"] = bb.readString();
- break;
-
- case 8:
- result["server_defines_prefix"] = bb.readString();
+ result["server_env"] = decodeEnvConfig(bb);
break;
default:
@@ -661,28 +744,16 @@ function encodeFrameworkConfig(message, bb) {
bb.writeByte(value);
}
- var value = message["client_defines"];
+ var value = message["client_env"];
if (value != null) {
bb.writeByte(5);
- encodeStringMap(value, bb);
+ encodeEnvConfig(value, bb);
}
- var value = message["server_defines"];
+ var value = message["server_env"];
if (value != null) {
bb.writeByte(6);
- encodeStringMap(value, bb);
- }
-
- var value = message["client_defines_prefix"];
- if (value != null) {
- bb.writeByte(7);
- bb.writeString(value);
- }
-
- var value = message["server_defines_prefix"];
- if (value != null) {
- bb.writeByte(8);
- bb.writeString(value);
+ encodeEnvConfig(value, bb);
}
bb.writeByte(0);
@@ -695,9 +766,7 @@ function decodeLoadedFramework(bb) {
result["package"] = bb.readString();
result["development"] = !!bb.readByte();
result["client"] = !!bb.readByte();
- result["define_defaults"] = decodeStringMap(bb);
- result["define_prefix"] = bb.readString();
- result["has_define_prefix"] = !!bb.readByte();
+ result["env"] = decodeLoadedEnvConfig(bb);
return result;
}
@@ -731,25 +800,11 @@ function encodeLoadedFramework(message, bb) {
throw new Error("Missing required field \"client\"");
}
- var value = message["define_defaults"];
+ var value = message["env"];
if (value != null) {
- encodeStringMap(value, bb);
- } else {
- throw new Error("Missing required field \"define_defaults\"");
- }
-
- var value = message["define_prefix"];
- if (value != null) {
- bb.writeString(value);
- } else {
- throw new Error("Missing required field \"define_prefix\"");
- }
-
- var value = message["has_define_prefix"];
- if (value != null) {
- bb.writeByte(value);
+ encodeLoadedEnvConfig(value, bb);
} else {
- throw new Error("Missing required field \"has_define_prefix\"");
+ throw new Error("Missing required field \"env\"");
}
}
@@ -2099,6 +2154,12 @@ export { decodeStringMap }
export { encodeStringMap }
export { decodeLoaderMap }
export { encodeLoaderMap }
+export { DotEnvBehavior }
+export { DotEnvBehaviorKeys }
+export { decodeEnvConfig }
+export { encodeEnvConfig }
+export { decodeLoadedEnvConfig }
+export { encodeLoadedEnvConfig }
export { decodeFrameworkConfig }
export { encodeFrameworkConfig }
export { decodeLoadedFramework }
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index 864c6b114..431fab3c1 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -135,15 +135,31 @@ struct LoaderMap {
Loader[] loaders;
}
+enum DotEnvBehavior {
+ disable = 1;
+ prefix = 2;
+ load_all = 3;
+}
+
+message EnvConfig {
+ string prefix = 1;
+ StringMap defaults = 2;
+}
+
+struct LoadedEnvConfig {
+ DotEnvBehavior dotenv;
+
+ StringMap defaults;
+ string prefix;
+}
+
message FrameworkConfig {
string package = 1;
string client = 2;
string server = 3;
bool development = 4;
- StringMap client_defines = 5;
- StringMap server_defines = 6;
- string client_defines_prefix = 7;
- string server_defines_prefix = 8;
+ EnvConfig client_env = 5;
+ EnvConfig server_env = 6;
}
struct LoadedFramework {
@@ -151,9 +167,7 @@ struct LoadedFramework {
string package;
bool development;
bool client;
- StringMap define_defaults;
- string define_prefix;
- bool has_define_prefix;
+ LoadedEnvConfig env;
}
struct LoadedRouteConfig {
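
In the schema, EnvConfig.prefix is what a framework author writes, while LoadedEnvConfig.dotenv records how the bundler interpreted it: "*" becomes load_all, any other non-empty string becomes prefix, and an empty or absent prefix stays disable (see Env.setBehaviorFromPrefix in options.zig further down). A small TypeScript sketch of that mapping, with enum values matching the schema:

```ts
// Mirrors Env.setBehaviorFromPrefix; numeric values match schema.peechy.
enum DotEnvBehavior {
  disable = 1,
  prefix = 2,
  load_all = 3,
}

function behaviorFromPrefix(prefix: string): { behavior: DotEnvBehavior; prefix: string } {
  if (prefix === "*") return { behavior: DotEnvBehavior.load_all, prefix: "" };
  if (prefix.length > 0) return { behavior: DotEnvBehavior.prefix, prefix };
  return { behavior: DotEnvBehavior.disable, prefix: "" };
}

behaviorFromPrefix("NEXT_PUBLIC_"); // { behavior: DotEnvBehavior.prefix, prefix: "NEXT_PUBLIC_" }
behaviorFromPrefix("*");            // { behavior: DotEnvBehavior.load_all, prefix: "" }
behaviorFromPrefix("");             // { behavior: DotEnvBehavior.disable, prefix: "" }
```
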
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 344a60a28..ffb949a1f 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -779,6 +779,98 @@ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
};
+pub const DotEnvBehavior = enum(u32) {
+
+_none,
+ /// disable
+ disable,
+
+ /// prefix
+ prefix,
+
+ /// load_all
+ load_all,
+
+_,
+
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
+
+
+};
+
+pub const EnvConfig = struct {
+/// prefix
+prefix: ?[]const u8 = null,
+
+/// defaults
+defaults: ?StringMap = null,
+
+
+pub fn decode(reader: anytype) anyerror!EnvConfig {
+ var this = std.mem.zeroes(EnvConfig);
+
+ while(true) {
+ switch (try reader.readByte()) {
+ 0 => { return this; },
+
+ 1 => {
+ this.prefix = try reader.readValue([]const u8);
+},
+ 2 => {
+ this.defaults = try reader.readValue(StringMap);
+},
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+unreachable;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+if (this.prefix) |prefix| {
+ try writer.writeFieldID(1);
+ try writer.writeValue(prefix);
+}
+if (this.defaults) |defaults| {
+ try writer.writeFieldID(2);
+ try writer.writeValue(defaults);
+}
+try writer.endMessage();
+}
+
+};
+
+pub const LoadedEnvConfig = struct {
+/// dotenv
+dotenv: DotEnvBehavior,
+
+/// defaults
+defaults: StringMap,
+
+/// prefix
+prefix: []const u8,
+
+
+pub fn decode(reader: anytype) anyerror!LoadedEnvConfig {
+ var this = std.mem.zeroes(LoadedEnvConfig);
+
+ this.dotenv = try reader.readValue(DotEnvBehavior);
+ this.defaults = try reader.readValue(StringMap);
+ this.prefix = try reader.readValue([]const u8);
+ return this;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeEnum(this.dotenv);
+ try writer.writeValue(this.defaults);
+ try writer.writeValue(this.prefix);
+}
+
+};
+
pub const FrameworkConfig = struct {
/// package
package: ?[]const u8 = null,
@@ -792,17 +884,11 @@ server: ?[]const u8 = null,
/// development
development: ?bool = null,
-/// client_defines
-client_defines: ?StringMap = null,
+/// client_env
+client_env: ?EnvConfig = null,
-/// server_defines
-server_defines: ?StringMap = null,
-
-/// client_defines_prefix
-client_defines_prefix: ?[]const u8 = null,
-
-/// server_defines_prefix
-server_defines_prefix: ?[]const u8 = null,
+/// server_env
+server_env: ?EnvConfig = null,
pub fn decode(reader: anytype) anyerror!FrameworkConfig {
@@ -825,16 +911,10 @@ pub fn decode(reader: anytype) anyerror!FrameworkConfig {
this.development = try reader.readValue(bool);
},
5 => {
- this.client_defines = try reader.readValue(StringMap);
+ this.client_env = try reader.readValue(EnvConfig);
},
6 => {
- this.server_defines = try reader.readValue(StringMap);
-},
- 7 => {
- this.client_defines_prefix = try reader.readValue([]const u8);
-},
- 8 => {
- this.server_defines_prefix = try reader.readValue([]const u8);
+ this.server_env = try reader.readValue(EnvConfig);
},
else => {
return error.InvalidMessage;
@@ -861,21 +941,13 @@ if (this.development) |development| {
try writer.writeFieldID(4);
try writer.writeInt(@intCast(u8, @boolToInt(development)));
}
-if (this.client_defines) |client_defines| {
+if (this.client_env) |client_env| {
try writer.writeFieldID(5);
- try writer.writeValue(client_defines);
+ try writer.writeValue(client_env);
}
-if (this.server_defines) |server_defines| {
+if (this.server_env) |server_env| {
try writer.writeFieldID(6);
- try writer.writeValue(server_defines);
-}
-if (this.client_defines_prefix) |client_defines_prefix| {
- try writer.writeFieldID(7);
- try writer.writeValue(client_defines_prefix);
-}
-if (this.server_defines_prefix) |server_defines_prefix| {
- try writer.writeFieldID(8);
- try writer.writeValue(server_defines_prefix);
+ try writer.writeValue(server_env);
}
try writer.endMessage();
}
@@ -895,14 +967,8 @@ development: bool = false,
/// client
client: bool = false,
-/// define_defaults
-define_defaults: StringMap,
-
-/// define_prefix
-define_prefix: []const u8,
-
-/// has_define_prefix
-has_define_prefix: bool = false,
+/// env
+env: LoadedEnvConfig,
pub fn decode(reader: anytype) anyerror!LoadedFramework {
@@ -912,9 +978,7 @@ pub fn decode(reader: anytype) anyerror!LoadedFramework {
this.package = try reader.readValue([]const u8);
this.development = try reader.readValue(bool);
this.client = try reader.readValue(bool);
- this.define_defaults = try reader.readValue(StringMap);
- this.define_prefix = try reader.readValue([]const u8);
- this.has_define_prefix = try reader.readValue(bool);
+ this.env = try reader.readValue(LoadedEnvConfig);
return this;
}
@@ -923,9 +987,7 @@ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
try writer.writeValue(this.package);
try writer.writeInt(@intCast(u8, @boolToInt(this.development)));
try writer.writeInt(@intCast(u8, @boolToInt(this.client)));
- try writer.writeValue(this.define_defaults);
- try writer.writeValue(this.define_prefix);
- try writer.writeInt(@intCast(u8, @boolToInt(this.has_define_prefix)));
+ try writer.writeValue(this.env);
}
};
diff --git a/src/bundler.zig b/src/bundler.zig
index 377079355..4fba107c0 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -33,7 +33,7 @@ const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;
const Router = @import("./router.zig");
const isPackagePath = _resolver.isPackagePath;
const Css = @import("css_scanner.zig");
-
+const DotEnv = @import("./env_loader.zig");
pub const ServeResult = struct {
file: options.OutputFile,
mime_type: MimeType,
@@ -244,6 +244,7 @@ pub fn NewBundler(cache_files: bool) type {
linker: Linker,
timer: Timer = Timer{},
+ env: *DotEnv.Loader,
        // must be a pointer array because we don't want the source to point to invalid memory if the array size is reallocated
virtual_modules: std.ArrayList(*ClientEntryPoint),
@@ -259,6 +260,7 @@ pub fn NewBundler(cache_files: bool) type {
log: *logger.Log,
opts: Api.TransformOptions,
existing_bundle: ?*NodeModuleBundle,
+ env_loader_: ?*DotEnv.Loader,
) !ThisBundler {
js_ast.Expr.Data.Store.create(allocator);
js_ast.Stmt.Data.Store.create(allocator);
@@ -274,6 +276,14 @@ pub fn NewBundler(cache_files: bool) type {
existing_bundle,
);
+ var env_loader = env_loader_ orelse brk: {
+ var map = try allocator.create(DotEnv.Map);
+ map.* = DotEnv.Map.init(allocator);
+
+ var loader = try allocator.create(DotEnv.Loader);
+ loader.* = DotEnv.Loader.init(map, allocator);
+ break :brk loader;
+ };
// var pool = try allocator.create(ThreadPool);
// try pool.init(ThreadPool.InitConfig{
// .allocator = allocator,
@@ -293,6 +303,7 @@ pub fn NewBundler(cache_files: bool) type {
.resolve_queue = ResolveQueue.init(allocator),
.output_files = std.ArrayList(options.OutputFile).init(allocator),
.virtual_modules = std.ArrayList(*ClientEntryPoint).init(allocator),
+ .env = env_loader,
};
}
@@ -308,16 +319,64 @@ pub fn NewBundler(cache_files: bool) type {
);
}
- pub fn configureFramework(this: *ThisBundler) !void {
+ pub fn runEnvLoader(this: *ThisBundler) !void {
+ switch (this.options.env.behavior) {
+ .prefix, .load_all => {
+ // Step 1. Load the project root.
+ var dir: *Fs.FileSystem.DirEntry = ((this.resolver.readDirInfo(this.fs.top_level_dir) catch return) orelse return).getEntries() orelse return;
+
+ // Process always has highest priority.
+ this.env.loadProcess();
+ if (this.options.production) {
+ try this.env.load(&this.fs.fs, dir, false);
+ } else {
+ try this.env.load(&this.fs.fs, dir, true);
+ }
+ },
+ else => {},
+ }
+ }
+
+ // This must be run after a framework is configured, if a framework is enabled
+ pub fn configureDefines(this: *ThisBundler) !void {
+ if (this.options.defines_loaded) {
+ return;
+ }
+
+ try this.runEnvLoader();
+
+ if (this.options.framework) |framework| {
+ if (this.options.platform.isClient()) {
+ try this.options.loadDefines(this.allocator, this.env, &framework.client_env);
+ } else {
+ try this.options.loadDefines(this.allocator, this.env, &framework.server_env);
+ }
+ } else {
+ try this.options.loadDefines(this.allocator, this.env, &this.options.env);
+ }
+ }
+
+ pub fn configureFramework(
+ this: *ThisBundler,
+ comptime load_defines: bool,
+ ) !void {
if (this.options.framework) |*framework| {
if (framework.needsResolveFromPackage()) {
var route_config = this.options.routes;
var pair = PackageJSON.FrameworkRouterPair{ .framework = framework, .router = &route_config };
if (framework.development) {
- try this.resolver.resolveFramework(framework.package, &pair, .development);
+ try this.resolver.resolveFramework(framework.package, &pair, .development, load_defines);
} else {
- try this.resolver.resolveFramework(framework.package, &pair, .production);
+ try this.resolver.resolveFramework(framework.package, &pair, .production, load_defines);
+ }
+
+ if (this.options.areDefinesUnset()) {
+ if (this.options.platform.isClient()) {
+ this.options.env = framework.client_env;
+ } else {
+ this.options.env = framework.server_env;
+ }
}
if (pair.loaded_routes) {
@@ -333,7 +392,7 @@ pub fn NewBundler(cache_files: bool) type {
pub fn configureFrameworkWithResolveResult(this: *ThisBundler, comptime client: bool) !?_resolver.Result {
if (this.options.framework != null) {
- try this.configureFramework();
+ try this.configureFramework(true);
if (comptime client) {
if (this.options.framework.?.client.len > 0) {
return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.client, .stmt);
@@ -348,8 +407,13 @@ pub fn NewBundler(cache_files: bool) type {
return null;
}
- pub fn configureRouter(this: *ThisBundler) !void {
- try this.configureFramework();
+ pub fn configureRouter(this: *ThisBundler, comptime load_defines: bool) !void {
+ try this.configureFramework(load_defines);
+ defer {
+ if (load_defines) {
+ this.configureDefines() catch {};
+ }
+ }
// if you pass just a directory, activate the router configured for the pages directory
// for now:
@@ -1683,7 +1747,6 @@ pub fn NewBundler(cache_files: bool) type {
// We try to be mostly stateless when serving
// This means we need a slightly different resolver setup
- // Essentially:
pub fn buildFile(
bundler: *ThisBundler,
log: *logger.Log,
@@ -1929,15 +1992,18 @@ pub fn NewBundler(cache_files: bool) type {
log: *logger.Log,
opts: Api.TransformOptions,
) !options.TransformResult {
- var bundler = try ThisBundler.init(allocator, log, opts, null);
+ var bundler = try ThisBundler.init(allocator, log, opts, null, null);
bundler.configureLinker();
- try bundler.configureRouter();
+ try bundler.configureRouter(false);
+ try bundler.configureDefines();
var skip_normalize = false;
- if (bundler.options.routes.routes_enabled) {
+ var load_from_routes = false;
+ if (bundler.options.routes.routes_enabled and bundler.options.entry_points.len == 0) {
if (bundler.router) |router| {
bundler.options.entry_points = try router.getEntryPoints(allocator);
skip_normalize = true;
+ load_from_routes = true;
}
}
@@ -1964,16 +2030,18 @@ pub fn NewBundler(cache_files: bool) type {
if (bundler.options.output_dir_handle == null) {
const outstream = std.io.getStdOut();
- if (bundler.options.framework) |*framework| {
- if (framework.client.len > 0) {
- did_start = true;
- try switch (bundler.options.import_path_format) {
- .relative => bundler.processResolveQueue(.relative, true, @TypeOf(outstream), outstream),
- .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, @TypeOf(outstream), outstream),
- .absolute_url => bundler.processResolveQueue(.absolute_url, true, @TypeOf(outstream), outstream),
- .absolute_path => bundler.processResolveQueue(.absolute_path, true, @TypeOf(outstream), outstream),
- .package_path => bundler.processResolveQueue(.package_path, true, @TypeOf(outstream), outstream),
- };
+ if (load_from_routes) {
+ if (bundler.options.framework) |*framework| {
+ if (framework.client.len > 0) {
+ did_start = true;
+ try switch (bundler.options.import_path_format) {
+ .relative => bundler.processResolveQueue(.relative, true, @TypeOf(outstream), outstream),
+ .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, @TypeOf(outstream), outstream),
+ .absolute_url => bundler.processResolveQueue(.absolute_url, true, @TypeOf(outstream), outstream),
+ .absolute_path => bundler.processResolveQueue(.absolute_path, true, @TypeOf(outstream), outstream),
+ .package_path => bundler.processResolveQueue(.package_path, true, @TypeOf(outstream), outstream),
+ };
+ }
}
}
@@ -1993,16 +2061,18 @@ pub fn NewBundler(cache_files: bool) type {
Global.crash();
};
- if (bundler.options.framework) |*framework| {
- if (framework.client.len > 0) {
- did_start = true;
- try switch (bundler.options.import_path_format) {
- .relative => bundler.processResolveQueue(.relative, true, std.fs.Dir, output_dir),
- .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, std.fs.Dir, output_dir),
- .absolute_url => bundler.processResolveQueue(.absolute_url, true, std.fs.Dir, output_dir),
- .absolute_path => bundler.processResolveQueue(.absolute_path, true, std.fs.Dir, output_dir),
- .package_path => bundler.processResolveQueue(.package_path, true, std.fs.Dir, output_dir),
- };
+ if (load_from_routes) {
+ if (bundler.options.framework) |*framework| {
+ if (framework.client.len > 0) {
+ did_start = true;
+ try switch (bundler.options.import_path_format) {
+ .relative => bundler.processResolveQueue(.relative, true, std.fs.Dir, output_dir),
+ .relative_nodejs => bundler.processResolveQueue(.relative_nodejs, true, std.fs.Dir, output_dir),
+ .absolute_url => bundler.processResolveQueue(.absolute_url, true, std.fs.Dir, output_dir),
+ .absolute_path => bundler.processResolveQueue(.absolute_path, true, std.fs.Dir, output_dir),
+ .package_path => bundler.processResolveQueue(.package_path, true, std.fs.Dir, output_dir),
+ };
+ }
}
}
@@ -2030,8 +2100,8 @@ pub fn NewBundler(cache_files: bool) type {
}
if (FeatureFlags.tracing) {
- Output.printError(
- "\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n",
+ Output.prettyErrorln(
+ "<r><d>\n---Tracing---\nResolve time: {d}\nParsing time: {d}\n---Tracing--\n\n<r>",
.{
bundler.resolver.elapsed,
bundler.elapsed,
@@ -2128,7 +2198,15 @@ pub const Transformer = struct {
js_ast.Stmt.Data.Store.create(allocator);
const platform = options.Platform.from(opts.platform);
- var define = try options.definesFromTransformOptions(allocator, log, opts.define, false, platform);
+ var define = try options.definesFromTransformOptions(
+ allocator,
+ log,
+ opts.define,
+ false,
+ platform,
+ null,
+ null,
+ );
const cwd = if (opts.absolute_working_dir) |workdir| try std.fs.realpathAlloc(allocator, workdir) else try std.process.getCwdAlloc(allocator);
diff --git a/src/cli.zig b/src/cli.zig
index 8d6644930..da4186a67 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -21,6 +21,7 @@ const configureTransformOptionsForSpeedy = @import("./javascript/jsc/config.zig"
const clap = @import("clap");
const bundler = @import("bundler.zig");
+const DotEnv = @import("./env_loader.zig");
const fs = @import("fs.zig");
const Router = @import("./router.zig");
@@ -407,11 +408,11 @@ pub const Cli = struct {
var log_ = try allocator.create(logger.Log);
log_.* = log;
- var this_bundler = try bundler.ServeBundler.init(allocator, log_, args, null);
+ var this_bundler = try bundler.ServeBundler.init(allocator, log_, args, null, null);
this_bundler.configureLinker();
var filepath: [*:0]const u8 = "node_modules.jsb";
var server_bundle_filepath: [*:0]const u8 = "node_modules.server.jsb";
- try this_bundler.configureRouter();
+ try this_bundler.configureRouter(true);
var loaded_route_config: ?Api.LoadedRouteConfig = brk: {
if (this_bundler.options.routes.routes_enabled) {
@@ -425,7 +426,7 @@ pub const Cli = struct {
}
break :brk null;
};
-
+ var env_loader = this_bundler.env;
wait_group = sync.WaitGroup.init();
var server_bundler_generator_thread: ?std.Thread = null;
if (this_bundler.options.framework) |*framework| {
@@ -433,6 +434,7 @@ pub const Cli = struct {
const ServerBundleGeneratorThread = struct {
inline fn _generate(
logs: *logger.Log,
+ env_loader_: *DotEnv.Loader,
allocator_: *std.mem.Allocator,
transform_args: Api.TransformOptions,
_filepath: [*:0]const u8,
@@ -440,9 +442,16 @@ pub const Cli = struct {
route_conf_: ?Api.LoadedRouteConfig,
router: ?Router,
) !void {
- var server_bundler = try bundler.ServeBundler.init(allocator_, logs, try configureTransformOptionsForSpeedy(allocator_, transform_args), null);
+ var server_bundler = try bundler.ServeBundler.init(
+ allocator_,
+ logs,
+ try configureTransformOptionsForSpeedy(allocator_, transform_args),
+ null,
+ env_loader_,
+ );
server_bundler.configureLinker();
server_bundler.router = router;
+ try server_bundler.configureDefines();
_ = try bundler.ServeBundler.GenerateNodeModuleBundle.generate(
&server_bundler,
allocator_,
@@ -454,6 +463,7 @@ pub const Cli = struct {
}
pub fn generate(
logs: *logger.Log,
+ env_loader_: *DotEnv.Loader,
transform_args: Api.TransformOptions,
_filepath: [*:0]const u8,
server_conf: Api.LoadedFramework,
@@ -477,7 +487,7 @@ pub const Cli = struct {
}
}
- _generate(logs, std.heap.c_allocator, transform_args, _filepath, server_conf, route_conf_, router) catch return;
+ _generate(logs, env_loader_, std.heap.c_allocator, transform_args, _filepath, server_conf, route_conf_, router) catch return;
}
};
@@ -488,6 +498,7 @@ pub const Cli = struct {
ServerBundleGeneratorThread.generate,
.{
log_,
+ env_loader,
args,
server_bundle_filepath,
_server_conf,
@@ -498,6 +509,7 @@ pub const Cli = struct {
} else {
ServerBundleGeneratorThread.generate(
log_,
+ env_loader,
args,
server_bundle_filepath,
_server_conf,
diff --git a/src/defines.zig b/src/defines.zig
index a0e3bc8ea..c3869c166 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -25,7 +25,7 @@ const Globals = struct {
};
const defines_path = fs.Path.initWithNamespace("defines.json", "internal");
-pub const RawDefines = std.StringHashMap(string);
+pub const RawDefines = std.StringArrayHashMap(string);
pub const UserDefines = std.StringHashMap(DefineData);
pub const DefineData = struct {
@@ -59,10 +59,8 @@ pub const DefineData = struct {
};
}
- pub fn from_input(defines: RawDefines, log: *logger.Log, allocator: *std.mem.Allocator) !UserDefines {
- var user_defines = UserDefines.init(allocator);
- try user_defines.ensureCapacity(defines.count());
-
+ pub fn from_mergable_input(defines: RawDefines, user_defines: *UserDefines, log: *logger.Log, allocator: *std.mem.Allocator) !void {
+ try user_defines.ensureUnusedCapacity(@truncate(u32, defines.count()));
var iter = defines.iterator();
while (iter.next()) |entry| {
var splitter = std.mem.split(entry.key_ptr.*, ".");
@@ -153,6 +151,11 @@ pub const DefineData = struct {
.value = data,
});
}
+ }
+
+ pub fn from_input(defines: RawDefines, log: *logger.Log, allocator: *std.mem.Allocator) !UserDefines {
+ var user_defines = UserDefines.init(allocator);
+ try from_mergable_input(defines, &user_defines, log, allocator);
return user_defines;
}
diff --git a/src/env_loader.zig b/src/env_loader.zig
index 4e31edbfd..1a0556455 100644
--- a/src/env_loader.zig
+++ b/src/env_loader.zig
@@ -2,7 +2,8 @@ const std = @import("std");
const logger = @import("./logger.zig");
usingnamespace @import("./global.zig");
const CodepointIterator = @import("./string_immutable.zig").CodepointIterator;
-
+const Fs = @import("./fs.zig");
+const Api = @import("./api/schema.zig").Api;
const Variable = struct {
key: string,
value: string,
@@ -44,7 +45,7 @@ pub const Lexer = struct {
comptime Writer: type,
writer: Writer,
variable: Variable,
- getter: fn (ctx: *const ContextType, key: string) ?string,
+ comptime getter: fn (ctx: *const ContextType, key: string) ?string,
) !void {
var i: usize = 0;
var last_flush: usize = 0;
@@ -70,7 +71,7 @@ pub const Lexer = struct {
last_flush = i;
const name = variable.value[start..i];
- if (getter(ctx, name)) |new_value| {
+ if (@call(.{ .modifier = .always_inline }, getter, .{ ctx, name })) |new_value| {
if (new_value.len > 0) {
try writer.writeAll(new_value);
}
@@ -328,27 +329,307 @@ pub const Lexer = struct {
}
};
-pub const Parser = struct {
- pub fn parse(source: *const logger.Source, allocator: *std.mem.Allocator) Map {
- var map = Map.init(allocator);
+pub const Loader = struct {
+ map: *Map,
+ allocator: *std.mem.Allocator,
+
+ @".env.local": ?logger.Source = null,
+ @".env.development": ?logger.Source = null,
+ @".env.production": ?logger.Source = null,
+ @".env": ?logger.Source = null,
+
+ did_load_process: bool = false,
+
+ const empty_string_value: string = "\"\"";
+
+ pub fn copyForDefine(
+ this: *Loader,
+ comptime Type: type,
+ to: *Type,
+ framework_defaults: Api.StringMap,
+ behavior: Api.DotEnvBehavior,
+ prefix: string,
+ allocator: *std.mem.Allocator,
+ ) ![]u8 {
+ var iter = this.map.iter();
+ var key_count: usize = 0;
+ var string_map_hashes = try allocator.alloc(u64, framework_defaults.keys.len);
+ defer allocator.free(string_map_hashes);
+ const invalid_hash = std.math.maxInt(u64) - 1;
+ std.mem.set(u64, string_map_hashes, invalid_hash);
+
+ var key_buf: []u8 = "";
+ // Frameworks determine an allowlist of values
+
+ for (framework_defaults.keys) |key, i| {
+ if (key.len > "process.env.".len and strings.eqlComptime(key[0.."process.env.".len], "process.env.")) {
+ const hashable_segment = key["process.env.".len..];
+ string_map_hashes[i] = std.hash.Wyhash.hash(0, hashable_segment);
+ }
+ }
+
+ // We have to copy all the keys to prepend "process.env" :/
+ var key_buf_len: usize = 0;
+
+ if (behavior != .disable) {
+ if (behavior == .prefix) {
+ std.debug.assert(prefix.len > 0);
+
+ while (iter.next()) |entry| {
+ if (strings.startsWith(entry.key_ptr.*, prefix)) {
+ key_buf_len += entry.key_ptr.len;
+ key_count += 1;
+ std.debug.assert(entry.key_ptr.len > 0);
+ }
+ }
+ } else {
+ while (iter.next()) |entry| {
+ key_buf_len += entry.key_ptr.len;
+ key_count += 1;
+ std.debug.assert(entry.key_ptr.len > 0);
+ }
+ }
+
+ if (key_buf_len > 0) {
+ iter.reset();
+ key_buf = try allocator.alloc(u8, key_buf_len + key_count * "process.env.".len);
+ errdefer allocator.free(key_buf);
+ var key_fixed_allocator = std.heap.FixedBufferAllocator.init(key_buf);
+ var key_allocator = &key_fixed_allocator.allocator;
+
+ if (behavior == .prefix) {
+ while (iter.next()) |entry| {
+ const value: string = if (entry.value_ptr.*.len == 0) empty_string_value else entry.value_ptr.*;
+
+ if (strings.startsWith(entry.key_ptr.*, prefix)) {
+ _ = try to.getOrPutValue(
+ std.fmt.allocPrint(key_allocator, "process.env.{s}", .{entry.key_ptr.*}) catch unreachable,
+ value,
+ );
+ } else {
+ const hash = std.hash.Wyhash.hash(0, entry.key_ptr.*);
+
+ std.debug.assert(hash != invalid_hash);
+
+ if (std.mem.indexOfScalar(u64, string_map_hashes, hash)) |key_i| {
+ _ = try to.getOrPutValue(
+ framework_defaults.keys[key_i],
+ value,
+ );
+ }
+ }
+ }
+ } else {
+ while (iter.next()) |entry| {
+ const value: string = if (entry.value_ptr.*.len == 0) empty_string_value else entry.value_ptr.*;
+ _ = try to.getOrPutValue(
+ std.fmt.allocPrint(key_allocator, "process.env.{s}", .{entry.key_ptr.*}) catch unreachable,
+ value,
+ );
+ }
+ }
+ }
+ }
+
+ for (framework_defaults.keys) |key, i| {
+ const value = framework_defaults.values[i];
+
+ if (value.len == 0) {
+ _ = try to.getOrPutValue(key, empty_string_value);
+ } else {
+ _ = try to.getOrPutValue(key, value);
+ }
+ }
+
+ return key_buf;
+ }
+
+ pub fn init(map: *Map, allocator: *std.mem.Allocator) Loader {
+ return Loader{
+ .map = map,
+ .allocator = allocator,
+ };
+ }
+
+ pub fn loadProcess(this: *Loader) void {
+ if (this.did_load_process) return;
+
+ // This is a little weird because it's evidently stored line-by-line
+ var source = logger.Source.initPathString("process.env", "");
+ for (std.os.environ) |env| {
+ source.contents = std.mem.span(env);
+ Parser.parse(&source, this.allocator, this.map, true);
+ }
+ this.did_load_process = true;
+ }
+
+ // mostly for tests
+ pub fn loadFromString(this: *Loader, str: string, comptime overwrite: bool) void {
+ var source = logger.Source.initPathString("test", str);
+ Parser.parse(&source, this.allocator, this.map, overwrite);
+ std.mem.doNotOptimizeAway(&source);
+ }
+
+ // .env.local goes first
+ // Load .env.development if development
+ // Load .env.production if !development
+ // .env goes last
+ pub fn load(
+ this: *Loader,
+ fs: *Fs.FileSystem.RealFS,
+ dir: *Fs.FileSystem.DirEntry,
+ comptime development: bool,
+ ) !void {
+ const start = std.time.nanoTimestamp();
+ var dir_handle: std.fs.Dir = std.fs.cwd();
+ var can_auto_close = false;
+
+ if (dir.hasComptimeQuery(".env.local")) {
+ try this.loadEnvFile(fs, dir_handle, ".env.local", false);
+ }
+
+ if (comptime development) {
+ if (dir.hasComptimeQuery(".env.development")) {
+ try this.loadEnvFile(fs, dir_handle, ".env.development", false);
+ }
+ } else {
+ if (dir.hasComptimeQuery(".env.production")) {
+ try this.loadEnvFile(fs, dir_handle, ".env.production", false);
+ }
+ }
+
+ if (dir.hasComptimeQuery(".env")) {
+ try this.loadEnvFile(fs, dir_handle, ".env", false);
+ }
+
+ this.printLoaded(start);
+ }
+
+ pub fn printLoaded(this: *Loader, start: i128) void {
+ const count =
+ @intCast(u8, @boolToInt(this.@".env.local" != null)) +
+ @intCast(u8, @boolToInt(this.@".env.development" != null)) +
+ @intCast(u8, @boolToInt(this.@".env.production" != null)) +
+ @intCast(u8, @boolToInt(this.@".env" != null));
+
+ if (count == 0) return;
+ const elapsed = @intToFloat(f64, (std.time.nanoTimestamp() - start)) / std.time.ns_per_ms;
+
+ const all = [_]string{
+ ".env.local",
+ ".env.development",
+ ".env.production",
+ ".env",
+ };
+ const loaded = [_]bool{
+ this.@".env.local" != null,
+ this.@".env.development" != null,
+ this.@".env.production" != null,
+ this.@".env" != null,
+ };
+
+ var loaded_i: u8 = 0;
+ Output.printElapsed(elapsed);
+ Output.prettyError(" <d>", .{});
+
+ for (loaded) |yes, i| {
+ if (yes) {
+ loaded_i += 1;
+ if (count == 1 or (loaded_i >= count and count > 1)) {
+ Output.prettyError("\"{s}\"", .{all[i]});
+ } else {
+ Output.prettyError("\"{s}\", ", .{all[i]});
+ }
+ }
+ }
+ Output.prettyErrorln("<r>\n", .{});
+ Output.flush();
+ }
+
+ pub fn loadEnvFile(this: *Loader, fs: *Fs.FileSystem.RealFS, dir: std.fs.Dir, comptime base: string, comptime override: bool) !void {
+ if (@field(this, base) != null) {
+ return;
+ }
+
+ var file = dir.openFile(base, .{ .read = true }) catch |err| {
+ switch (err) {
+ error.FileNotFound => {
+ // prevent retrying
+ @field(this, base) = logger.Source.initPathString(base, "");
+ return;
+ },
+ else => {
+ return err;
+ },
+ }
+ };
+ Fs.FileSystem.setMaxFd(file.handle);
+
+ defer {
+ if (fs.needToCloseFiles()) {
+ file.close();
+ }
+ }
+ const stat = try file.stat();
+ if (stat.size == 0) {
+ @field(this, base) = logger.Source.initPathString(base, "");
+ return;
+ }
+
+ var buf = try this.allocator.allocSentinel(u8, stat.size, 0);
+ errdefer this.allocator.free(buf);
+ var contents = try file.readAll(buf);
+ // always sentinel
+ buf.ptr[contents + 1] = 0;
+ const source = logger.Source.initPathString(base, buf.ptr[0..contents]);
+
+ Parser.parse(
+ &source,
+ this.allocator,
+ this.map,
+ override,
+ );
+
+ @field(this, base) = source;
+ }
+};
+
+pub const Parser = struct {
+ pub fn parse(
+ source: *const logger.Source,
+ allocator: *std.mem.Allocator,
+ map: *Map,
+ comptime override: bool,
+ ) void {
var lexer = Lexer.init(source);
var fbs = std.io.fixedBufferStream(&temporary_nested_value_buffer);
var writer = fbs.writer();
+
while (lexer.next()) |variable| {
if (variable.has_nested_value) {
writer.context.reset();
- lexer.eatNestedValue(Map, &map, @TypeOf(writer), writer, variable, Map.get) catch unreachable;
+
+ lexer.eatNestedValue(Map, map, @TypeOf(writer), writer, variable, Map.get) catch unreachable;
const new_value = fbs.buffer[0..fbs.pos];
if (new_value.len > 0) {
- map.put(variable.key, allocator.dupe(u8, new_value) catch unreachable) catch unreachable;
+ if (comptime override) {
+ map.put(variable.key, allocator.dupe(u8, new_value) catch unreachable) catch unreachable;
+ } else {
+ var putter = map.map.getOrPut(variable.key) catch unreachable;
+ if (!putter.found_existing) {
+ putter.value_ptr.* = allocator.dupe(u8, new_value) catch unreachable;
+ }
+ }
}
} else {
- map.put(variable.key, variable.value) catch unreachable;
+ if (comptime override) {
+ map.put(variable.key, variable.value) catch unreachable;
+ } else {
+ map.putDefault(variable.key, variable.value) catch unreachable;
+ }
}
}
-
- return map;
}
};
@@ -361,7 +642,7 @@ pub const Map = struct {
return Map{ .map = HashTable.init(allocator) };
}
- pub inline fn iter(this: *Map) !HashTable.Iterator {
+ pub inline fn iter(this: *Map) HashTable.Iterator {
return this.map.iterator();
}
@@ -380,9 +661,9 @@ pub const Map = struct {
_ = try this.map.getOrPutValue(key, value);
}
- pub fn merge(this: *Map, other: *Map) !void {}
-
- pub fn copyPrefixed(this: *Map, other: *Map) !void {}
+ pub inline fn getOrPut(this: *Map, key: string, value: string) !void {
+ _ = try this.map.getOrPutValue(key, value);
+ }
};
const expectString = std.testing.expectEqualStrings;
@@ -422,10 +703,18 @@ test "DotEnv Loader" {
\\
;
const source = logger.Source.initPathString(".env", VALID_ENV);
- const map = Parser.parse(&source, std.heap.c_allocator);
+ var map = Map.init(std.heap.c_allocator);
+ Parser.parse(
+ &source,
+ std.heap.c_allocator,
+ &map,
+ true,
+ );
+ try expectString(map.get("NESTED_VALUES_RESPECT_ESCAPING").?, "'\\$API_KEY'");
+
try expectString(map.get("NESTED_VALUE").?, "'verysecure'");
try expectString(map.get("RECURSIVE_NESTED_VALUE").?, "'verysecure':verysecure");
- try expectString(map.get("NESTED_VALUES_RESPECT_ESCAPING").?, "'\\$API_KEY'");
+
try expectString(map.get("API_KEY").?, "verysecure");
try expectString(map.get("process.env.WAT").?, "ABCDEFGHIJKLMNOPQRSTUVWXYZZ10239457123");
try expectString(map.get("DOUBLE-QUOTED_SHOULD_PRESERVE_NEWLINES").?, "\"\nya\n\"");
@@ -438,3 +727,73 @@ test "DotEnv Loader" {
try expectString(map.get("IGNORING_DOESNT_BREAK_OTHER_LINES").?, "'yes'");
try expectString(map.get("LEADING_SPACE_IN_UNQUOTED_VALUE_IS_TRIMMED").?, "yes");
}
+
+test "DotEnv Process" {
+ var map = Map.init(std.heap.c_allocator);
+ var process = try std.process.getEnvMap(std.heap.c_allocator);
+ var loader = Loader.init(&map, std.heap.c_allocator);
+ loader.loadProcess();
+
+ try expectString(loader.map.get("TMPDIR").?, process.get("TMPDIR").?);
+ try expect(loader.map.get("TMPDIR").?.len > 0);
+
+ try expectString(loader.map.get("USER").?, process.get("USER").?);
+ try expect(loader.map.get("USER").?.len > 0);
+}
+
+test "DotEnv Loader.copyForDefine" {
+ const UserDefine = std.StringArrayHashMap(string);
+
+ var map = Map.init(std.heap.c_allocator);
+ var loader = Loader.init(&map, std.heap.c_allocator);
+ const framework_keys = [_]string{ "process.env.BACON", "process.env.HOSTNAME" };
+ const framework_values = [_]string{ "true", "\"localhost\"" };
+ const framework = Api.StringMap{
+ .keys = std.mem.span(&framework_keys),
+ .values = std.mem.span(&framework_values),
+ };
+
+ const user_overrides: string =
+ \\BACON=false
+ \\HOSTNAME=example.com
+ \\THIS_SHOULDNT_BE_IN_DEFINES_MAP=true
+ \\
+ ;
+
+ const skip_user_overrides: string =
+ \\THIS_SHOULDNT_BE_IN_DEFINES_MAP=true
+ \\
+ ;
+
+ loader.loadFromString(skip_user_overrides, false);
+
+ var user_defines = UserDefine.init(std.heap.c_allocator);
+ var buf = try loader.copyForDefine(UserDefine, &user_defines, framework, .disable, "", std.heap.c_allocator);
+
+ try expect(user_defines.get("process.env.THIS_SHOULDNT_BE_IN_DEFINES_MAP") == null);
+
+ user_defines = UserDefine.init(std.heap.c_allocator);
+
+ loader.loadFromString(user_overrides, true);
+
+ buf = try loader.copyForDefine(
+ UserDefine,
+ &user_defines,
+ framework,
+ Api.DotEnvBehavior.load_all,
+ "",
+ std.heap.c_allocator,
+ );
+
+ try expect(user_defines.get("process.env.BACON") != null);
+ try expectString(user_defines.get("process.env.BACON").?, "false");
+ try expectString(user_defines.get("process.env.HOSTNAME").?, "example.com");
+ try expect(user_defines.get("process.env.THIS_SHOULDNT_BE_IN_DEFINES_MAP") != null);
+
+ user_defines = UserDefine.init(std.heap.c_allocator);
+
+ buf = try loader.copyForDefine(UserDefine, &user_defines, framework, .prefix, "HO", std.heap.c_allocator);
+
+ try expectString(user_defines.get("process.env.HOSTNAME").?, "example.com");
+ try expect(user_defines.get("process.env.THIS_SHOULDNT_BE_IN_DEFINES_MAP") == null);
+}
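
The loader's precedence is: process.env is read first with overwrite enabled, then .env.local, then .env.development (or .env.production for production builds), then .env, each without overwrite, so within the file layer the first writer of a key wins. A minimal TypeScript sketch of that merge order, using the demo files above as placeholder contents:

```ts
// First writer wins for the dotenv layer; process.env is loaded before any file.
function mergeEnvLayers(
  processEnv: Record<string, string>,
  files: Record<string, string>[], // ordered: .env.local, .env.development|.env.production, .env
): Record<string, string> {
  const merged: Record<string, string> = { ...processEnv };
  for (const file of files) {
    for (const [key, value] of Object.entries(file)) {
      if (!(key in merged)) merged[key] = value; // no overwrite: earlier layers win
    }
  }
  return merged;
}

// With the demo files, .env.local is loaded before .env,
// so NEXT_PUBLIC_TEST resolves to "100", not "1".
const env = mergeEnvLayers({}, [
  { NEXT_PUBLIC_TEST: "100" },  // .env.local
  { SO_MANY_DOT_ENVS: "true" }, // .env.development
  { NEXT_PUBLIC_TEST: "1" },    // .env
]);
// env.NEXT_PUBLIC_TEST === "100"
```
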
diff --git a/src/fs.zig b/src/fs.zig
index 6a6656fa7..fb856910e 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -259,11 +259,11 @@ pub const FileSystem = struct {
query[i] = std.ascii.toLower(c);
};
- const query_hashed = DirEntry.EntryMap.getHash(&query);
+ const query_hashed = comptime DirEntry.EntryMap.getHash(&query);
const result_index = entry.data.getWithHash(&query, query_hashed) orelse return null;
const result = EntryStore.instance.at(result_index) orelse return null;
- if (!strings.eql(result.base, query)) {
+ if (!strings.eqlComptime(result.base, query)) {
return Entry.Lookup{ .entry = result, .diff_case = Entry.Lookup.DifferentCase{
.dir = entry.dir,
.query = &query,
@@ -273,6 +273,18 @@ pub const FileSystem = struct {
return Entry.Lookup{ .entry = result, .diff_case = null };
}
+
+ pub fn hasComptimeQuery(entry: *const DirEntry, comptime query_str: anytype) bool {
+ comptime var query: [query_str.len]u8 = undefined;
+ comptime for (query_str) |c, i| {
+ query[i] = std.ascii.toLower(c);
+ };
+
+ const query_hashed = comptime DirEntry.EntryMap.getHash(&query);
+
+ const result_index = entry.data.getWithHash(&query, query_hashed) orelse return false;
+ return result_index.index != allocators.NotFound.index and result_index.index != allocators.Unassigned.index;
+ }
};
pub const Entry = struct {
diff --git a/src/global.zig b/src/global.zig
index f7c9ddc9e..e13b978f7 100644
--- a/src/global.zig
+++ b/src/global.zig
@@ -79,6 +79,9 @@ pub const Output = struct {
}
}
+ pub fn printElapsed(elapsed: f64) void {
+ Output.prettyError("<r><d>[<b>{d:>.2}ms<r><d>]<r>", .{elapsed});
+ }
pub fn printErrorable(comptime fmt: string, args: anytype) !void {
if (comptime isWasm) {
try source.stream.seekTo(0);
diff --git a/src/http.zig b/src/http.zig
index b60487fbc..5ea455dcc 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -15,6 +15,7 @@ const Css = @import("css_scanner.zig");
const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;
const resolve_path = @import("./resolver/resolve_path.zig");
const OutputFile = Options.OutputFile;
+const DotEnv = @import("./env_loader.zig");
pub fn constStrToU8(s: string) []u8 {
return @intToPtr([*]u8, @ptrToInt(s.ptr))[0..s.len];
}
@@ -691,6 +692,7 @@ pub const RequestContext = struct {
existing_bundle: ?*NodeModuleBundle,
log: ?*logger.Log = null,
watcher: *Watcher,
+ env_loader: *DotEnv.Loader,
};
pub const Channel = sync.Channel(*JavaScriptHandler, .{ .Static = 100 });
@@ -725,7 +727,13 @@ pub const RequestContext = struct {
js_ast.Expr.Data.Store.create(std.heap.c_allocator);
defer Output.flush();
- var vm = JavaScript.VirtualMachine.init(std.heap.c_allocator, handler.args, handler.existing_bundle, handler.log) catch |err| {
+ var vm = JavaScript.VirtualMachine.init(
+ std.heap.c_allocator,
+ handler.args,
+ handler.existing_bundle,
+ handler.log,
+ handler.env_loader,
+ ) catch |err| {
Output.prettyErrorln(
"JavaScript VM failed to start: <r><red>{s}<r>",
.{@errorName(err)},
@@ -733,7 +741,8 @@ pub const RequestContext = struct {
Output.flush();
return;
};
- vm.bundler.configureRouter() catch {};
+ vm.bundler.configureRouter(false) catch {};
+ try vm.bundler.configureDefines();
std.debug.assert(JavaScript.VirtualMachine.vm_loaded);
javascript_vm = vm;
@@ -856,6 +865,7 @@ pub const RequestContext = struct {
.existing_bundle = null,
.log = &server.log,
.watcher = server.watcher,
+ .env_loader = server.bundler.env,
},
);
} else {
@@ -866,6 +876,7 @@ pub const RequestContext = struct {
.existing_bundle = server.bundler.options.node_modules_bundle,
.log = &server.log,
.watcher = server.watcher,
+ .env_loader = server.bundler.env,
},
);
}
@@ -1958,9 +1969,9 @@ pub const Server = struct {
.transform_options = options,
.timer = try std.time.Timer.start(),
};
- server.bundler = try Bundler.init(allocator, &server.log, options, null);
+ server.bundler = try Bundler.init(allocator, &server.log, options, null, null);
server.bundler.configureLinker();
- try server.bundler.configureRouter();
+ try server.bundler.configureRouter(true);
try server.initWatcher();
diff --git a/src/javascript/jsc/javascript.zig b/src/javascript/jsc/javascript.zig
index 9945b9283..21756bf9f 100644
--- a/src/javascript/jsc/javascript.zig
+++ b/src/javascript/jsc/javascript.zig
@@ -21,6 +21,7 @@ usingnamespace @import("./bindings/bindings.zig");
const Runtime = @import("../../runtime.zig");
const Router = @import("./api/router.zig");
const ImportRecord = ast.ImportRecord;
+const DotEnv = @import("../../env_loader.zig");
pub const GlobalClasses = [_]type{
Request.Class,
@@ -283,6 +284,7 @@ pub const VirtualMachine = struct {
_args: Api.TransformOptions,
existing_bundle: ?*NodeModuleBundle,
_log: ?*logger.Log,
+ env_loader: *DotEnv.Loader,
) !*VirtualMachine {
var log: *logger.Log = undefined;
if (_log) |__log| {
@@ -299,6 +301,7 @@ pub const VirtualMachine = struct {
log,
try configureTransformOptionsForSpeedyVM(allocator, _args),
existing_bundle,
+ env_loader,
);
VirtualMachine.vm.* = VirtualMachine{
.global = undefined,
@@ -313,7 +316,7 @@ pub const VirtualMachine = struct {
};
VirtualMachine.vm.bundler.configureLinker();
- try VirtualMachine.vm.bundler.configureFramework();
+ try VirtualMachine.vm.bundler.configureFramework(false);
if (_args.serve orelse false) {
VirtualMachine.vm.bundler.linker.onImportCSS = Wundle.onImportCSS;
diff --git a/src/options.zig b/src/options.zig
index a9211f2a2..44a4f06b3 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -13,6 +13,8 @@ const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;
const URL = @import("./query_string_map.zig").URL;
usingnamespace @import("global.zig");
+const DotEnv = @import("./env_loader.zig");
+
const assert = std.debug.assert;
pub const WriteDestination = enum {
@@ -39,7 +41,7 @@ pub fn stringHashMapFromArrays(comptime t: type, allocator: *std.mem.Allocator,
if (keys.len > 0) {
try hash_map.ensureCapacity(@intCast(u32, keys.len));
for (keys) |key, i| {
- try hash_map.put(key, values[i]);
+ hash_map.putAssumeCapacity(key, values[i]);
}
}
@@ -251,10 +253,17 @@ pub const Platform = enum {
speedy,
node,
+ pub inline fn isClient(this: Platform) bool {
+ return switch (this) {
+ .speedy => false,
+ else => true,
+ };
+ }
+
const browser_define_value_true = "true";
const browser_define_value_false = "false";
- pub fn processBrowserDefineValue(this: Platform) ?string {
+ pub inline fn processBrowserDefineValue(this: Platform) ?string {
return switch (this) {
.browser => browser_define_value_true,
.speedy, .node => browser_define_value_false,
@@ -262,7 +271,7 @@ pub const Platform = enum {
};
}
- pub fn isWebLike(platform: Platform) bool {
+ pub inline fn isWebLike(platform: Platform) bool {
return switch (platform) {
.neutral, .browser => true,
else => false,
@@ -570,6 +579,8 @@ pub fn definesFromTransformOptions(
_input_define: ?Api.StringMap,
hmr: bool,
platform: Platform,
+ loader: ?*DotEnv.Loader,
+ framework_env: ?*const Env,
) !*defines.Define {
var input_user_define = _input_define orelse std.mem.zeroes(Api.StringMap);
@@ -579,6 +590,29 @@ pub fn definesFromTransformOptions(
input_user_define.keys,
input_user_define.values,
);
+
+ if (loader) |_loader| {
+ if (framework_env) |framework| {
+ _ = try _loader.copyForDefine(
+ defines.RawDefines,
+ &user_defines,
+ framework.toAPI().defaults,
+ framework.behavior,
+ framework.prefix,
+ allocator,
+ );
+ } else {
+ _ = try _loader.copyForDefine(
+ defines.RawDefines,
+ &user_defines,
+ std.mem.zeroes(Api.StringMap),
+ Api.DotEnvBehavior.disable,
+ "",
+ allocator,
+ );
+ }
+ }
+
if (input_user_define.keys.len == 0) {
try user_defines.put(DefaultUserDefines.NodeEnv.Key, DefaultUserDefines.NodeEnv.Value);
}
@@ -675,6 +709,29 @@ pub const BundleOptions = struct {
import_path_format: ImportPathFormat = ImportPathFormat.relative,
framework: ?Framework = null,
routes: RouteConfig = RouteConfig.zero(),
+ defines_loaded: bool = false,
+ env: Env = Env{},
+ transform_options: Api.TransformOptions,
+
+ pub fn areDefinesUnset(this: *const BundleOptions) bool {
+ return !this.defines_loaded;
+ }
+
+ pub fn loadDefines(this: *BundleOptions, allocator: *std.mem.Allocator, loader: ?*DotEnv.Loader, env: ?*const Env) !void {
+ if (this.defines_loaded) {
+ return;
+ }
+ this.define = try definesFromTransformOptions(
+ allocator,
+ this.log,
+ this.transform_options.define,
+ this.transform_options.serve orelse false,
+ this.platform,
+ loader,
+ env,
+ );
+ this.defines_loaded = true;
+ }
pub fn asJavascriptBundleConfig(this: *const BundleOptions) Api.JavascriptBundleConfig {}
@@ -697,7 +754,13 @@ pub const BundleOptions = struct {
pub var ExtensionOrder = [_]string{ ".tsx", ".ts", ".jsx", ".js", ".json", ".css" };
};
- pub fn fromApi(allocator: *std.mem.Allocator, fs: *Fs.FileSystem, log: *logger.Log, transform: Api.TransformOptions, node_modules_bundle_existing: ?*NodeModuleBundle) !BundleOptions {
+ pub fn fromApi(
+ allocator: *std.mem.Allocator,
+ fs: *Fs.FileSystem,
+ log: *logger.Log,
+ transform: Api.TransformOptions,
+ node_modules_bundle_existing: ?*NodeModuleBundle,
+ ) !BundleOptions {
const output_dir_parts = [_]string{ try std.process.getCwdAlloc(allocator), transform.output_dir orelse "out" };
var opts: BundleOptions = BundleOptions{
.log = log,
@@ -710,6 +773,8 @@ pub const BundleOptions = struct {
.external = undefined,
.entry_points = transform.entry_points,
.out_extensions = undefined,
+ .env = Env.init(allocator),
+ .transform_options = transform,
};
if (transform.origin) |origin| {
@@ -764,8 +829,6 @@ pub const BundleOptions = struct {
else => {},
}
- opts.define = try definesFromTransformOptions(allocator, log, transform.define, transform.serve orelse false, opts.platform);
-
if (!(transform.generate_node_module_bundle orelse false)) {
if (node_modules_bundle_existing) |node_mods| {
opts.node_modules_bundle = node_mods;
@@ -798,19 +861,29 @@ pub const BundleOptions = struct {
}
const elapsed = @intToFloat(f64, (std.time.nanoTimestamp() - time_start)) / std.time.ns_per_ms;
+ Output.printElapsed(elapsed);
Output.prettyErrorln(
- "<r><b><d>\"{s}\"<r><d> - {d} modules, {d} packages <b>[{d:>.2}ms]<r>",
+ " <b><d>\"{s}\"<r><d> - {d} modules, {d} packages<r>",
.{
pretty_path,
node_module_bundle.bundle.modules.len,
node_module_bundle.bundle.packages.len,
- elapsed,
},
);
Output.flush();
if (transform.framework == null) {
if (node_module_bundle.container.framework) |loaded_framework| {
- opts.framework = Framework.fromLoadedFramework(loaded_framework);
+ opts.framework = Framework.fromLoadedFramework(loaded_framework, allocator);
+ opts.framework.?.client_env.allocator = allocator;
+ opts.framework.?.server_env.allocator = allocator;
+
+ if (transform.define == null) {
+ if (opts.platform.isClient()) {
+ opts.env = opts.framework.?.client_env;
+ } else {
+ opts.env = opts.framework.?.server_env;
+ }
+ }
}
}
@@ -832,21 +905,31 @@ pub const BundleOptions = struct {
}
}
- if (transform.main_fields.len > 0) {
- opts.main_fields = transform.main_fields;
- }
-
- opts.external = ExternalModules.init(allocator, &fs.fs, fs.top_level_dir, transform.external, log, opts.platform);
- opts.out_extensions = opts.platform.outExtensions(allocator);
-
if (transform.framework) |_framework| {
opts.framework = try Framework.fromApi(_framework);
+
+ if (_framework.client_env) |env| {
+ opts.framework.?.client_env.allocator = allocator;
+ try opts.framework.?.client_env.setFromAPI(env);
+ }
+
+ if (_framework.server_env) |env| {
+ opts.framework.?.server_env.allocator = allocator;
+ try opts.framework.?.server_env.setFromAPI(env);
+ }
}
if (transform.router) |routes| {
opts.routes = try RouteConfig.fromApi(routes, allocator);
}
+ if (transform.main_fields.len > 0) {
+ opts.main_fields = transform.main_fields;
+ }
+
+ opts.external = ExternalModules.init(allocator, &fs.fs, fs.top_level_dir, transform.external, log, opts.platform);
+ opts.out_extensions = opts.platform.outExtensions(allocator);
+
if (transform.serve orelse false) {
opts.preserve_extensions = true;
opts.append_package_version_in_query_string = true;
@@ -1186,6 +1269,105 @@ pub const TransformResult = struct {
}
};
+pub const Env = struct {
+ const Entry = struct {
+ key: string,
+ value: string,
+ };
+ const List = std.MultiArrayList(Entry);
+
+ behavior: Api.DotEnvBehavior = Api.DotEnvBehavior.disable,
+ prefix: string = "",
+ defaults: List = List{},
+ allocator: *std.mem.Allocator = undefined,
+
+ pub fn init(
+ allocator: *std.mem.Allocator,
+ ) Env {
+ return Env{
+ .allocator = allocator,
+ .defaults = List{},
+ .prefix = "",
+ .behavior = Api.DotEnvBehavior.disable,
+ };
+ }
+
+ pub fn ensureTotalCapacity(this: *Env, capacity: u64) !void {
+ try this.defaults.ensureTotalCapacity(this.allocator, capacity);
+ }
+
+ pub fn setDefaultsMap(this: *Env, defaults: Api.StringMap) !void {
+ this.defaults.shrinkRetainingCapacity(0);
+
+ if (defaults.keys.len == 0) {
+ return;
+ }
+
+ try this.defaults.ensureTotalCapacity(this.allocator, defaults.keys.len);
+
+ for (defaults.keys) |key, i| {
+ this.defaults.appendAssumeCapacity(.{ .key = key, .value = defaults.values[i] });
+ }
+ }
+
+ // For reading from API
+ pub fn setFromAPI(this: *Env, config: Api.EnvConfig) !void {
+ this.setBehaviorFromPrefix(config.prefix orelse "");
+
+ if (config.defaults) |defaults| {
+ try this.setDefaultsMap(defaults);
+ }
+ }
+
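+ // "*" means load every key, a non-empty string is treated as a required prefix,
+ // and an empty string disables .env loading.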
+ pub fn setBehaviorFromPrefix(this: *Env, prefix: string) void {
+ this.behavior = Api.DotEnvBehavior.disable;
+ this.prefix = "";
+
+ if (strings.eqlComptime(prefix, "*")) {
+ this.behavior = Api.DotEnvBehavior.load_all;
+ } else if (prefix.len > 0) {
+ this.behavior = Api.DotEnvBehavior.prefix;
+ this.prefix = prefix;
+ }
+ }
+
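+ // For reading from a loaded bundle (Api.LoadedEnvConfig)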
+ pub fn setFromLoaded(this: *Env, config: Api.LoadedEnvConfig, allocator: *std.mem.Allocator) !void {
+ this.allocator = allocator;
+ this.behavior = switch (config.dotenv) {
+ Api.DotEnvBehavior.prefix => Api.DotEnvBehavior.prefix,
+ Api.DotEnvBehavior.load_all => Api.DotEnvBehavior.load_all,
+ else => Api.DotEnvBehavior.disable,
+ };
+
+ this.prefix = config.prefix;
+
+ try this.setDefaultsMap(config.defaults);
+ }
+
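+ // Convert back into the schema type (Api.LoadedEnvConfig)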
+ pub fn toAPI(this: *const Env) Api.LoadedEnvConfig {
+ var slice = this.defaults.slice();
+
+ return Api.LoadedEnvConfig{
+ .dotenv = this.behavior,
+ .prefix = this.prefix,
+ .defaults = .{ .keys = slice.items(.key), .values = slice.items(.value) },
+ };
+ }
+
+ // For reading from package.json
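+ // Keeps the first value seen for a key; later duplicates are ignored.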
+ pub fn getOrPutValue(this: *Env, key: string, value: string) !void {
+ var slice = this.defaults.slice();
+ const keys = slice.items(.key);
+ for (keys) |_key| {
+ if (strings.eql(key, _key)) {
+ return;
+ }
+ }
+
+ try this.defaults.append(this.allocator, .{ .key = key, .value = value });
+ }
+};
+
pub const Framework = struct {
client: string,
server: string,
@@ -1194,21 +1376,8 @@ pub const Framework = struct {
resolved: bool = false,
from_bundle: bool = false,
- client_env: ?Env = null,
- server_env: ?Env = null,
-
- pub const Env = struct {
- pub const Map = std.StringArrayHashMap(string);
- defaults: Map,
- prefix: string = "",
-
- pub fn init(allocator: *std.mem.Allocator, prefix: string) Env {
- return Env{
- .defaults = Map.init(allocator),
- .prefix = prefix,
- };
- }
- };
+ client_env: Env = Env{},
+ server_env: Env = Env{},
fn normalizedPath(allocator: *std.mem.Allocator, toplevel_path: string, path: string) !string {
std.debug.assert(std.fs.path.isAbsolute(path));
@@ -1231,16 +1400,24 @@ pub const Framework = struct {
}
}
- pub fn fromLoadedFramework(loaded: Api.LoadedFramework) Framework {
+ pub fn fromLoadedFramework(loaded: Api.LoadedFramework, allocator: *std.mem.Allocator) Framework {
const client = if (loaded.client) loaded.entry_point else "";
const server = if (!loaded.client) loaded.entry_point else "";
- return Framework{
+ var framework = Framework{
.client = client,
.server = server,
.package = loaded.package,
.development = loaded.development,
.from_bundle = true,
};
+
+ if (loaded.client) {
+ framework.client_env.setFromLoaded(loaded.env, allocator) catch {};
+ } else {
+ framework.server_env.setFromLoaded(loaded.env, allocator) catch {};
+ }
+
+ return framework;
}
pub fn toAPI(this: *const Framework, allocator: *std.mem.Allocator, toplevel_path: string, comptime client: bool) ?Api.LoadedFramework {
@@ -1251,6 +1428,7 @@ pub const Framework = struct {
.package = this.package,
.development = this.development,
.client = true,
+ .env = this.client_env.toAPI(),
};
}
} else {
@@ -1260,6 +1438,7 @@ pub const Framework = struct {
.package = this.package,
.development = this.development,
.client = false,
+ .env = this.server_env.toAPI(),
};
}
}
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 160f748fd..3191f0a59 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -61,7 +61,71 @@ pub const PackageJSON = struct {
//
browser_map: BrowserMap,
- fn loadFrameworkExpression(framework: *options.Framework, json: js_ast.Expr, allocator: *std.mem.Allocator) bool {
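+ // Reads a "defaults" object from a framework "define" block; only
+ // string-valued properties are kept.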
+ fn loadDefineDefaults(
+ env: *options.Env,
+ json: *const js_ast.E.Object,
+ allocator: *std.mem.Allocator,
+ ) !void {
+ var valid_count: usize = 0;
+ for (json.properties) |prop| {
+ if (prop.value.?.data != .e_string) continue;
+ valid_count += 1;
+ }
+
+ env.defaults.shrinkRetainingCapacity(0);
+ try env.defaults.ensureTotalCapacity(allocator, valid_count);
+
+ for (json.properties) |prop| {
+ if (prop.value.?.data != .e_string) continue;
+ env.defaults.appendAssumeCapacity(.{
+ .key = prop.key.?.data.e_string.string(allocator) catch unreachable,
+ .value = prop.value.?.data.e_string.string(allocator) catch unreachable,
+ });
+ }
+ }
+
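+ // Parses one "define" entry: { "defaults": { ... }, ".env": "<prefix or *>" }.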
+ fn loadDefineExpression(
+ env: *options.Env,
+ json: *const js_ast.E.Object,
+ allocator: *std.mem.Allocator,
+ ) anyerror!void {
+ for (json.properties) |prop| {
+ switch (prop.key.?.data) {
+ .e_string => |e_str| {
+ const str = e_str.string(allocator) catch "";
+
+ if (strings.eqlComptime(str, "defaults")) {
+ switch (prop.value.?.data) {
+ .e_object => |obj| {
+ try loadDefineDefaults(env, obj, allocator);
+ },
+ else => {
+ env.defaults.shrinkRetainingCapacity(0);
+ },
+ }
+ } else if (strings.eqlComptime(str, ".env")) {
+ switch (prop.value.?.data) {
+ .e_string => |value_str| {
+ env.setBehaviorFromPrefix(value_str.string(allocator) catch "");
+ },
+ else => {
+ env.behavior = .disable;
+ env.prefix = "";
+ },
+ }
+ }
+ },
+ else => continue,
+ }
+ }
+ }
+
+ fn loadFrameworkExpression(
+ framework: *options.Framework,
+ json: js_ast.Expr,
+ allocator: *std.mem.Allocator,
+ comptime read_define: bool,
+ ) bool {
if (json.asProperty("client")) |client| {
if (client.expr.asString(allocator)) |str| {
if (str.len > 0) {
@@ -70,55 +134,30 @@ pub const PackageJSON = struct {
}
}
- // "env": {
- // "client": {
- // "NEXT_TRAILING_SLASH": false,
- // },
- // "clientPrefix": "NEXT_PUBLIC_",
- // "server": {
- // "NEXT_TRAILING_SLASH": false,
- // },
- // "serverPrefix": "",
- // }
-
- if (json.asProperty("env")) |defines| {
- if (defines.expr.asProperty("client")) |client| {
- if (client.expr.data == .e_object) {
- const object = client.expr.data.e_object;
- var i: usize = 0;
- for (object.properties) |prop| {
- // must be strings
- const key = prop.key orelse continue;
- const value = prop.value orelse continue;
- i += @intCast(usize, @boolToInt(key.data == .e_string and value.data == .e_string));
- }
-
- if (i > 0) {
- if (framework.client_env == null) {
- framework.client_env = options.Framework.Env.init(allocator, "");
- }
- var env = &framework.client_env.?;
- try env.defaults.ensureUnusedCapacity(i);
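+ // "define": {
+ //   "client": { ".env": "<prefix or *>", "defaults": { ... } },
+ //   "server": { ".env": "<prefix or *>", "defaults": { ... } }
+ // }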
+ if (comptime read_define) {
+ if (json.asProperty("define")) |defines| {
+ if (defines.expr.asProperty("client")) |client| {
+ if (client.expr.data == .e_object) {
+ const object = client.expr.data.e_object;
+ framework.client_env = options.Env.init(allocator);
- for (object.properties) |prop| {
- // must be strings
- // not for any good reason.
- // we should fix this later
+ loadDefineExpression(&framework.client_env, object, allocator) catch {};
+ }
+ }
- const key = prop.key orelse continue;
- const value = prop.value orelse continue;
- if (key.data != .e_string or value.data != .e_string) continue;
+ if (defines.expr.asProperty("server")) |server| {
+ if (server.expr.data == .e_object) {
+ const object = server.expr.data.e_object;
+ framework.server_env = options.Env.init(allocator);
- var res = try define.getOrPut(try key.asString(allocator));
- if (!res.found_existing) {
- res.value_ptr.* = try value.asString(allocator);
- }
- }
+ loadDefineExpression(&framework.server_env, object, allocator) catch {};
}
}
}
-
- if (defines.expr.asProperty("server")) |server| {}
}
if (json.asProperty("server")) |server| {
@@ -132,7 +171,14 @@ pub const PackageJSON = struct {
return framework.client.len > 0;
}
- pub fn loadFrameworkWithPreference(package_json: *const PackageJSON, pair: *FrameworkRouterPair, json: js_ast.Expr, allocator: *std.mem.Allocator, comptime load_framework: LoadFramework) void {
+ pub fn loadFrameworkWithPreference(
+ package_json: *const PackageJSON,
+ pair: *FrameworkRouterPair,
+ json: js_ast.Expr,
+ allocator: *std.mem.Allocator,
+ comptime read_defines: bool,
+ comptime load_framework: LoadFramework,
+ ) void {
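+ // read_defines controls whether the "define" env config is parsed out of package.json.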
const framework_object = json.asProperty("framework") orelse return;
if (framework_object.expr.asProperty("static")) |static_prop| {
@@ -197,7 +243,7 @@ pub const PackageJSON = struct {
switch (comptime load_framework) {
.development => {
if (framework_object.expr.asProperty("development")) |env| {
- if (loadFrameworkExpression(pair.framework, env.expr, allocator)) {
+ if (loadFrameworkExpression(pair.framework, env.expr, allocator, read_defines)) {
pair.framework.package = package_json.name;
pair.framework.development = true;
if (env.expr.asProperty("static")) |static_prop| {
@@ -215,7 +261,7 @@ pub const PackageJSON = struct {
},
.production => {
if (framework_object.expr.asProperty("production")) |env| {
- if (loadFrameworkExpression(pair.framework, env.expr, allocator)) {
+ if (loadFrameworkExpression(pair.framework, env.expr, allocator, read_defines)) {
pair.framework.package = package_json.name;
pair.framework.development = false;
@@ -235,7 +281,7 @@ pub const PackageJSON = struct {
else => unreachable,
}
- if (loadFrameworkExpression(pair.framework, framework_object.expr, allocator)) {
+ if (loadFrameworkExpression(pair.framework, framework_object.expr, allocator, read_defines)) {
pair.framework.package = package_json.name;
pair.framework.development = false;
}
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 0c97ea6c8..9ff5a4eab 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -376,6 +376,7 @@ pub fn NewResolver(cache_files: bool) type {
package: string,
pair: *PackageJSON.FrameworkRouterPair,
comptime preference: PackageJSON.LoadFramework,
+ comptime load_defines: bool,
) !void {
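+ // load_defines is forwarded to loadFrameworkWithPreference so the package.json
+ // "define" block is only parsed when requested.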
// TODO: make this only parse package.json once
@@ -384,7 +385,7 @@ pub fn NewResolver(cache_files: bool) type {
const pkg: *const PackageJSON = result.package_json orelse r.packageJSONForResolvedNodeModuleWithIgnoreMissingName(&result, true) orelse return error.MissingPackageJSON;
const json: Expr = (try r.caches.json.parseJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;
- pkg.loadFrameworkWithPreference(pair, json, r.allocator, preference);
+ pkg.loadFrameworkWithPreference(pair, json, r.allocator, load_defines, preference);
const dir = pkg.source.path.name.dirWithTrailingSlash();
var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
if (pair.framework.client.len > 0) {