aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorGravatar Jarred Sumner <jarred@jarredsumner.com> 2021-07-13 10:32:57 -0700
committerGravatar Jarred Sumner <jarred@jarredsumner.com> 2021-07-13 10:32:57 -0700
commitfea9faaf4cd110591e9e6f07cd4d17cbc0ea5918 (patch)
treeb59ec68d139ebcb339cd7bd6d1d15eb71a423b4f /src
parentb1b459435f375bf5eef7c6aeb7285ac6c2719b62 (diff)
downloadbun-fea9faaf4cd110591e9e6f07cd4d17cbc0ea5918.tar.gz
bun-fea9faaf4cd110591e9e6f07cd4d17cbc0ea5918.tar.zst
bun-fea9faaf4cd110591e9e6f07cd4d17cbc0ea5918.zip
alright
Former-commit-id: ab73c7b323c222e5d1172c07036653ca98aa8e6b
Diffstat (limited to 'src')
-rw-r--r--src/allocators.zig105
-rw-r--r--src/api/schema.d.ts16
-rw-r--r--src/api/schema.js95
-rw-r--r--src/api/schema.peechy12
-rw-r--r--src/api/schema.zig96
-rw-r--r--src/bundler.zig141
-rw-r--r--src/cli.zig23
-rw-r--r--src/deps/picohttp.zig3
-rw-r--r--src/deps/picohttpparser.c3
-rw-r--r--src/feature_flags.zig2
-rw-r--r--src/fs.zig140
-rw-r--r--src/global.zig59
-rw-r--r--src/http.zig28
-rw-r--r--src/javascript/jsc/api/router.zig134
-rw-r--r--src/javascript/jsc/base.zig2
-rw-r--r--src/javascript/jsc/javascript.zig13
-rw-r--r--src/javascript/jsc/webcore/response.zig5
-rw-r--r--src/options.zig99
-rw-r--r--src/resolver/package_json.zig2
-rw-r--r--src/resolver/resolve_path.zig4
-rw-r--r--src/resolver/resolver.zig91
-rw-r--r--src/router.zig644
-rw-r--r--src/strings.zig40
23 files changed, 1504 insertions, 253 deletions
diff --git a/src/allocators.zig b/src/allocators.zig
index f7ba9ff87..0ecb13f6e 100644
--- a/src/allocators.zig
+++ b/src/allocators.zig
@@ -123,10 +123,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
pub var backing_buf_used: u16 = 0;
const Allocator = std.mem.Allocator;
const Self = @This();
- pub const ListIndex = packed struct {
- index: u31,
- is_overflowing: bool = false,
- };
+
overflow_list: std.ArrayListUnmanaged(ValueType),
allocator: *Allocator,
@@ -145,10 +142,10 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
return backing_buf_used >= @as(u16, count);
}
- pub fn at(self: *const Self, index: ListIndex) ?*ValueType {
+ pub fn at(self: *const Self, index: IndexType) ?*ValueType {
if (index.index == NotFound.index or index.index == Unassigned.index) return null;
- if (index.is_overflowing) {
+ if (index.is_overflow) {
return &self.overflow_list.items[index.index];
} else {
return &backing_buf[index.index];
@@ -159,9 +156,9 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
return isSliceInBuffer(value, backing_buf);
}
- pub fn append(self: *Self, value: ValueType) !ListIndex {
- var result = ListIndex{ .index = std.math.maxInt(u31), .is_overflowing = backing_buf_used > max_index };
- if (result.is_overflowing) {
+ pub fn append(self: *Self, value: ValueType) !IndexType {
+ var result = IndexType{ .index = std.math.maxInt(u31), .is_overflow = backing_buf_used > max_index };
+ if (result.is_overflow) {
result.index = @intCast(u31, self.overflow_list.items.len);
try self.overflow_list.append(self.allocator, value);
} else {
@@ -176,10 +173,10 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
return result;
}
- pub fn update(self: *Self, result: *ListIndex, value: ValueType) !*ValueType {
+ pub fn update(self: *Self, result: *IndexType, value: ValueType) !*ValueType {
if (result.index.index == NotFound.index or result.index.index == Unassigned.index) {
- result.index.is_overflowing = backing_buf_used > max_index;
- if (result.index.is_overflowing) {
+ result.index.is_overflow = backing_buf_used > max_index;
+ if (result.index.is_overflow) {
result.index.index = @intCast(u31, self.overflow_list.items.len);
} else {
result.index.index = backing_buf_used;
@@ -190,7 +187,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
}
}
- if (result.index.is_overflowing) {
+ if (result.index.is_overflow) {
if (self.overflow_list.items.len == result.index.index) {
const real_index = self.overflow_list.items.len;
try self.overflow_list.append(self.allocator, value);
@@ -206,7 +203,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
}
}
- pub fn remove(self: *Self, index: ListIndex) void {
+ pub fn remove(self: *Self, index: IndexType) void {
@compileError("Not implemented yet.");
// switch (index) {
// Unassigned.index => {
@@ -234,7 +231,9 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
}
};
}
-pub fn BSSStringList(comptime _count: usize, comptime item_length: usize) type {
+pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type {
+ // + 1 for sentinel
+ const item_length = _item_length + 1;
const count = _count * 2;
const max_index = count - 1;
const ValueType = []const u8;
@@ -246,10 +245,7 @@ pub fn BSSStringList(comptime _count: usize, comptime item_length: usize) type {
pub var backing_buf_used: u64 = undefined;
const Allocator = std.mem.Allocator;
const Self = @This();
- pub const ListIndex = packed struct {
- index: u31,
- is_overflowing: bool = false,
- };
+
overflow_list: std.ArrayListUnmanaged(ValueType),
allocator: *Allocator,
@@ -271,7 +267,7 @@ pub fn BSSStringList(comptime _count: usize, comptime item_length: usize) type {
pub fn at(self: *const Self, index: IndexType) ?ValueType {
if (index.index == NotFound.index or index.index == Unassigned.index) return null;
- if (index.is_overflowing) {
+ if (index.is_overflow) {
return &self.overflow_list.items[index.index];
} else {
return &slice_buf[index.index];
@@ -286,20 +282,67 @@ pub fn BSSStringList(comptime _count: usize, comptime item_length: usize) type {
return constStrToU8(slice);
}
- pub fn append(self: *Self, _value: anytype) ![]const u8 {
- var value = _value;
- if (value.len + backing_buf_used < backing_buf.len - 1) {
+ pub fn append(self: *Self, comptime AppendType: type, _value: AppendType) ![]const u8 {
+ const value_len: usize = brk: {
+ switch (comptime AppendType) {
+ []const u8, []u8 => {
+ break :brk _value.len;
+ },
+ else => {
+ var len: usize = 0;
+ for (_value) |val| {
+ len += val.len;
+ }
+ break :brk len;
+ },
+ }
+ unreachable;
+ } + 1;
+
+ var value: [:0]u8 = undefined;
+ if (value_len + backing_buf_used < backing_buf.len - 1) {
const start = backing_buf_used;
- backing_buf_used += value.len;
- std.mem.copy(u8, backing_buf[start..backing_buf_used], _value);
- value = backing_buf[start..backing_buf_used];
+ backing_buf_used += value_len;
+
+ switch (AppendType) {
+ []const u8, []u8 => {
+ std.mem.copy(u8, backing_buf[start .. backing_buf_used - 1], _value);
+ backing_buf[backing_buf_used - 1] = 0;
+ },
+ else => {
+ var remainder = backing_buf[start..];
+ for (_value) |val| {
+ std.mem.copy(u8, remainder, val);
+ remainder = remainder[val.len..];
+ }
+ remainder[0] = 0;
+ },
+ }
+
+ value = backing_buf[start .. backing_buf_used - 1 :0];
} else {
- value = try self.allocator.dupe(u8, _value);
+ var value_buf = try self.allocator.alloc(u8, value_len);
+
+ switch (comptime AppendType) {
+ []const u8, []u8 => {
+ std.mem.copy(u8, value_buf, _value);
+ },
+ else => {
+ var remainder = value_buf;
+ for (_value) |val| {
+ std.mem.copy(u8, remainder, val);
+ remainder = remainder[val.len..];
+ }
+ },
+ }
+
+ value_buf[value_len - 1] = 0;
+ value = value_buf[0 .. value_len - 1 :0];
}
- var result = ListIndex{ .index = std.math.maxInt(u31), .is_overflowing = slice_buf_used > max_index };
+ var result = IndexType{ .index = std.math.maxInt(u31), .is_overflow = slice_buf_used > max_index };
- if (result.is_overflowing) {
+ if (result.is_overflow) {
result.index = @intCast(u31, self.overflow_list.items.len);
} else {
result.index = slice_buf_used;
@@ -309,7 +352,7 @@ pub fn BSSStringList(comptime _count: usize, comptime item_length: usize) type {
}
}
- if (result.is_overflowing) {
+ if (result.is_overflow) {
if (self.overflow_list.items.len == result.index) {
const real_index = self.overflow_list.items.len;
try self.overflow_list.append(self.allocator, value);
@@ -325,7 +368,7 @@ pub fn BSSStringList(comptime _count: usize, comptime item_length: usize) type {
}
}
- pub fn remove(self: *Self, index: ListIndex) void {
+ pub fn remove(self: *Self, index: IndexType) void {
@compileError("Not implemented yet.");
// switch (index) {
// Unassigned.index => {
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index 4b647c4db..c809ec181 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -234,6 +234,15 @@ type uint32 = number;
loaders: Loader[];
}
+ export interface FrameworkConfig {
+ entry_point?: string;
+ }
+
+ export interface RouteConfig {
+ dir?: string;
+ extensions?: string[];
+ }
+
export interface TransformOptions {
jsx?: JSX;
tsconfig_override?: string;
@@ -256,7 +265,8 @@ type uint32 = number;
only_scan_dependencies?: ScanDependencyMode;
generate_node_module_bundle?: boolean;
node_modules_bundle_path?: string;
- javascript_framework_file?: string;
+ framework?: FrameworkConfig;
+ router?: RouteConfig;
}
export interface FileHandle {
@@ -408,6 +418,10 @@ type uint32 = number;
export declare function decodeStringMap(buffer: ByteBuffer): StringMap;
export declare function encodeLoaderMap(message: LoaderMap, bb: ByteBuffer): void;
export declare function decodeLoaderMap(buffer: ByteBuffer): LoaderMap;
+ export declare function encodeFrameworkConfig(message: FrameworkConfig, bb: ByteBuffer): void;
+ export declare function decodeFrameworkConfig(buffer: ByteBuffer): FrameworkConfig;
+ export declare function encodeRouteConfig(message: RouteConfig, bb: ByteBuffer): void;
+ export declare function decodeRouteConfig(buffer: ByteBuffer): RouteConfig;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
diff --git a/src/api/schema.js b/src/api/schema.js
index a8241f345..947e3d9a8 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -569,6 +569,81 @@ bb.writeByte(encoded);
}
+function decodeFrameworkConfig(bb) {
+ var result = {};
+
+ while (true) {
+ switch (bb.readByte()) {
+ case 0:
+ return result;
+
+ case 1:
+ result["entry_point"] = bb.readString();
+ break;
+
+ default:
+ throw new Error("Attempted to parse invalid message");
+ }
+ }
+}
+
+function encodeFrameworkConfig(message, bb) {
+
+ var value = message["entry_point"];
+ if (value != null) {
+ bb.writeByte(1);
+ bb.writeString(value);
+ }
+ bb.writeByte(0);
+
+}
+
+function decodeRouteConfig(bb) {
+ var result = {};
+
+ while (true) {
+ switch (bb.readByte()) {
+ case 0:
+ return result;
+
+ case 1:
+ result["dir"] = bb.readString();
+ break;
+
+ case 2:
+ var length = bb.readVarUint();
+ var values = result["extensions"] = Array(length);
+ for (var i = 0; i < length; i++) values[i] = bb.readString();
+ break;
+
+ default:
+ throw new Error("Attempted to parse invalid message");
+ }
+ }
+}
+
+function encodeRouteConfig(message, bb) {
+
+ var value = message["dir"];
+ if (value != null) {
+ bb.writeByte(1);
+ bb.writeString(value);
+ }
+
+ var value = message["extensions"];
+ if (value != null) {
+ bb.writeByte(2);
+ var values = value, n = values.length;
+ bb.writeVarUint(n);
+ for (var i = 0; i < n; i++) {
+ value = values[i];
+ bb.writeString(value);
+ }
+ }
+ bb.writeByte(0);
+
+}
+
function decodeTransformOptions(bb) {
var result = {};
@@ -672,7 +747,11 @@ function decodeTransformOptions(bb) {
break;
case 22:
- result["javascript_framework_file"] = bb.readString();
+ result["framework"] = decodeFrameworkConfig(bb);
+ break;
+
+ case 23:
+ result["router"] = decodeRouteConfig(bb);
break;
default:
@@ -840,10 +919,16 @@ bb.writeByte(encoded);
bb.writeString(value);
}
- var value = message["javascript_framework_file"];
+ var value = message["framework"];
if (value != null) {
bb.writeByte(22);
- bb.writeString(value);
+ encodeFrameworkConfig(value, bb);
+ }
+
+ var value = message["router"];
+ if (value != null) {
+ bb.writeByte(23);
+ encodeRouteConfig(value, bb);
}
bb.writeByte(0);
@@ -1789,6 +1874,10 @@ export { decodeStringMap }
export { encodeStringMap }
export { decodeLoaderMap }
export { encodeLoaderMap }
+export { decodeFrameworkConfig }
+export { encodeFrameworkConfig }
+export { decodeRouteConfig }
+export { encodeRouteConfig }
export { decodeTransformOptions }
export { encodeTransformOptions }
export { decodeFileHandle }
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index 6f755e907..e558d1055 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -132,6 +132,15 @@ struct LoaderMap {
Loader[] loaders;
}
+message FrameworkConfig {
+ string entry_point = 1;
+}
+
+message RouteConfig {
+ string dir = 1;
+ string[] extensions = 2;
+}
+
message TransformOptions {
JSX jsx = 1;
string tsconfig_override = 2;
@@ -169,7 +178,8 @@ message TransformOptions {
string node_modules_bundle_path = 21;
- string javascript_framework_file = 22;
+ FrameworkConfig framework = 22;
+ RouteConfig router = 23;
}
struct FileHandle {
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 23739441a..02125997e 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -759,6 +759,82 @@ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
};
+pub const FrameworkConfig = struct {
+/// entry_point
+entry_point: ?[]const u8 = null,
+
+
+pub fn decode(reader: anytype) anyerror!FrameworkConfig {
+ var this = std.mem.zeroes(FrameworkConfig);
+
+ while(true) {
+ switch (try reader.readByte()) {
+ 0 => { return this; },
+
+ 1 => {
+ this.entry_point = try reader.readValue([]const u8);
+},
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+unreachable;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+if (this.entry_point) |entry_point| {
+ try writer.writeFieldID(1);
+ try writer.writeValue(entry_point);
+}
+try writer.endMessage();
+}
+
+};
+
+pub const RouteConfig = struct {
+/// dir
+dir: ?[]const u8 = null,
+
+/// extensions
+extensions: []const []const u8,
+
+
+pub fn decode(reader: anytype) anyerror!RouteConfig {
+ var this = std.mem.zeroes(RouteConfig);
+
+ while(true) {
+ switch (try reader.readByte()) {
+ 0 => { return this; },
+
+ 1 => {
+ this.dir = try reader.readValue([]const u8);
+},
+ 2 => {
+ this.extensions = try reader.readArray([]const u8);
+},
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+unreachable;
+}
+
+pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+if (this.dir) |dir| {
+ try writer.writeFieldID(1);
+ try writer.writeValue(dir);
+}
+if (this.extensions) |extensions| {
+ try writer.writeFieldID(2);
+ try writer.writeArray([]const u8, extensions);
+}
+try writer.endMessage();
+}
+
+};
+
pub const TransformOptions = struct {
/// jsx
jsx: ?Jsx = null,
@@ -823,8 +899,11 @@ generate_node_module_bundle: ?bool = null,
/// node_modules_bundle_path
node_modules_bundle_path: ?[]const u8 = null,
-/// javascript_framework_file
-javascript_framework_file: ?[]const u8 = null,
+/// framework
+framework: ?FrameworkConfig = null,
+
+/// router
+router: ?RouteConfig = null,
pub fn decode(reader: anytype) anyerror!TransformOptions {
@@ -898,7 +977,10 @@ pub fn decode(reader: anytype) anyerror!TransformOptions {
this.node_modules_bundle_path = try reader.readValue([]const u8);
},
22 => {
- this.javascript_framework_file = try reader.readValue([]const u8);
+ this.framework = try reader.readValue(FrameworkConfig);
+},
+ 23 => {
+ this.router = try reader.readValue(RouteConfig);
},
else => {
return error.InvalidMessage;
@@ -993,9 +1075,13 @@ if (this.node_modules_bundle_path) |node_modules_bundle_path| {
try writer.writeFieldID(21);
try writer.writeValue(node_modules_bundle_path);
}
-if (this.javascript_framework_file) |javascript_framework_file| {
+if (this.framework) |framework| {
try writer.writeFieldID(22);
- try writer.writeValue(javascript_framework_file);
+ try writer.writeValue(framework);
+}
+if (this.router) |router| {
+ try writer.writeFieldID(23);
+ try writer.writeValue(router);
}
try writer.endMessage();
}
diff --git a/src/bundler.zig b/src/bundler.zig
index 802b7cd82..b74487a50 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -30,6 +30,7 @@ const hash_map = @import("hash_map.zig");
const PackageJSON = @import("./resolver/package_json.zig").PackageJSON;
const DebugLogs = _resolver.DebugLogs;
const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;
+const Router = @import("./router.zig");
const Css = @import("css_scanner.zig");
@@ -149,6 +150,8 @@ pub fn NewBundler(cache_files: bool) type {
resolve_queue: ResolveQueue,
elapsed: i128 = 0,
needs_runtime: bool = false,
+ router: ?Router = null,
+
linker: Linker,
timer: Timer = Timer{},
@@ -166,7 +169,10 @@ pub fn NewBundler(cache_files: bool) type {
) !ThisBundler {
js_ast.Expr.Data.Store.create(allocator);
js_ast.Stmt.Data.Store.create(allocator);
- var fs = try Fs.FileSystem.init1(allocator, opts.absolute_working_dir, opts.serve orelse false);
+ var fs = try Fs.FileSystem.init1(
+ allocator,
+ opts.absolute_working_dir,
+ );
const bundle_options = try options.BundleOptions.fromApi(
allocator,
fs,
@@ -206,6 +212,72 @@ pub fn NewBundler(cache_files: bool) type {
);
}
+ pub fn configureFramework(this: *ThisBundler) !void {
+ if (this.options.framework) |*framework| {
+ var framework_file = this.normalizeEntryPointPath(framework.entry_point);
+ var resolved = this.resolver.resolve(
+ this.fs.top_level_dir,
+ framework_file,
+ .entry_point,
+ ) catch |err| {
+ Output.prettyErrorln("Failed to load framework: {s}", .{@errorName(err)});
+ Output.flush();
+ this.options.framework = null;
+ return;
+ };
+
+ framework.entry_point = try this.allocator.dupe(u8, resolved.path_pair.primary.text);
+ }
+ }
+ pub fn configureRouter(this: *ThisBundler) !void {
+ try this.configureFramework();
+
+ // if you pass just a directory, activate the router configured for the pages directory
+ // for now:
+ // - "." is not supported
+ // - multiple pages directories is not supported
+ if (this.options.route_config == null and this.options.entry_points.len == 1) {
+
+ // When inferring:
+ // - pages directory with a file extension is not supported. e.g. "pages.app/" won't work.
+ // This is a premature optimization to avoid this magical auto-detection we do here from meaningfully increasing startup time if you're just passing a file
+ // readDirInfo is a recursive lookup, top-down instead of bottom-up. It opens each folder handle and potentially reads the package.jsons
+ // So it is not fast! Unless it's already cached.
+ var paths = [_]string{std.mem.trimLeft(u8, this.options.entry_points[0], "./")};
+ if (std.mem.indexOfScalar(u8, paths[0], '.') == null) {
+ var pages_dir_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ var entry = this.fs.absBuf(&paths, &pages_dir_buf);
+
+ if (std.fs.path.extension(entry).len == 0) {
+ allocators.constStrToU8(entry).ptr[entry.len] = '/';
+
+ // Only throw if they actually passed in a route config and the directory failed to load
+ var dir_info_ = this.resolver.readDirInfo(entry) catch return;
+ var dir_info = dir_info_ orelse return;
+
+ this.options.route_config = options.RouteConfig{
+ .dir = dir_info.abs_path,
+ .extensions = std.mem.span(&options.RouteConfig.DefaultExtensions),
+ };
+ this.router = try Router.init(this.fs, this.allocator, this.options.route_config.?);
+ try this.router.?.loadRoutes(dir_info, Resolver, &this.resolver, std.math.maxInt(u16), true);
+ }
+ }
+ } else if (this.options.route_config) |*route_config| {
+ var paths = [_]string{route_config.dir};
+ var entry = this.fs.abs(&paths);
+ var dir_info_ = try this.resolver.readDirInfo(entry);
+ var dir_info = dir_info_ orelse return error.MissingRoutesDir;
+
+ this.options.route_config = options.RouteConfig{
+ .dir = dir_info.abs_path,
+ .extensions = route_config.extensions,
+ };
+ this.router = try Router.init(this.fs, this.allocator, this.options.route_config.?);
+ try this.router.?.loadRoutes(dir_info, Resolver, &this.resolver, std.math.maxInt(u16), true);
+ }
+ }
+
pub fn resetStore(bundler: *ThisBundler) void {
js_ast.Expr.Data.Store.reset();
js_ast.Stmt.Data.Store.reset();
@@ -989,7 +1061,7 @@ pub fn NewBundler(cache_files: bool) type {
// Step 1. Parse & scan
const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
var file_path = resolve_result.path_pair.primary;
- file_path.pretty = Linker.relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
+ file_path.pretty = Linker.relative_paths_list.append(string, bundler.fs.relativeTo(file_path.text)) catch unreachable;
var output_file = options.OutputFile{
.input = file_path,
@@ -1134,7 +1206,7 @@ pub fn NewBundler(cache_files: bool) type {
// Step 1. Parse & scan
const loader = bundler.options.loaders.get(resolve_result.path_pair.primary.name.ext) orelse .file;
var file_path = resolve_result.path_pair.primary;
- file_path.pretty = Linker.relative_paths_list.append(bundler.fs.relativeTo(file_path.text)) catch unreachable;
+ file_path.pretty = Linker.relative_paths_list.append(string, bundler.fs.relativeTo(file_path.text)) catch unreachable;
switch (loader) {
.jsx, .tsx, .js, .ts, .json => {
@@ -1609,32 +1681,11 @@ pub fn NewBundler(cache_files: bool) type {
};
}
- pub fn bundle(
- allocator: *std.mem.Allocator,
- log: *logger.Log,
- opts: Api.TransformOptions,
- ) !options.TransformResult {
- var bundler = try ThisBundler.init(allocator, log, opts, null);
- bundler.configureLinker();
-
- if (bundler.options.write and bundler.options.output_dir.len > 0) {}
-
- // 100.00 µs std.fifo.LinearFifo(resolver.Result,std.fifo.LinearFifoBufferType { .Dynamic = {}}).writeItemAssumeCapacity
- if (bundler.options.resolve_mode != .lazy) {
- try bundler.resolve_queue.ensureUnusedCapacity(24);
- }
-
- var entry_points = try allocator.alloc(_resolver.Result, bundler.options.entry_points.len);
-
- if (log.level == .verbose) {
- bundler.resolver.debug_logs = try DebugLogs.init(allocator);
- }
-
- var rfs: *Fs.FileSystem.RealFS = &bundler.fs.fs;
-
+ fn enqueueEntryPoints(bundler: *ThisBundler, entry_points: []_resolver.Result, comptime normalize_entry_point: bool) void {
var entry_point_i: usize = 0;
+
for (bundler.options.entry_points) |_entry| {
- var entry: string = bundler.normalizeEntryPointPath(_entry);
+ var entry: string = if (comptime normalize_entry_point) bundler.normalizeEntryPointPath(_entry) else _entry;
defer {
js_ast.Expr.Data.Store.reset();
@@ -1651,7 +1702,7 @@ pub fn NewBundler(cache_files: bool) type {
continue;
}
- try bundler.resolve_results.put(key, result);
+ bundler.resolve_results.put(key, result) catch unreachable;
entry_points[entry_point_i] = result;
if (isDebug) {
@@ -1661,6 +1712,40 @@ pub fn NewBundler(cache_files: bool) type {
entry_point_i += 1;
bundler.resolve_queue.writeItem(result) catch unreachable;
}
+ }
+
+ pub fn bundle(
+ allocator: *std.mem.Allocator,
+ log: *logger.Log,
+ opts: Api.TransformOptions,
+ ) !options.TransformResult {
+ var bundler = try ThisBundler.init(allocator, log, opts, null);
+ bundler.configureLinker();
+ try bundler.configureRouter();
+
+ var skip_normalize = false;
+ if (bundler.router) |router| {
+ bundler.options.entry_points = try router.getEntryPoints(allocator);
+ skip_normalize = true;
+ }
+
+ if (bundler.options.write and bundler.options.output_dir.len > 0) {}
+
+ // 100.00 µs std.fifo.LinearFifo(resolver.Result,std.fifo.LinearFifoBufferType { .Dynamic = {}}).writeItemAssumeCapacity
+ if (bundler.options.resolve_mode != .lazy) {
+ try bundler.resolve_queue.ensureUnusedCapacity(24);
+ }
+
+ var entry_points = try allocator.alloc(_resolver.Result, bundler.options.entry_points.len);
+ if (skip_normalize) {
+ bundler.enqueueEntryPoints(entry_points, false);
+ } else {
+ bundler.enqueueEntryPoints(entry_points, true);
+ }
+
+ if (log.level == .verbose) {
+ bundler.resolver.debug_logs = try DebugLogs.init(allocator);
+ }
if (bundler.options.output_dir_handle == null) {
const outstream = std.io.getStdOut();
diff --git a/src/cli.zig b/src/cli.zig
index 05aa6feae..42bd360cc 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -130,9 +130,8 @@ pub const Cli = struct {
clap.parseParam("--scan Instead of bundling or transpiling, print a list of every file imported by an entry point, recursively") catch unreachable,
clap.parseParam("--new-jsb Generate a new node_modules.jsb file from node_modules and entry point(s)") catch unreachable,
clap.parseParam("--jsb <STR> Use a Speedy JavaScript Bundle (default: \"./node_modules.jsb\" if exists)") catch unreachable,
- clap.parseParam("--framework <STR> Use a JavaScript framework (file path) with --serve") catch unreachable,
- // clap.parseParam("--no-jsb Use a Speedy JavaScript Bundle (default: \"./node_modules.jsb\" if exists)") catch unreachable,
- clap.parseParam("<POS>... Entry points to use") catch unreachable,
+ clap.parseParam("--framework <STR> Use a JavaScript framework (module path)") catch unreachable,
+ clap.parseParam("<POS>... Entry point(s) to use. Can be individual files, npm packages, or one directory. If one directory, it will auto-detect entry points using a filesystem router. If you're using a framework, passing entry points are optional.") catch unreachable,
};
var diag = clap.Diagnostic{};
@@ -184,7 +183,7 @@ pub const Cli = struct {
var jsx_production = args.flag("--jsx-production");
var react_fast_refresh = false;
- var javascript_framework = args.option("--framework");
+ var framework_entry_point = args.option("--framework");
if (serve or args.flag("--new-jsb")) {
react_fast_refresh = true;
@@ -277,16 +276,20 @@ pub const Cli = struct {
};
}
- if (entry_points.len == 0) {
+ var javascript_framework: ?Api.FrameworkConfig = null;
+
+ if (framework_entry_point) |entry| {
+ javascript_framework = Api.FrameworkConfig{
+ .entry_point = entry,
+ };
+ }
+
+ if (entry_points.len == 0 and javascript_framework == null) {
try clap.help(stderr.writer(), &params);
try diag.report(stderr.writer(), error.MissingEntryPoint);
std.process.exit(1);
}
- if (!serve) {
- javascript_framework = null;
- }
-
return Api.TransformOptions{
.jsx = jsx,
.output_dir = output_dir,
@@ -314,7 +317,7 @@ pub const Cli = struct {
.platform = platform,
.only_scan_dependencies = if (args.flag("--scan")) Api.ScanDependencyMode.all else Api.ScanDependencyMode._none,
.generate_node_module_bundle = if (args.flag("--new-jsb")) true else false,
- .javascript_framework_file = javascript_framework,
+ .framework = javascript_framework,
};
}
};
diff --git a/src/deps/picohttp.zig b/src/deps/picohttp.zig
index 5fff599cc..58edb4708 100644
--- a/src/deps/picohttp.zig
+++ b/src/deps/picohttp.zig
@@ -54,6 +54,9 @@ pub const Request = struct {
0,
);
+ // Leave a sentinel value, for JavaScriptCore support.
+ path.ptr[path.len] = 0;
+
return switch (rc) {
-1 => error.BadRequest,
-2 => error.ShortRead,
diff --git a/src/deps/picohttpparser.c b/src/deps/picohttpparser.c
index 5e5783abb..690ded4ec 100644
--- a/src/deps/picohttpparser.c
+++ b/src/deps/picohttpparser.c
@@ -90,7 +90,8 @@
CHECK_EOF(); \
} \
tok = tok_start; \
- toklen = buf - tok_start; \
+ toklen = buf - tok_start;
+ \
} while (0)
static const char *token_char_map = "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
diff --git a/src/feature_flags.zig b/src/feature_flags.zig
index 09264153c..16adec042 100644
--- a/src/feature_flags.zig
+++ b/src/feature_flags.zig
@@ -31,6 +31,8 @@ pub const verbose_watcher = true;
pub const css_supports_fence = true;
+pub const disable_entry_cache = false;
+
pub const CSSModulePolyfill = enum {
// When you import a .css file and you reference the import in JavaScript
// Just return whatever the property key they referenced was
diff --git a/src/fs.zig b/src/fs.zig
index a7eb5d02e..40a99edc7 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -68,7 +68,10 @@ pub const FileSystem = struct {
ENOTDIR,
};
- pub fn init1(allocator: *std.mem.Allocator, top_level_dir: ?string, enable_watcher: bool) !*FileSystem {
+ pub fn init1(
+ allocator: *std.mem.Allocator,
+ top_level_dir: ?string,
+ ) !*FileSystem {
var _top_level_dir = top_level_dir orelse (if (isBrowser) "/project/" else try std.process.getCwdAlloc(allocator));
// Ensure there's a trailing separator in the top level directory
@@ -86,7 +89,10 @@ pub const FileSystem = struct {
instance = FileSystem{
.allocator = allocator,
.top_level_dir = _top_level_dir,
- .fs = Implementation.init(allocator, _top_level_dir, enable_watcher),
+ .fs = Implementation.init(
+ allocator,
+ _top_level_dir,
+ ),
// .stats = std.StringHashMap(Stat).init(allocator),
.dirname_store = DirnameStore.init(allocator),
.filename_store = FilenameStore.init(allocator),
@@ -98,7 +104,7 @@ pub const FileSystem = struct {
}
pub const DirEntry = struct {
- pub const EntryMap = hash_map.StringHashMap(EntryStore.ListIndex);
+ pub const EntryMap = hash_map.StringHashMap(allocators.IndexType);
pub const EntryStore = allocators.BSSList(Entry, Preallocate.Counts.files);
dir: string,
fd: StoredFileDescriptorType = 0,
@@ -122,7 +128,20 @@ pub const FileSystem = struct {
},
}
// entry.name only lives for the duration of the iteration
- var name = FileSystem.FilenameStore.editableSlice(try FileSystem.FilenameStore.instance.append(entry.name));
+
+ var name: []u8 = undefined;
+
+ switch (_kind) {
+ .file => {
+ name = FileSystem.FilenameStore.editableSlice(try FileSystem.FilenameStore.instance.append(@TypeOf(entry.name), entry.name));
+ },
+ .dir => {
+ // FileSystem.FilenameStore here because it's not an absolute path
+ // it's a path relative to the parent directory
+ // so it's a tiny path like "foo" instead of "/bar/baz/foo"
+ name = FileSystem.FilenameStore.editableSlice(try FileSystem.FilenameStore.instance.append(@TypeOf(entry.name), entry.name));
+ },
+ }
for (entry.name) |c, i| {
name[i] = std.ascii.toLower(c);
@@ -380,9 +399,9 @@ pub const FileSystem = struct {
threadlocal var realpath_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
pub fn resolveAlloc(f: *@This(), allocator: *std.mem.Allocator, parts: anytype) !string {
- const joined = f.join(parts);
+ const joined = f.abs(parts);
- const realpath = try std.fs.realpath(joined, (&realpath_buffer));
+ const realpath = f.resolvePath(joined);
return try allocator.dupe(u8, realpath);
}
@@ -395,10 +414,7 @@ pub const FileSystem = struct {
entries_mutex: Mutex = Mutex.init(),
entries: *EntriesOption.Map,
allocator: *std.mem.Allocator,
- do_not_cache_entries: bool = false,
limiter: Limiter,
- watcher: ?std.StringHashMap(WatchData) = null,
- watcher_mutex: Mutex = Mutex.init(),
cwd: string,
parent_fs: *FileSystem = undefined,
file_limit: usize = 32,
@@ -440,7 +456,10 @@ pub const FileSystem = struct {
return limit.cur;
}
- pub fn init(allocator: *std.mem.Allocator, cwd: string, enable_watcher: bool) RealFS {
+ pub fn init(
+ allocator: *std.mem.Allocator,
+ cwd: string,
+ ) RealFS {
const file_limit = adjustUlimit();
return RealFS{
.entries = EntriesOption.Map.init(allocator),
@@ -449,7 +468,6 @@ pub const FileSystem = struct {
.file_limit = file_limit,
.file_quota = file_limit,
.limiter = Limiter.init(allocator, file_limit),
- .watcher = if (enable_watcher) std.StringHashMap(WatchData).init(allocator) else null,
};
}
@@ -518,39 +536,8 @@ pub const FileSystem = struct {
pub const SafetyGap = 3;
};
- fn modKeyError(fs: *RealFS, path: string, err: anyerror) void {
- if (fs.watcher) |*watcher| {
- fs.watcher_mutex.lock();
- defer fs.watcher_mutex.unlock();
- var state = WatchData.State.file_missing;
-
- switch (err) {
- error.Unusable => {
- state = WatchData.State.file_unusable_mod_key;
- },
- else => {},
- }
-
- var entry = watcher.getOrPutValue(path, WatchData{ .state = state }) catch unreachable;
- entry.value_ptr.state = state;
- }
- }
-
pub fn modKeyWithFile(fs: *RealFS, path: string, file: anytype) anyerror!ModKey {
- const key = ModKey.generate(fs, path, file) catch |err| {
- fs.modKeyError(path, err);
- return err;
- };
-
- if (fs.watcher) |*watcher| {
- fs.watcher_mutex.lock();
- defer fs.watcher_mutex.unlock();
-
- var entry = watcher.getOrPutValue(path, WatchData{ .state = .file_has_mod_key, .mod_key = key }) catch unreachable;
- entry.value_ptr.mod_key = key;
- }
-
- return key;
+ return try ModKey.generate(fs, path, file);
}
pub fn modKey(fs: *RealFS, path: string) anyerror!ModKey {
@@ -565,24 +552,6 @@ pub const FileSystem = struct {
return try fs.modKeyWithFile(path, file);
}
- pub const WatchData = struct {
- dir_entries: []string = &([_]string{}),
- file_contents: string = "",
- mod_key: ModKey = ModKey{},
- watch_mutex: Mutex = Mutex.init(),
- state: State = State.none,
-
- pub const State = enum {
- none,
- dir_has_entries,
- dir_missing,
- file_has_mod_key,
- file_need_mod_key,
- file_missing,
- file_unusable_mod_key,
- };
- };
-
pub const EntriesOption = union(Tag) {
entries: DirEntry,
err: DirEntry.Err,
@@ -654,13 +623,7 @@ pub const FileSystem = struct {
}
fn readDirectoryError(fs: *RealFS, dir: string, err: anyerror) !*EntriesOption {
- if (fs.watcher) |*watcher| {
- fs.watcher_mutex.lock();
- defer fs.watcher_mutex.unlock();
- try watcher.put(dir, WatchData{ .state = .dir_missing });
- }
-
- if (!fs.do_not_cache_entries) {
+ if (FeatureFlags.disable_entry_cache) {
fs.entries_mutex.lock();
defer fs.entries_mutex.unlock();
var get_or_put_result = try fs.entries.getOrPut(dir);
@@ -679,11 +642,11 @@ pub const FileSystem = struct {
threadlocal var temp_entries_option: EntriesOption = undefined;
- pub fn readDirectory(fs: *RealFS, _dir: string, _handle: ?std.fs.Dir, recursive: bool) !*EntriesOption {
+ pub fn readDirectory(fs: *RealFS, _dir: string, _handle: ?std.fs.Dir) !*EntriesOption {
var dir = _dir;
var cache_result: ?allocators.Result = null;
- if (!fs.do_not_cache_entries) {
+ if (FeatureFlags.disable_entry_cache) {
fs.entries_mutex.lock();
defer fs.entries_mutex.unlock();
@@ -706,7 +669,7 @@ pub const FileSystem = struct {
// if we get this far, it's a real directory, so we can just store the dir name.
if (_handle == null) {
- dir = try FilenameStore.instance.append(_dir);
+ dir = try DirnameStore.instance.append(string, _dir);
}
// Cache miss: read the directory entries
@@ -717,23 +680,7 @@ pub const FileSystem = struct {
return fs.readDirectoryError(dir, err) catch unreachable;
};
- // if (fs.watcher) |*watcher| {
- // fs.watcher_mutex.lock();
- // defer fs.watcher_mutex.unlock();
- // var _entries = watcher.iterator();
- // const names = try fs.allocator.alloc([]const u8, _entries.len);
- // for (_entries) |entry, i| {
- // names[i] = try fs.allocator.dupe(u8, entry.key);
- // }
- // strings.sortAsc(names);
-
- // try watcher.put(
- // try fs.allocator.dupe(u8, dir),
- // WatchData{ .dir_entries = names, .state = .dir_has_entries },
- // );
- // }
-
- if (!fs.do_not_cache_entries) {
+ if (FeatureFlags.disable_entry_cache) {
fs.entries_mutex.lock();
defer fs.entries_mutex.unlock();
const result = EntriesOption{
@@ -748,14 +695,7 @@ pub const FileSystem = struct {
return &temp_entries_option;
}
- fn readFileError(fs: *RealFS, path: string, err: anyerror) void {
- if (fs.watcher) |*watcher| {
- fs.watcher_mutex.lock();
- defer fs.watcher_mutex.unlock();
- var res = watcher.getOrPutValue(path, WatchData{ .state = .file_missing }) catch unreachable;
- res.value_ptr.state = .file_missing;
- }
- }
+ fn readFileError(fs: *RealFS, path: string, err: anyerror) void {}
pub fn readFileWithHandle(
fs: *RealFS,
@@ -804,14 +744,6 @@ pub const FileSystem = struct {
file_contents = buf[0..read_count];
}
- if (fs.watcher) |*watcher| {
- fs.watcher_mutex.lock();
- defer fs.watcher_mutex.unlock();
- var res = watcher.getOrPutValue(path, WatchData{}) catch unreachable;
- res.value_ptr.state = .file_need_mod_key;
- res.value_ptr.file_contents = file_contents;
- }
-
return File{ .path = Path.init(path), .contents = file_contents };
}
diff --git a/src/global.zig b/src/global.zig
index 3d985a930..5a1428049 100644
--- a/src/global.zig
+++ b/src/global.zig
@@ -74,7 +74,7 @@ pub const Output = struct {
}
pub fn printErrorable(comptime fmt: string, args: anytype) !void {
- if (isWasm) {
+ if (comptime isWasm) {
try source.stream.seekTo(0);
try source.stream.writer().print(fmt, args);
const root = @import("root");
@@ -105,7 +105,7 @@ pub const Output = struct {
}
pub fn print(comptime fmt: string, args: anytype) void {
- if (isWasm) {
+ if (comptime isWasm) {
source.stream.seekTo(0) catch return;
source.stream.writer().print(fmt, args) catch return;
const root = @import("root");
@@ -287,7 +287,7 @@ pub const Output = struct {
}
pub fn printError(comptime fmt: string, args: anytype) void {
- if (isWasm) {
+ if (comptime isWasm) {
source.error_stream.seekTo(0) catch return;
source.error_stream.writer().print(fmt, args) catch unreachable;
const root = @import("root");
@@ -300,16 +300,36 @@ pub const Output = struct {
pub const Global = struct {
pub fn panic(comptime fmt: string, args: anytype) noreturn {
- if (isWasm) {
- Output.print(fmt, args);
+ if (comptime isWasm) {
+ Output.printErrorln(fmt, args);
Output.flush();
@panic(fmt);
} else {
+ Output.prettyErrorln(fmt, args);
Output.flush();
std.debug.panic(fmt, args);
}
}
+ // std.debug.assert but happens at runtime
+ pub fn invariant(condition: bool, comptime fmt: string, args: anytype) void {
+ if (!condition) {
+ _invariant(fmt, args);
+ }
+ }
+
+ inline fn _invariant(comptime fmt: string, args: anytype) noreturn {
+ if (comptime isWasm) {
+ Output.printErrorln(fmt, args);
+ Output.flush();
+ @panic(fmt);
+ } else {
+ Output.prettyErrorln(fmt, args);
+ Output.flush();
+ std.os.exit(1);
+ }
+ }
+
pub fn notimpl() noreturn {
Global.panic("Not implemented yet!!!!!", .{});
}
@@ -327,3 +347,32 @@ pub const FileDescriptorType = if (isBrowser) u0 else std.os.fd_t;
// such is often the case with macOS
// As a useful optimization, we can store file descriptors and just keep them open...forever
pub const StoredFileDescriptorType = if (isWindows or isBrowser) u0 else std.os.fd_t;
+
+pub const PathBuilder = struct {
+ const StringBuilderType = NewStringBuilder(std.fs.MAX_PATH_BYTES);
+ builder: StringBuilderType = StringBuilderType.init(),
+
+ pub fn init() PathBuilder {
+ return PathBuilder{};
+ }
+
+ fn load(this: *PathBuilder) void {
+ return @call(.{ .modifier = .always_inline }, StringBuilderType.load, .{this.builder});
+ }
+
+ pub fn append(this: *PathBuilder, str: string) void {
+ return @call(.{ .modifier = .always_inline }, StringBuilderType.append, .{ this.builder, str });
+ }
+
+ pub fn pop(this: *PathBuilder, count: usize) void {
+ return @call(.{ .modifier = .always_inline }, StringBuilderType.pop, .{ this.builder, count });
+ }
+
+ pub fn str(this: *PathBuilder) string {
+ return @call(.{ .modifier = .always_inline }, StringBuilderType.str, .{this.builder});
+ }
+
+ pub fn reset(this: *PathBuilder) void {
+ return @call(.{ .modifier = .always_inline }, StringBuilderType.reset, .{this.builder});
+ }
+};
diff --git a/src/http.zig b/src/http.zig
index 8ac4ad18e..f9d1acf0e 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -59,6 +59,7 @@ const HTTPStatusCode = u10;
pub const URLPath = struct {
extname: string = "",
path: string = "",
+ pathname: string = "",
first_segment: string = "",
query_string: string = "",
@@ -119,6 +120,7 @@ pub const URLPath = struct {
return URLPath{
.extname = extname,
+ .pathname = raw_path,
.first_segment = first_segment,
.path = if (raw_path.len == 1) "." else path,
.query_string = if (question_mark_i > -1) raw_path[@intCast(usize, question_mark_i)..@intCast(usize, raw_path.len)] else "",
@@ -641,6 +643,7 @@ pub const RequestContext = struct {
pub const HandlerThread = struct {
args: Api.TransformOptions,
+ framework: Options.Framework,
existing_bundle: ?*NodeModuleBundle,
log: ?*logger.Log = null,
};
@@ -677,8 +680,9 @@ pub const RequestContext = struct {
js_ast.Expr.Data.Store.create(std.heap.c_allocator);
defer Output.flush();
- var boot = handler.args.javascript_framework_file.?;
var vm = try JavaScript.VirtualMachine.init(std.heap.c_allocator, handler.args, handler.existing_bundle, handler.log);
+
+ const boot = handler.framework.entry_point;
defer vm.deinit();
var resolved_entry_point = try vm.bundler.resolver.resolve(
@@ -750,6 +754,7 @@ pub const RequestContext = struct {
try JavaScriptHandler.spawnThread(
HandlerThread{
.args = server.transform_options,
+ .framework = server.bundler.options.framework.?,
.existing_bundle = server.bundler.options.node_modules_bundle,
.log = &server.log,
},
@@ -1668,7 +1673,7 @@ pub const Server = struct {
}
if (req_ctx.url.extname.len == 0 and !RequestContext.JavaScriptHandler.javascript_disabled) {
- if (server.transform_options.javascript_framework_file != null) {
+ if (server.bundler.options.framework != null) {
RequestContext.JavaScriptHandler.enqueue(&req_ctx, server) catch unreachable;
server.javascript_enabled = !RequestContext.JavaScriptHandler.javascript_disabled;
}
@@ -1719,24 +1724,7 @@ pub const Server = struct {
};
server.bundler = try Bundler.init(allocator, &server.log, options, null);
server.bundler.configureLinker();
-
- load_framework: {
- if (options.javascript_framework_file) |framework_file_path| {
- var framework_file = server.bundler.normalizeEntryPointPath(framework_file_path);
- var resolved = server.bundler.resolver.resolve(
- server.bundler.fs.top_level_dir,
- framework_file,
- .entry_point,
- ) catch |err| {
- Output.prettyError("Failed to load framework: {s}", .{@errorName(err)});
- Output.flush();
- server.transform_options.javascript_framework_file = null;
- break :load_framework;
- };
-
- server.transform_options.javascript_framework_file = try server.allocator.dupe(u8, resolved.path_pair.primary.text);
- }
- }
+ try server.bundler.configureRouter();
try server.initWatcher();
diff --git a/src/javascript/jsc/api/router.zig b/src/javascript/jsc/api/router.zig
new file mode 100644
index 000000000..f9c46b3e4
--- /dev/null
+++ b/src/javascript/jsc/api/router.zig
@@ -0,0 +1,134 @@
+usingnamespace @import("../base.zig");
+const std = @import("std");
+const Api = @import("../../../api/schema.zig").Api;
+const FilesystemRouter = @import("../../../router.zig");
+const JavaScript = @import("../javascript.zig");
+
+pub const Router = struct {
+ match: FilesystemRouter.RouteMap.MatchedRoute,
+ file_path_str: js.JSStringRef = null,
+ pathname_str: js.JSStringRef = null,
+
+ pub const Class = NewClass(
+ Router,
+ "Router",
+ .{
+ .finalize = finalize,
+ },
+ .{
+ .@"pathname" = .{
+ .get = getPathname,
+ .ro = true,
+ .ts = .{
+ .@"return" = "string",
+ .@"tsdoc" = "URL path as appears in a web browser's address bar",
+ },
+ },
+ .@"filepath" = .{
+ .get = getPageFilePath,
+ .ro = true,
+ .ts = .{
+ .@"return" = "string",
+ .@"tsdoc" =
+ \\Project-relative filesystem path to the route file
+ \\
+ \\@example
+ \\
+ \\```tsx
+ \\const PageComponent = (await import(route.filepath)).default;
+ \\ReactDOMServer.renderToString(<PageComponent query={route.query} />);
+ \\```
+ ,
+ },
+ },
+ .@"route" = .{
+ .@"get" = getRoute,
+ .ro = true,
+ },
+ .query = .{
+ .@"get" = getQuery,
+ .ro = true,
+ },
+ .pageFilePath = .{
+ .@"get" = getPageFilePath,
+ .ro = true,
+ },
+ },
+ false,
+ false,
+ );
+
+ pub fn getPageFilePath(
+ this: *Router,
+ ctx: js.JSContextRef,
+ thisObject: js.JSObjectRef,
+ prop: js.JSStringRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ if (this.file_path_str == null) {
+ this.file_path_str = js.JSStringCreateWithUTF8CString(this.match.file_path[0.. :0]);
+ }
+
+ return js.JSValueMakeString(ctx, this.file_path_str);
+ }
+
+ pub fn finalize(
+ this: *Router,
+ ctx: js.JSObjectRef,
+ ) void {
+ // this.deinit();
+ }
+
+ pub fn requirePage(
+ this: *Router,
+ ctx: js.JSContextRef,
+ function: js.JSObjectRef,
+ thisObject: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {}
+
+ pub fn getPathname(
+ this: *Router,
+ ctx: js.JSContextRef,
+ thisObject: js.JSObjectRef,
+ prop: js.JSStringRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ if (this.pathname_str == null) {
+ this.pathname_str = js.JSStringCreateWithUTF8CString(this.match.pathname[0.. :0]);
+ }
+
+ return js.JSValueMakeString(ctx, this.pathname_str);
+ }
+
+ pub fn getAsPath(
+ this: *Router,
+ ctx: js.JSContextRef,
+ thisObject: js.JSObjectRef,
+ prop: js.JSStringRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ return js.JSValueMakeString(ctx, Properties.Refs.default);
+ }
+
+ pub fn getRoute(
+ this: *Router,
+ ctx: js.JSContextRef,
+ thisObject: js.JSObjectRef,
+ prop: js.JSStringRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ return js.JSValueMakeString(ctx, Properties.Refs.default);
+ }
+
+ pub fn getQuery(
+ this: *Router,
+ ctx: js.JSContextRef,
+ thisObject: js.JSObjectRef,
+ prop: js.JSStringRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ return js.JSValueMakeString(ctx, Properties.Refs.default);
+ }
+};
diff --git a/src/javascript/jsc/base.zig b/src/javascript/jsc/base.zig
index bae18a0a3..307bf2da9 100644
--- a/src/javascript/jsc/base.zig
+++ b/src/javascript/jsc/base.zig
@@ -33,7 +33,7 @@ pub const To = struct {
return function;
}
- pub fn Finalize(n
+ pub fn Finalize(
comptime ZigContextType: type,
comptime ctxfn: fn (
this: *ZigContextType,
diff --git a/src/javascript/jsc/javascript.zig b/src/javascript/jsc/javascript.zig
index f9aae96ee..102abf925 100644
--- a/src/javascript/jsc/javascript.zig
+++ b/src/javascript/jsc/javascript.zig
@@ -768,6 +768,8 @@ pub const Module = struct {
return null;
}
+ var module = this;
+
var total_len: usize = 0;
for (arguments) |argument| {
const len = js.JSStringGetLength(argument);
@@ -785,7 +787,6 @@ pub const Module = struct {
const end = js.JSStringGetUTF8CString(argument, require_buf.list.items.ptr, require_buf.list.items.len);
total_len += end;
const import_path = require_buf.list.items[0 .. end - 1];
- var module = this;
if (this.vm.bundler.linker.resolver.resolve(module.path.name.dirWithTrailingSlash(), import_path, .require)) |resolved| {
var load_result = Module.loadFromResolveResult(this.vm, ctx, resolved, exception) catch |err| {
@@ -835,7 +836,7 @@ pub const Module = struct {
for (arguments) |argument| {
const end = js.JSStringGetUTF8CString(argument, remainder.ptr, total_len - used_len);
used_len += end;
- remainder[end - 1] = ",";
+ remainder[end - 1] = ',';
remainder = remainder[end..];
}
@@ -1126,7 +1127,7 @@ pub const Module = struct {
}
var module: *Module = undefined;
- if (needs_reload) {
+ if (reload_pending) {
module = vm.require_cache.get(hash).?;
} else {
module = try vm.allocator.create(Module);
@@ -1134,12 +1135,12 @@ pub const Module = struct {
}
errdefer {
- if (!needs_reload) {
+ if (!reload_pending) {
vm.allocator.destroy(module);
}
}
- if (needs_reload) {
+ if (reload_pending) {
try Module.load(
module,
vm,
@@ -1345,7 +1346,7 @@ pub const EventListenerMixin = struct {
fetch,
err,
- const SizeMatcher = strings.ExactSizeMatcher("fetch".len);
+ const SizeMatcher = strings.ExactSizeMatcher(8);
pub fn match(str: string) ?EventType {
return switch (SizeMatcher.match(str)) {
diff --git a/src/javascript/jsc/webcore/response.zig b/src/javascript/jsc/webcore/response.zig
index 669d55dfe..9e73d3113 100644
--- a/src/javascript/jsc/webcore/response.zig
+++ b/src/javascript/jsc/webcore/response.zig
@@ -2,7 +2,7 @@ usingnamespace @import("../base.zig");
const std = @import("std");
const Api = @import("../../../api/schema.zig").Api;
const http = @import("../../../http.zig");
-pub const JavaScript = @import("../javascript.zig");
+const JavaScript = @import("../javascript.zig");
pub const Response = struct {
pub const Class = NewClass(
Response,
@@ -1129,5 +1129,4 @@ pub const FetchEvent = struct {
) js.JSValueRef {
return js.JSValueMakeUndefined(ctx);
}
-
-}; \ No newline at end of file
+};
diff --git a/src/options.zig b/src/options.zig
index 50f3efb11..5cb32f4ef 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -1,3 +1,5 @@
+/// This file is mostly the API schema but with all the options normalized.
+/// Normalization is necessary because most fields in the API schema are optional
const std = @import("std");
const logger = @import("logger.zig");
const Fs = @import("fs.zig");
@@ -8,7 +10,6 @@ const Api = api.Api;
const defines = @import("./defines.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
const NodeModuleBundle = @import("./node_module_bundle.zig").NodeModuleBundle;
-
usingnamespace @import("global.zig");
const assert = std.debug.assert;
@@ -25,8 +26,8 @@ pub fn validatePath(log: *logger.Log, fs: *Fs.FileSystem.Implementation, cwd: st
}
const paths = [_]string{ cwd, rel_path };
const out = std.fs.path.resolve(allocator, &paths) catch |err| {
- log.addErrorFmt(null, logger.Loc{}, allocator, "Invalid {s}: {s}", .{ path_kind, rel_path }) catch unreachable;
- Global.panic("", .{});
+ Global.invariant(false, "<r><red>{s}<r> resolving external: <b>\"{s}\"<r>", .{ @errorName(err), rel_path });
+ return "";
};
return out;
@@ -597,6 +598,8 @@ pub fn loadersFromTransformOptions(allocator: *std.mem.Allocator, _loaders: ?Api
return loaders;
}
+/// BundleOptions is used when ResolveMode is not set to "disable".
+/// BundleOptions is effectively webpack + babel
pub const BundleOptions = struct {
footer: string = "",
banner: string = "",
@@ -632,6 +635,8 @@ pub const BundleOptions = struct {
extension_order: []const string = &Defaults.ExtensionOrder,
out_extensions: std.StringHashMap(string),
import_path_format: ImportPathFormat = ImportPathFormat.relative,
+ framework: ?Framework = null,
+ route_config: ?RouteConfig = null,
pub fn asJavascriptBundleConfig(this: *const BundleOptions) Api.JavascriptBundleConfig {}
@@ -701,6 +706,14 @@ pub const BundleOptions = struct {
opts.external = ExternalModules.init(allocator, &fs.fs, fs.top_level_dir, transform.external, log, opts.platform);
opts.out_extensions = opts.platform.outExtensions(allocator);
+ if (transform.framework) |_framework| {
+ opts.framework = try Framework.fromApi(_framework);
+ }
+
+ if (transform.router) |route_config| {
+ opts.route_config = try RouteConfig.fromApi(route_config, allocator);
+ }
+
if (transform.serve orelse false) {
opts.preserve_extensions = true;
opts.append_package_version_in_query_string = true;
@@ -720,25 +733,25 @@ pub const BundleOptions = struct {
defer allocator.free(check_static);
std.fs.accessAbsolute(check_static, .{}) catch {
- Output.printError("warn: \"public\" folder missing. If there are external assets used in your project, pass --public-dir=\"public-folder-name\"", .{});
+ Output.prettyErrorln("warn: \"public\" folder missing. If there are external assets used in your project, pass --public-dir=\"public-folder-name\"", .{});
did_warn = true;
};
}
if (!did_warn) {
- Output.printError("warn: \"public\" folder missing. If you want to use \"static\" as the public folder, pass --public-dir=\"static\".", .{});
+ Output.prettyErrorln("warn: \"public\" folder missing. If you want to use \"static\" as the public folder, pass --public-dir=\"static\".", .{});
}
opts.public_dir_enabled = false;
},
error.AccessDenied => {
- Output.printError(
+ Output.prettyErrorln(
"error: access denied when trying to open public_dir: \"{s}\".\nPlease re-open Speedy with access to this folder or pass a different folder via \"--public-dir\". Note: --public-dir is relative to --cwd (or the process' current working directory).\n\nThe public folder is where static assets such as images, fonts, and .html files go.",
.{opts.public_dir},
);
std.process.exit(1);
},
else => {
- Output.printError(
+ Output.prettyErrorln(
"error: \"{s}\" when accessing public folder: \"{s}\"",
.{ @errorName(err), opts.public_dir },
);
@@ -749,7 +762,7 @@ pub const BundleOptions = struct {
break :brk null;
};
- // Windows has weird locking rules for files
+ // Windows has weird locking rules for file access.
// so it's a bad idea to keep a file handle open for a long time on Windows.
if (isWindows and opts.public_dir_handle != null) {
opts.public_dir_handle.?.close();
@@ -1084,3 +1097,73 @@ pub const TransformResult = struct {
};
}
};
+
+pub const Framework = struct {
+ entry_point: string,
+
+ pub fn fromApi(
+ transform: Api.FrameworkConfig,
+ ) !Framework {
+ return Framework{
+ .entry_point = transform.entry_point.?,
+ };
+ }
+};
+
+pub const RouteConfig = struct {
+ ///
+ dir: string,
+ // TODO: do we need a separate list for data-only extensions?
+ // e.g. /foo.json just to get the data for the route, without rendering the html
+ // I think it's fine to hardcode as .json for now, but if I personally were writing a framework
+ // I would consider using a custom binary format to minimize request size
+ // maybe like CBOR
+ extensions: []const string,
+
+ pub const DefaultDir = "pages";
+ pub const DefaultExtensions = [_]string{ "tsx", "ts", "mjs", "jsx", "js" };
+
+ pub fn fromApi(router_: Api.RouteConfig, allocator: *std.mem.Allocator) !RouteConfig {
+ var router = RouteConfig{
+ .dir = DefaultDir,
+ .extensions = std.mem.span(&DefaultExtensions),
+ };
+
+ var router_dir: string = std.mem.trimRight(u8, router_.dir orelse "", "/\\");
+
+ if (router_dir.len != 0) {
+ router.dir = router_dir;
+ }
+
+ if (router_.extensions.len > 0) {
+ var count: usize = 0;
+ for (router_.extensions) |_ext| {
+ const ext = std.mem.trimLeft(u8, _ext, ".");
+
+ if (ext.len == 0) {
+ continue;
+ }
+
+ count += 1;
+ }
+
+ var extensions = try allocator.alloc(string, count);
+ var remainder = extensions;
+
+ for (router_.extensions) |_ext| {
+ const ext = std.mem.trimLeft(u8, _ext, ".");
+
+ if (ext.len == 0) {
+ continue;
+ }
+
+ remainder[0] = ext;
+ remainder = remainder[1..];
+ }
+
+ router.extensions = extensions;
+ }
+
+ return router;
+ }
+};
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 8d0f1249d..c0faeda2e 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -57,7 +57,7 @@ pub const PackageJSON = struct {
const parts = [_]string{ input_path, "package.json" };
const package_json_path_ = r.fs.abs(&parts);
- const package_json_path = r.fs.filename_store.append(package_json_path_) catch unreachable;
+ const package_json_path = r.fs.filename_store.append(@TypeOf(package_json_path_), package_json_path_) catch unreachable;
const entry = r.caches.fs.readFile(
r.fs,
diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig
index 1bba7d12c..a2ed2773d 100644
--- a/src/resolver/resolve_path.zig
+++ b/src/resolver/resolve_path.zig
@@ -682,7 +682,7 @@ pub fn joinStringBuf(buf: []u8, _parts: anytype, comptime _platform: Platform) [
}
// Preserve leading separator
- if (_parts[0][0] == _platform.separator()) {
+ if (_parts[0].len > 0 and _parts[0][0] == _platform.separator()) {
const out = switch (platform) {
.loose => normalizeStringLooseBuf(parser_join_input_buffer[0..written], buf[1..], false, false),
.windows => normalizeStringWindows(parser_join_input_buffer[0..written], buf[1..], false, false),
@@ -769,7 +769,7 @@ pub fn joinAbsStringBuf(_cwd: []const u8, buf: []u8, _parts: anytype, comptime _
const offset = out;
out += normalized_part.len;
- std.debug.assert(out < buf.len);
+ std.debug.assert(out <= buf.len);
std.mem.copy(u8, buf[offset..out], normalized_part);
}
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index d27c3b0a6..b4507445a 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -76,6 +76,18 @@ pub const DirInfo = struct {
}
}
+ pub fn getEntriesConst(dirinfo: *const DirInfo) ?*const Fs.FileSystem.DirEntry {
+ const entries_ptr = Fs.FileSystem.instance.fs.entries.atIndex(dirinfo.entries) orelse return null;
+ switch (entries_ptr.*) {
+ .entries => |entr| {
+ return &entries_ptr.entries;
+ },
+ .err => {
+ return null;
+ },
+ }
+ }
+
pub fn getParent(i: *const DirInfo) ?*DirInfo {
return HashMap.instance.atIndex(i.parent);
}
@@ -588,7 +600,7 @@ pub fn NewResolver(cache_files: bool) type {
}
return Result{
- .path_pair = .{ .primary = Path.init(r.fs.filename_store.append(abs_path) catch unreachable) },
+ .path_pair = .{ .primary = Path.init(r.fs.filename_store.append(@TypeOf(abs_path), abs_path) catch unreachable) },
.is_external = true,
};
}
@@ -603,7 +615,7 @@ pub fn NewResolver(cache_files: bool) type {
if (r.checkBrowserMap(pkg, rel_path)) |remap| {
// Is the path disabled?
if (remap.len == 0) {
- var _path = Path.init(r.fs.filename_store.append(abs_path) catch unreachable);
+ var _path = Path.init(r.fs.filename_store.append(string, abs_path) catch unreachable);
_path.is_disabled = true;
return Result{
.path_pair = PathPair{
@@ -872,14 +884,14 @@ pub fn NewResolver(cache_files: bool) type {
// this might leak
if (!std.fs.path.isAbsolute(result.base_url)) {
const paths = [_]string{ file_dir, result.base_url };
- result.base_url = r.fs.filename_store.append(r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
+ result.base_url = r.fs.filename_store.append(string, r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
}
}
if (result.paths.count() > 0 and (result.base_url_for_paths.len == 0 or !std.fs.path.isAbsolute(result.base_url_for_paths))) {
// this might leak
const paths = [_]string{ file_dir, result.base_url };
- result.base_url_for_paths = r.fs.filename_store.append(r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
+ result.base_url_for_paths = r.fs.filename_store.append(string, r.fs.absBuf(&paths, &tsconfig_base_url_buf)) catch unreachable;
}
return result;
@@ -904,7 +916,28 @@ pub fn NewResolver(cache_files: bool) type {
}
}
- fn dirInfoCached(r: *ThisResolver, path: string) !?*DirInfo {
+ fn dirInfoCached(
+ r: *ThisResolver,
+ path: string,
+ ) !?*DirInfo {
+ return try r.dirInfoCachedMaybeLog(path, true);
+ }
+
+ pub fn readDirInfo(
+ r: *ThisResolver,
+ path: string,
+ ) !?*DirInfo {
+ return try r.dirInfoCachedMaybeLog(path, false);
+ }
+
+ pub fn readDirInfoIgnoreError(
+ r: *ThisResolver,
+ path: string,
+ ) ?*const DirInfo {
+ return r.dirInfoCachedMaybeLog(path, false) catch null;
+ }
+
+ inline fn dirInfoCachedMaybeLog(r: *ThisResolver, path: string, comptime enable_logging: bool) !?*DirInfo {
const top_result = try r.dir_cache.getOrPut(path);
if (top_result.status != .unknown) {
return r.dir_cache.atIndex(top_result.index);
@@ -1021,18 +1054,20 @@ pub fn NewResolver(cache_files: bool) type {
var cached_dir_entry_result = rfs.entries.getOrPut(queue_top.unsafe_path) catch unreachable;
r.dir_cache.markNotFound(queue_top.result);
rfs.entries.markNotFound(cached_dir_entry_result);
- const pretty = r.prettyPath(Path.init(queue_top.unsafe_path));
-
- r.log.addErrorFmt(
- null,
- logger.Loc{},
- r.allocator,
- "Cannot read directory \"{s}\": {s}",
- .{
- pretty,
- @errorName(err),
- },
- ) catch {};
+ if (comptime enable_logging) {
+ const pretty = r.prettyPath(Path.init(queue_top.unsafe_path));
+
+ r.log.addErrorFmt(
+ null,
+ logger.Loc{},
+ r.allocator,
+ "Cannot read directory \"{s}\": {s}",
+ .{
+ pretty,
+ @errorName(err),
+ },
+ ) catch {};
+ }
},
}
@@ -1043,9 +1078,15 @@ pub fn NewResolver(cache_files: bool) type {
_open_dirs[open_dir_count] = open_dir;
open_dir_count += 1;
+ // ensure trailing slash
if (_safe_path == null) {
// Now that we've opened the topmost directory successfully, it's reasonable to store the slice.
- _safe_path = try r.fs.dirname_store.append(path);
+ if (path[path.len - 1] != '/') {
+ var parts = [_]string{ path, "/" };
+ _safe_path = try r.fs.dirname_store.append(@TypeOf(parts), parts);
+ } else {
+ _safe_path = try r.fs.dirname_store.append(string, path);
+ }
}
const safe_path = _safe_path.?;
@@ -1609,7 +1650,10 @@ pub fn NewResolver(cache_files: bool) type {
const dir_path = std.fs.path.dirname(path) orelse "/";
- const dir_entry: *Fs.FileSystem.RealFS.EntriesOption = rfs.readDirectory(dir_path, null, false) catch {
+ const dir_entry: *Fs.FileSystem.RealFS.EntriesOption = rfs.readDirectory(
+ dir_path,
+ null,
+ ) catch {
return null;
};
@@ -1644,7 +1688,7 @@ pub fn NewResolver(cache_files: bool) type {
debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {};
}
const abs_path_parts = [_]string{ query.entry.dir, query.entry.base };
- const abs_path = r.fs.filename_store.append(r.fs.joinBuf(&abs_path_parts, &TemporaryBuffer.ExtensionPathBuf)) catch unreachable;
+ const abs_path = r.fs.filename_store.append(string, r.fs.joinBuf(&abs_path_parts, &TemporaryBuffer.ExtensionPathBuf)) catch unreachable;
return LoadResult{
.path = abs_path,
@@ -1673,7 +1717,7 @@ pub fn NewResolver(cache_files: bool) type {
// now that we've found it, we allocate it.
return LoadResult{
- .path = r.fs.filename_store.append(buffer) catch unreachable,
+ .path = r.fs.filename_store.append(@TypeOf(buffer), buffer) catch unreachable,
.diff_case = query.diff_case,
.dirname_fd = entries.fd,
};
@@ -1715,7 +1759,7 @@ pub fn NewResolver(cache_files: bool) type {
}
return LoadResult{
- .path = r.fs.filename_store.append(buffer) catch unreachable,
+ .path = r.fs.filename_store.append(@TypeOf(buffer), buffer) catch unreachable,
.diff_case = query.diff_case,
.dirname_fd = entries.fd,
};
@@ -1752,6 +1796,7 @@ pub fn NewResolver(cache_files: bool) type {
var info = DirInfo{
.abs_path = path,
+ // .abs_real_path = path,
.parent = parent_index,
.entries = dir_entry_index,
};
@@ -1787,7 +1832,7 @@ pub fn NewResolver(cache_files: bool) type {
} else if (parent.?.abs_real_path.len > 0) {
// this might leak a little i'm not sure
const parts = [_]string{ parent.?.abs_real_path, base };
- symlink = r.fs.filename_store.append(r.fs.joinBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
+ symlink = r.fs.filename_store.append(string, r.fs.joinBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
if (r.debug_logs) |*logs| {
try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
diff --git a/src/router.zig b/src/router.zig
new file mode 100644
index 000000000..67a203d09
--- /dev/null
+++ b/src/router.zig
@@ -0,0 +1,644 @@
+// This is a Next.js-compatible file-system router.
+// It uses the filesystem to infer entry points.
+// Despite being Next.js-compatible, it's not tied to Next.js.
+// It does not handle the framework parts of rendering pages.
+// All it does is resolve URL paths to the appropriate entry point and parse URL params/query.
+const Router = @This();
+
+const std = @import("std");
+const DirInfo = @import("./resolver/resolver.zig").DirInfo;
+usingnamespace @import("global.zig");
+const Fs = @import("./fs.zig");
+const Options = @import("./options.zig");
+const allocators = @import("./allocators.zig");
+const URLPath = @import("./http.zig").URLPath;
+
+const index_route_hash = @truncate(u32, std.hash.Wyhash.hash(0, "index")); // comptime hash used to recognize "index" routes
+const arbitrary_max_route = 4096; // upper bound on total routes; sizes RouteMap's traversal scratch buffer
+
+dir: StoredFileDescriptorType = 0, // descriptor for the routes (pages) directory; 0 = not opened — TODO(review) confirm
+routes: RouteMap,
+loaded_routes: bool = false, // set by the caller once loadRoutes has populated `routes` — TODO(review) confirm; not set in this file
+allocator: *std.mem.Allocator,
+fs: *Fs.FileSystem,
+config: Options.RouteConfig,
+
+pub fn init( // construct an empty Router; routes are filled in later by loadRoutes
+    fs: *Fs.FileSystem,
+    allocator: *std.mem.Allocator,
+    config: Options.RouteConfig,
+) !Router {
+    return Router{
+        .routes = RouteMap{
+            .routes = Route.List{}, // empty MultiArrayList; grows via `allocator`
+            .index = null, // no top-level "index" route yet
+            .allocator = allocator,
+            .config = config,
+        },
+        .fs = fs,
+        .allocator = allocator,
+        .config = config,
+    };
+}
+
+pub fn getEntryPoints(this: *const Router, allocator: *std.mem.Allocator) ![]const string { // absolute file paths of every leaf route; caller owns the returned slice and its backing buffer
+    var i: u16 = 0;
+    const route_count: u16 = @truncate(u16, this.routes.routes.len);
+
+    var count: usize = 0; // number of leaf routes (children.len == 0)
+    var str_len: usize = 0; // total bytes of dir+base across all leaves
+
+    while (i < route_count) : (i += 1) { // first pass: size everything
+        const children = this.routes.routes.items(.children)[i];
+        count += @intCast(
+            usize,
+            @boolToInt(children.len == 0),
+        );
+        if (children.len == 0) {
+            if (Fs.FileSystem.DirEntry.EntryStore.instance.at(this.routes.routes.items(.entry_index)[i])) |entry| {
+                str_len += entry.base.len + entry.dir.len;
+            }
+        }
+    }
+
+    var buffer = try allocator.alloc(u8, str_len + count); // +count assumes absBuf writes exactly one separator per entry — TODO(review) confirm against Fs.FileSystem.absBuf
+    var remain = buffer;
+    var entry_points = try allocator.alloc(string, count);
+
+    i = 0;
+    var entry_point_i: usize = 0;
+    while (i < route_count) : (i += 1) { // second pass: join dir+base into `remain` for each leaf
+        const children = this.routes.routes.items(.children)[i];
+        if (children.len == 0) {
+            if (Fs.FileSystem.DirEntry.EntryStore.instance.at(this.routes.routes.items(.entry_index)[i])) |entry| {
+                var parts = [_]string{ entry.dir, entry.base };
+                entry_points[entry_point_i] = this.fs.absBuf(&parts, remain);
+                remain = remain[entry_points[entry_point_i].len..];
+                entry_point_i += 1;
+            }
+        }
+    }
+
+    return entry_points; // slices point into `buffer`; freeing one without the other leaks/dangles
+}
+
+const banned_dirs = [_]string{ // directory names never descended into while loading routes
+    "node_modules",
+};
+
+// This loads routes recursively, in depth-first order.
+// it does not currently handle duplicate exact route matches. that's undefined behavior, for now.
+pub fn loadRoutes(
+    this: *Router,
+    root_dir_info: *const DirInfo,
+    comptime ResolverType: type,
+    resolver: *ResolverType,
+    parent: u16,
+    comptime is_root: bool,
+) anyerror!void {
+    var fs = &this.fs.fs;
+    if (root_dir_info.getEntriesConst()) |entries| {
+        var iter = entries.data.iterator();
+        outer: while (iter.next()) |entry_ptr| {
+            const entry = Fs.FileSystem.DirEntry.EntryStore.instance.at(entry_ptr.value) orelse continue;
+            if (entry.base[0] == '.') { // skip dotfiles/dot-dirs; assumes base is never empty — TODO(review) confirm
+                continue :outer;
+            }
+
+            switch (entry.kind(fs)) {
+                .dir => {
+                    inline for (banned_dirs) |banned_dir| {
+                        if (strings.eqlComptime(entry.base, comptime banned_dir)) {
+                            continue :outer;
+                        }
+                    }
+                    var abs_parts = [_]string{ entry.dir, entry.base };
+                    if (resolver.readDirInfoIgnoreError(this.fs.abs(&abs_parts))) |_dir_info| {
+                        const dir_info: *const DirInfo = _dir_info;
+
+                        var route: Route = Route.parse(
+                            entry.base,
+                            entry.dir[this.config.dir.len..],
+                            "",
+                            entry_ptr.value,
+                        );
+
+                        route.parent = parent;
+                        route.children.offset = @truncate(u16, this.routes.routes.len + 1); // first CHILD index; this route itself lands at len (the original stored the route's own index, so the children range included the route and over-counted by one)
+                        try this.routes.routes.append(this.allocator, route);
+
+                        // potential stack overflow!
+                        try this.loadRoutes(
+                            dir_info,
+                            ResolverType,
+                            resolver,
+                            route.children.offset - 1, // children's parent = this route's own index
+                            false,
+                        );
+
+                        this.routes.routes.items(.children)[route.children.offset - 1].len = @truncate(u16, this.routes.routes.len) - route.children.offset; // only the children, not the route itself
+                    }
+                },
+
+                .file => {
+                    const extname = std.fs.path.extension(entry.base);
+                    // exclude "." or ""
+                    if (extname.len < 2) continue;
+
+                    for (this.config.extensions) |_extname| {
+                        if (strings.eql(extname[1..], _extname)) { // assumes config.extensions entries have no leading dot — TODO(review) confirm
+                            var route = Route.parse(
+                                entry.base,
+                                entry.dir[this.config.dir.len..],
+                                extname,
+                                entry_ptr.value,
+                            );
+                            route.parent = parent;
+
+                            if (comptime is_root) {
+                                if (strings.eqlComptime(route.name, "index")) {
+                                    this.routes.index = @truncate(u32, this.routes.routes.len); // index of the route appended just below
+                                }
+                            }
+
+                            try this.routes.routes.append(
+                                this.allocator,
+                                route,
+                            );
+                        }
+                    }
+                },
+            }
+        }
+    }
+}
+
+const TinyPtr = packed struct { // compact (offset, len) pair indexing into the flat route list
+    offset: u16 = 0,
+    len: u16 = 0,
+};
+
+const Param = struct { // one matched URL parameter, e.g. key "id" => value "123" for "[id]"
+    key: string,
+    value: string,
+
+    pub const List = std.MultiArrayList(Param);
+};
+
+pub const Route = struct {
+    part: RoutePart, // parsed segment kind: exact / [param] / [...catch-all] / [[...optional]]
+    name: string, // basename without extension
+    path: string, // basename including extension
+    hash: u32, // Wyhash of `name` — per-segment comparisons
+    children: Ptr = Ptr{}, // (offset, len) range of this route's children in the flat list — TODO(review) confirm whether offset includes the route itself
+    parent: u16 = top_level_parent, // index of the parent route, or top_level_parent
+    entry_index: allocators.IndexType, // handle into Fs.FileSystem.DirEntry.EntryStore
+
+    full_hash: u32, // Wyhash of the pages-relative path without extension — whole-URL exact matches
+
+    pub const top_level_parent = std.math.maxInt(u16);
+
+    pub const List = std.MultiArrayList(Route);
+    pub const Ptr = TinyPtr;
+
+    pub fn parse(base: string, dir: string, extname: string, entry_index: allocators.IndexType) Route {
+        var parts = [_]string{ dir, base };
+        // this isn't really absolute, it's relative to the pages dir
+        const absolute = Fs.FileSystem.instance.abs(&parts); // presumably a shared temp buffer; only hashed below, never retained — TODO(review) confirm
+        const name = base[0 .. base.len - extname.len];
+
+        return Route{
+            .name = name,
+            .path = base,
+            .entry_index = entry_index,
+            .hash = @truncate(
+                u32,
+                std.hash.Wyhash.hash(
+                    0,
+                    name,
+                ),
+            ),
+            .full_hash = @truncate(
+                u32,
+                std.hash.Wyhash.hash(
+                    0,
+                    absolute[0 .. absolute.len - extname.len],
+                ),
+            ),
+            .part = RoutePart.parse(name),
+        };
+    }
+};
+
+// Reference: https://nextjs.org/docs/routing/introduction
+// Examples:
+// - pages/index.js => /
+// - pages/foo.js => /foo
+// - pages/foo/index.js => /foo
+// - pages/foo/[bar] => {/foo/bacon, /foo/bar, /foo/baz, /foo/10293012930}
+// - pages/foo/[...bar] => {/foo/bacon/toast, /foo/bar/what, /foo/baz, /foo/10293012930}
+// Syntax:
+// - [param-name]
+// - Catch All: [...param-name]
+// - Optional Catch All: [[...param-name]]
+// Invalid syntax:
+// - pages/foo/hello-[bar]
+// - pages/foo/[bar]-foo
+pub const RouteMap = struct {
+    routes: Route.List, // flat, depth-first ordered list of every route
+    index: ?u32, // index of the top-level "index" route, if one exists
+    allocator: *std.mem.Allocator,
+    config: Options.RouteConfig,
+
+    pub threadlocal var segments_buf: [128]string = undefined;
+    pub threadlocal var segments_hash: [128]u32 = undefined;
+
+    pub fn routePathLen(this: *const RouteMap, _ptr: u16) u16 {
+        return this.appendRoutePath(_ptr, &[_]u8{}, false); // write=false: just measure
+    }
+
+    // This is probably really slow
+    // But it might be fine because it's mostly looking up within the same array
+    // and that array is probably in the cache line
+    var ptr_buf: [arbitrary_max_route]u16 = undefined;
+    // TODO: skip copying parent dirs when it's another file in the same parent dir
+    pub fn appendRoutePath(this: *const RouteMap, tail: u16, buf: []u8, comptime write: bool) u16 {
+        var head: u16 = this.routes.items(.parent)[tail];
+
+        var ptr_buf_count: i32 = 0;
+        var written: u16 = 0;
+        while (!(head == Route.top_level_parent)) : (ptr_buf_count += 1) { // collect ancestors, nearest-first
+            ptr_buf[@intCast(usize, ptr_buf_count)] = head;
+            head = this.routes.items(.parent)[head];
+        }
+
+        var i: usize = @intCast(usize, ptr_buf_count);
+        var remain = buf;
+        while (i > 0) : (i -= 1) { // walk back down, root-first
+            const path = this.routes.items(.path)[
+                @intCast(
+                    usize,
+                    ptr_buf[i - 1], // entries live at 0..count-1; the original read ptr_buf[count], one past the end
+                )
+            ];
+            if (comptime write) {
+                std.mem.copy(u8, remain, path);
+
+                remain = remain[path.len..];
+                remain[0] = std.fs.path.sep;
+                remain = remain[1..];
+            }
+            written += @truncate(u16, path.len + 1); // +1 for the separator
+        }
+
+        {
+            const path = this.routes.items(.path)[tail];
+            if (comptime write) {
+                std.mem.copy(u8, remain, path);
+            }
+            written += @truncate(u16, path.len);
+        }
+
+        return written;
+    }
+
+    pub const MatchedRoute = struct {
+        /// normalized url path from the request
+        path: string,
+        /// raw url path from the request
+        pathname: string,
+        /// absolute filesystem path to the entry point
+        file_path: string,
+        /// route name, like `"posts/[id]"`
+        name: string,
+
+        /// basename of the route in the file system, including file extension
+        basename: string,
+
+        hash: u32, // full_hash of the matched route
+        params: *Param.List,
+        redirect_path: ?string = null,
+        query_string: string = "",
+    };
+
+    const MatchContext = struct {
+        params: *Param.List,
+        segments: []string, // URL split on '/'
+        hashes: []u32, // Wyhash of each segment, parallel to `segments`
+        map: *RouteMap,
+        allocator: *std.mem.Allocator,
+        redirect_path: ?string = null, // was `""`; never read before matchPage sets it explicitly
+        url_path: URLPath,
+
+        matched_route_name: PathBuilder = PathBuilder.init(),
+        matched_route_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined,
+
+        file_path: string = "",
+
+        pub fn matchDynamicRoute(
+            this: *MatchContext,
+            head_i: u16,
+            segment_i: u16,
+        ) ?MatchedRoute {
+            const match = this._matchDynamicRoute(head_i, segment_i) orelse return null;
+            this.matched_route_name.append("/"); // NOTE(review): appended while unwinding, so segments come out leaf-to-root — TODO build root-first
+            this.matched_route_name.append(this.map.routes.items(.name)[head_i]); // was match.name, which re-appended the LEAF's name at every level
+            return match;
+        }
+
+        fn _matchDynamicRoute(
+            this: *MatchContext,
+            head_i: u16,
+            segment_i: u16,
+        ) ?MatchedRoute {
+            const start_len = this.params.len;
+            var head = this.map.routes.get(head_i);
+            if (segment_i >= this.segments.len) return null; // out of URL segments (the original indexed out of bounds below)
+            const segment = this.segments[segment_i];
+            const remaining = this.segments[segment_i..];
+
+            if (remaining.len > 1 and head.children.len == 0) {
+                return null; // leaf route but the URL has more segments (was `> 0`, which rejected every leaf)
+            }
+
+            switch (head.part.tag) {
+                .exact => {
+                    if (this.hashes[segment_i] != head.hash) { // segment text must equal the route name
+                        return null;
+                    }
+                },
+                else => {}, // NOTE(review): catch-all variants match like params here but never consume extra segments
+            }
+
+            var match_result: MatchedRoute = undefined;
+            if (head.children.len > 0 and remaining.len > 0) {
+                var child_i = head.children.offset;
+                const last = child_i + head.children.len;
+                var matched = false;
+                while (child_i < last) : (child_i += 1) {
+                    if (this.matchDynamicRoute(child_i, segment_i + 1)) |res| {
+                        match_result = res;
+                        matched = true;
+                        break;
+                    }
+                }
+
+                if (!matched) {
+                    this.params.shrinkRetainingCapacity(start_len); // roll back params appended by failed child branches
+                    return null;
+                }
+                // this is a folder
+            } else if (remaining.len == 0 and head.children.len > 0) {
+                this.params.shrinkRetainingCapacity(start_len); // defensive: remaining.len >= 1 after the guard above
+                return null;
+            } else {
+                match_result = MatchedRoute{
+                    .path = head.path,
+                    .name = head.name,
+                    .params = this.params,
+                    .hash = head.full_hash,
+                    .query_string = this.url_path.query_string,
+                    .pathname = this.url_path.pathname,
+                };
+
+                if (Fs.FileSystem.DirEntry.EntryStore.instance.at(head.entry_index)) |entry| {
+                    var parts = [_]string{ entry.dir, entry.base };
+                    match_result.file_path = Fs.FileSystem.instance.absBuf(&parts, &this.matched_route_buf); // pass the buffer as a slice; by-value arrays don't coerce to []u8 and the write would be lost
+                    this.matched_route_buf[match_result.file_path.len] = 0; // sentinel; NOTE(review): file_path points into this MatchContext
+                }
+            }
+
+            // Now that we know for sure the route will match, we append the param
+            switch (head.part.tag) {
+                .param => {
+                    this.params.append(
+                        this.allocator,
+                        .{
+                            .key = head.part.str(head.name),
+                            .value = segment,
+                        },
+                    ) catch unreachable;
+                },
+                else => {},
+            }
+
+            return match_result;
+        }
+    };
+
+    // This makes many passes over the list of routes
+    // However, most of those passes are basically array.indexOf(number) and then smallerArray.indexOf(number)
+    pub fn matchPage(this: *RouteMap, url_path: URLPath, params: *Param.List) ?MatchedRoute {
+        // Trim trailing slash
+        var path = url_path.path;
+        var redirect = false;
+
+        // Normalize trailing slash
+        // "/foo/bar/index/" => "/foo/bar/index"
+        if (path.len > 0 and path[path.len - 1] == '/') {
+            path = path[0 .. path.len - 1];
+            redirect = true;
+        }
+
+        // Normal case: "/foo/bar/index" => "/foo/bar"
+        // Pathological: "/foo/bar/index/index/index/index/index/index" => "/foo/bar"
+        // Extremely pathological: "/index/index/index/index/index/index/index" => "index"
+        while (strings.endsWith(path, "/index")) {
+            path = path[0 .. path.len - "/index".len];
+            redirect = true;
+        }
+
+        if (strings.eqlComptime(path, "index")) {
+            path = "";
+            redirect = true;
+        }
+
+        if (path.len == 0) {
+            if (this.index) |index| {
+                return MatchedRoute{
+                    .params = params,
+                    .name = "index",
+                    .path = this.routes.items(.path)[index],
+                    .pathname = url_path.pathname,
+                    .hash = index_route_hash,
+                    .query_string = url_path.query_string,
+                };
+            }
+
+            return null;
+        }
+
+        const full_hash = @truncate(u32, std.hash.Wyhash.hash(0, path));
+
+        // Check for an exact match
+        // These means there are no params.
+        if (std.mem.indexOfScalar(u32, this.routes.items(.full_hash), full_hash)) |exact_match| {
+            const route = this.routes.get(exact_match);
+            // It might be a folder with an index route
+            // /bacon/index.js => /bacon
+            if (route.children.len > 0) {
+                const children = this.routes.items(.hash)[route.children.offset .. route.children.offset + route.children.len];
+                for (children) |child_hash, i| {
+                    if (child_hash == index_route_hash) {
+                        return MatchedRoute{
+                            .params = params,
+                            .name = this.routes.items(.name)[route.children.offset + i], // i is relative to the children slice; make it absolute (the original used the bare slice index)
+                            .path = this.routes.items(.path)[route.children.offset + i],
+                            .pathname = url_path.pathname,
+                            .hash = child_hash,
+                            .query_string = url_path.query_string,
+                        };
+                    }
+                }
+                // It's an exact route, there are no params
+                // /foo/bar => /foo/bar.js
+            } else {
+                return MatchedRoute{
+                    .params = params,
+                    .name = route.name,
+                    .path = route.path,
+                    .redirect_path = if (redirect) path else null,
+                    .hash = full_hash,
+                    .pathname = url_path.pathname,
+                    .query_string = url_path.query_string,
+                };
+            }
+        }
+
+        var last_slash_i: usize = 0;
+        var segments: []string = segments_buf[0..];
+        var hashes: []u32 = segments_hash[0..];
+        var segment_i: usize = 0;
+        for (path) |c, i| { // capture order is value-then-index; the original had them swapped, comparing the INDEX against '/'
+            if (c == '/') {
+                // "/foo/./foo" is rewritten as "/foo/foo"; empty segments from "//" or a leading "/" are skipped too
+                segments[segment_i] = path[last_slash_i..i];
+                hashes[segment_i] = @truncate(u32, std.hash.Wyhash.hash(0, segments[segment_i]));
+                if (segments[segment_i].len > 0 and !(segments[segment_i].len == 1 and segments[segment_i][0] == '.')) {
+                    segment_i += 1;
+                }
+                last_slash_i = i + 1;
+            }
+        }
+        if (last_slash_i < path.len) { // the trailing slash was trimmed above, so the final segment never hits the '/' branch
+            segments[segment_i] = path[last_slash_i..]; hashes[segment_i] = @truncate(u32, std.hash.Wyhash.hash(0, segments[segment_i])); segment_i += 1;
+        }
+        segments = segments[0..segment_i];
+
+        var ctx = MatchContext{
+            .params = params,
+            .segments = segments,
+            .hashes = hashes, // parallel to `segments`; only the first segment_i entries are meaningful
+            .map = this,
+            .redirect_path = if (redirect) path else null, .allocator = this.allocator, .url_path = url_path,
+        };
+        var top_i: u16 = 0;
+        while (top_i < @truncate(u16, this.routes.len)) : (top_i += 1) { // try every top-level route (the original only ever tried route 0)
+            if (this.routes.items(.parent)[top_i] != Route.top_level_parent) continue;
+            if (ctx.matchDynamicRoute(top_i, 0)) |dynamic_route_const| {
+                var dynamic_route = dynamic_route_const; // payload captures are const; copy before setting the name
+                dynamic_route.name = ctx.matched_route_name.str(); return dynamic_route; // NOTE(review): name/file_path point into ctx, which dies with this frame — TODO copy out
+            }
+        }
+        return null;
+    }
+};
+
+// This is a u32
+pub const RoutePart = packed struct {
+ name: Ptr,
+ tag: Tag,
+
+ pub fn str(this: RoutePart, name: string) string {
+ return switch (this.tag) {
+ .exact => name,
+ else => name[this.name.offset..][0..this.name.len],
+ };
+ }
+
+ pub const Ptr = packed struct {
+ offset: u14,
+ len: u14,
+ };
+
+ pub const Tag = enum(u4) {
+ optional_catch_all = 1,
+ catch_all = 2,
+ param = 3,
+ exact = 4,
+ };
+
+ pub fn parse(base: string) RoutePart {
+ std.debug.assert(base.len > 0);
+
+ var part = RoutePart{
+ .name = Ptr{ .offset = 0, .len = @truncate(u14, base.len) },
+ .tag = .exact,
+ };
+
+ if (base[0] == '[') {
+ if (base.len > 1) {
+ switch (base[1]) {
+ ']' => {},
+
+ '[' => {
+ // optional catch all
+ if (strings.eqlComptime(base[1..std.math.min(base.len, 5)], "[...")) {
+ part.name.len = @truncate(u14, std.mem.indexOfScalar(u8, base[5..], ']') orelse return part);
+ part.name.offset = 5;
+ part.tag = .optional_catch_all;
+ }
+ },
+ '.' => {
+ // regular catch all
+ if (strings.eqlComptime(base[1..std.math.min(base.len, 4)], "...")) {
+ part.name.len = @truncate(u14, std.mem.indexOfScalar(u8, base[4..], ']') orelse return part);
+ part.name.offset = 4;
+ part.tag = .catch_all;
+ }
+ },
+ else => {
+ part.name.len = @truncate(u14, std.mem.indexOfScalar(u8, base[1..], ']') orelse return part);
+ part.tag = .param;
+ part.name.offset = 1;
+ },
+ }
+ }
+ }
+
+ return part;
+ }
+};
+
+threadlocal var params_list: Param.List = Param.List{}; // was `undefined`: shrinkRetainingCapacity below would read uninitialized memory
+pub fn match(app: *Router, comptime RequestContextType: type, ctx: *RequestContextType) !void {
+    // If there's an extname assume it's an asset and not a page
+    switch (ctx.url.extname.len) {
+        0 => {},
+        // json is used for updating the route client-side without a page reload
+        "json".len => {
+            if (!strings.eqlComptime(ctx.url.extname, "json")) {
+                try ctx.handleRequest();
+                return;
+            }
+        },
+        else => {
+            try ctx.handleRequest();
+            return;
+        },
+    }
+
+    params_list.shrinkRetainingCapacity(0);
+    if (app.routes.matchPage(ctx.url, &params_list)) |route| { // matchPage takes (url, params); the original's stray leading `0` argument doesn't match the signature
+        if (route.redirect_path) |redirect| {
+            try ctx.handleRedirect(redirect);
+            return;
+        }
+
+        std.debug.assert(route.path.len > 0);
+
+        try ctx.handleRoute(route); // hand the matched page off to the request context
+        return; // handled; don't fall through and ALSO serve the URL as a plain request
+    }
+
+    try ctx.handleRequest();
+}
diff --git a/src/strings.zig b/src/strings.zig
index 4813c1f91..10fa432d6 100644
--- a/src/strings.zig
+++ b/src/strings.zig
@@ -9,6 +9,46 @@ pub const MutableString = mutable.MutableString;
pub const eql = std.meta.eql;
+pub fn NewStringBuilder(comptime size: usize) type {
+    return struct {
+        const This = @This();
+        buffer: [size + 1]u8 = undefined,
+        // Bytes written so far. The original tracked a `remain` slice into `buffer`, but
+        // `init` returned the struct by value, leaving `remain` pointing at the dead
+        // stack copy's buffer. A plain length has nothing to dangle.
+        len: usize = 0,
+
+        pub fn init() This {
+            return This{};
+        }
+
+        fn load(this: *This) void {
+            this.len = 0;
+        }
+
+        pub fn append(this: *This, str: string) void {
+            std.mem.copy(u8, this.buffer[this.len..size], str); // NOTE(review): still no bounds check against `size`, same as the original
+            this.len += str.len;
+        }
+
+        pub fn str(this: *This) string {
+            this.buffer[this.len] = 0; // always leave a sentinel so C-style APIs work without an extra copy
+            return this.buffer[0..this.len];
+        }
+
+        // Remove the last `count` bytes and return them. (The original assigned the
+        // *used* prefix to `remain` and fell off the end of a function declared to return `string`.)
+        pub fn pop(this: *This, count: usize) string {
+            const popped = this.buffer[this.len - count .. this.len];
+            this.len -= count;
+            return popped;
+        }
+
+        pub fn reset(this: *This) void {
+            this.load();
+        }
+    };
+}
+
pub fn nql(a: anytype, b: @TypeOf(a)) bool {
return !eql(a, b);
}