const bun = @import("root").bun;
const string = bun.string;
const constStrToU8 = bun.constStrToU8;
const Output = bun.Output;
const Global = bun.Global;
const Environment = bun.Environment;
const strings = bun.strings;
const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;
const std = @import("std");
const lex = bun.js_lexer;
const logger = @import("root").bun.logger;
const options = @import("../options.zig");
const js_parser = bun.js_parser;
const js_ast = bun.JSAst;
const linker = @import("../linker.zig");
const allocators = @import("../allocators.zig");
const sync = @import("../sync.zig");
const Api = @import("../api/schema.zig").Api;
const resolve_path = @import("../resolver/resolve_path.zig");
const configureTransformOptionsForBun = @import("../bun.js/config.zig").configureTransformOptionsForBun;
const Command = @import("../cli.zig").Command;
const bundler = bun.bundler;
const fs = @import("../fs.zig");
const URL = @import("../url.zig").URL;
const HTTP = @import("root").bun.HTTP;
const NetworkThread = HTTP.NetworkThread;
const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8;
const Archive = @import("../libarchive/libarchive.zig").Archive;
const Zlib = @import("../zlib.zig");
const JSPrinter = bun.js_printer;
const DotEnv = @import("../env_loader.zig");
const NPMClient = @import("../which_npm_client.zig").NPMClient;
const which = @import("../which.zig").which;
const clap = @import("root").bun.clap;
const Lock = @import("../lock.zig").Lock;
const Headers = @import("root").bun.HTTP.Headers;
const CopyFile = @import("../copy_file.zig");
var bun_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const Futex = @import("../futex.zig");
const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap;
const target_nextjs_version = "12.2.3";
/// Set once `initializeStore` has run; guards against double initialization.
pub var initialized_store = false;

/// Lazily create the global AST expression/statement stores used by the
/// JSON/JS parsers. Idempotent: calls after the first are no-ops.
pub fn initializeStore() void {
    if (!initialized_store) {
        initialized_store = true;
        js_ast.Expr.Data.Store.create(default_allocator);
        js_ast.Stmt.Data.Store.create(default_allocator);
    }
}
const skip_dirs = &[_]string{ "node_modules", ".git" };
const skip_files = &[_]string{
"package-lock.json",
"yarn.lock",
"pnpm-lock.yaml",
};
const never_conflict = &[_]string{
"README.md",
"gitignore",
".gitignore",
".git/",
};
const npm_task_args = &[_]string{"run"};
/// Tracks packages known not to work in bun yet so we can warn the user.
/// Each boolean field's name is the npm package name it tracks.
const UnsupportedPackages = struct {
    @"styled-jsx": bool = false,

    /// Scan a package.json dependencies-style object expression and set any
    /// field whose name matches one of the object's property keys.
    pub fn update(this: *UnsupportedPackages, expr: js_ast.Expr) void {
        for (expr.data.e_object.properties.slice()) |property| {
            inline for (comptime std.meta.fieldNames(UnsupportedPackages)) |name| {
                if (strings.eqlComptime(property.key.?.data.e_string.data, comptime name)) {
                    @field(this, name) = true;
                }
            }
        }
    }

    /// Emit a warning line for every package flagged by `update`.
    pub fn print(this: UnsupportedPackages) void {
        inline for (comptime std.meta.fieldNames(UnsupportedPackages)) |name| {
            if (@field(this, name)) {
                Output.prettyErrorln("warn:\"{s}\" won't work in bun yet\n", .{name});
            }
        }
    }
};
var bun_path: ?[:0]const u8 = null;
/// Run `task` (a whitespace-separated command string) as a child process in
/// `cwd`, optionally prefixed with "<npm-client> run". The command line is
/// echoed to the terminal first; allocation or spawn failures are ignored
/// (best-effort, matching the original behavior).
fn execTask(allocator: std.mem.Allocator, task_: string, cwd: string, _: string, npm_client: ?NPMClient) void {
    const task = std.mem.trim(u8, task_, " \n\r\t");
    if (task.len == 0) return;

    // First pass: count tokens so argv can be allocated exactly once.
    var splitter = std.mem.split(u8, task, " ");
    var count: usize = 0;
    while (splitter.next() != null) {
        count += 1;
    }

    // Two extra slots for "<npm-client> run" when an npm client is in use.
    const npm_args = 2 * @as(usize, @intCast(@intFromBool(npm_client != null)));
    const total = count + npm_args;

    // Keep the original allocation separate from the working slice: `argv`
    // may be re-sliced below, and freeing a re-sliced pointer (or using its
    // shortened length for the size check) would be undefined behavior.
    const argv_buf = allocator.alloc(string, total) catch return;
    defer if (argv_buf.len > 32) allocator.free(argv_buf);
    var argv: []string = argv_buf;

    var proc: std.ChildProcess = undefined;

    if (npm_client) |client| {
        argv[0] = client.bin;
        argv[1] = npm_task_args[0];
    }

    // Second pass: fill in the task's own tokens after the npm prefix.
    {
        var i: usize = npm_args;
        splitter = std.mem.split(u8, task, " ");
        while (splitter.next()) |split| {
            argv[i] = split;
            i += 1;
        }
    }

    if (strings.startsWith(task, "bun ")) {
        // Skip the npm-client prefix and run bun directly.
        // NOTE(review): when npm_client is null there is no prefix, so this
        // drops the first two tokens of the task itself — confirm intent.
        argv = argv[2..];
    }

    // Echo the command being run, e.g. "$ npm run dev".
    Output.pretty("\n$", .{});
    for (argv) |arg| {
        Output.print(" {s}", .{arg});
    }
    Output.print("\n", .{});
    Output.flush();

    Output.disableBuffering();
    defer Output.enableBuffering();

    proc = std.ChildProcess.init(argv, allocator);
    proc.stdin_behavior = .Inherit;
    proc.stdout_behavior = .Inherit;
    proc.stderr_behavior = .Inherit;
    proc.cwd = cwd;
    _ = proc.spawnAndWait() catch return;
}
// We don't want to allocate memory each time
// But we cannot print over an existing buffer or weird stuff will happen
// so we keep two and switch between them
// We don't want to allocate memory each time
// But we cannot print over an existing buffer or weird stuff will happen
// so we keep two and switch between them
pub const ProgressBuf = struct {
// Two fixed 1 KiB scratch buffers, used alternately so that a string
// returned by the previous call stays valid while the next one is written.
var bufs: [2][1024]u8 = [2][1024]u8{
@as([1024]u8, undefined),
@as([1024]u8, undefined),
};
// Monotonic counter; `buf_index % 2` selects which buffer to write next.
var buf_index: usize = 0;
// Format into the next scratch buffer and return a slice into it.
// The returned string is only valid until the call after next.
// Errors if the formatted output exceeds 1024 bytes.
pub fn print(comptime fmt: string, args: anytype) !string {
buf_index += 1;
return try std.fmt.bufPrint(&bufs[buf_index % 2], fmt, args);
}
// Like `print`, but runs the format string through Output.prettyFmt first,
// expanding or stripping color markup depending on ANSI support.
pub fn pretty(comptime fmt: string, args: anytype) !string {
if (Output.enable_ansi_colors) {
return ProgressBuf.print(comptime Output.prettyFmt(fmt, true), args);
} else {
return ProgressBuf.print(comptime Output.prettyFmt(fmt, false), args);
}
}
};
/// Parsed command-line options for `bun create`.
const CreateOptions = struct {
    npm_client: ?NPMClient.Tag = null,
    skip_install: bool = false,
    overwrite: bool = false,
    skip_git: bool = false,
    skip_package_json: bool = false,
    positionals: []const string,
    verbose: bool = false,
    open: bool = false,

    const params = [_]clap.Param(clap.Help){
        clap.parseParam("--help Print this menu") catch unreachable,
        clap.parseParam("--force Overwrite existing files") catch unreachable,
        clap.parseParam("--no-install Don't install node_modules") catch unreachable,
        clap.parseParam("--no-git Don't create a git repository") catch unreachable,
        clap.parseParam("--verbose Too many logs") catch unreachable,
        clap.parseParam("--no-package-json Disable package.json transforms") catch unreachable,
        clap.parseParam("--open On finish, start bun & open in-browser") catch unreachable,
        clap.parseParam("... ") catch unreachable,
    };

    /// Parse argv into a CreateOptions. With `print_flags_only`, prints the
    /// flag help and returns an undefined value (callers must not use it).
    /// On `--help`, prints full usage and exits the process.
    pub fn parse(ctx: Command.Context, comptime print_flags_only: bool) !CreateOptions {
        var diag = clap.Diagnostic{};
        // Fixed: "&params" had been corrupted into the mojibake "¶ms"
        // (an HTML "&para;" entity substitution), which does not compile.
        var args = clap.parse(clap.Help, &params, .{ .diagnostic = &diag, .allocator = ctx.allocator }) catch |err| {
            // Report useful error and exit
            diag.report(Output.errorWriter(), err) catch {};
            return err;
        };

        if (args.flag("--help") or comptime print_flags_only) {
            if (comptime print_flags_only) {
                clap.help(Output.writer(), params[1..]) catch {};
                return undefined;
            }

            Output.prettyln("bun create\n\n flags:\n", .{});
            Output.flush();
            clap.help(Output.writer(), params[1..]) catch {};
            Output.pretty("\n", .{});
            Output.prettyln(" environment variables:\n\n", .{});
            Output.prettyln(" GITHUB_ACCESS_TOKEN Downloading code from GitHub with a higher rate limit", .{});
            Output.prettyln(" GITHUB_API_DOMAIN Change \"api.github.com\", useful for GitHub Enterprise\n", .{});
            Output.prettyln(" NPM_CLIENT Absolute path to the npm client executable", .{});
            Output.flush();
            Global.exit(0);
        }

        var opts = CreateOptions{ .positionals = args.positionals() };

        // Allow "bun create create ..." / "bun c ..." by dropping the subcommand word.
        if (opts.positionals.len >= 1 and (strings.eqlComptime(opts.positionals[0], "c") or strings.eqlComptime(opts.positionals[0], "create"))) {
            opts.positionals = opts.positionals[1..];
        }

        opts.skip_package_json = args.flag("--no-package-json");
        opts.verbose = args.flag("--verbose");
        opts.open = args.flag("--open");
        opts.skip_install = args.flag("--no-install");
        opts.skip_git = args.flag("--no-git");
        opts.overwrite = args.flag("--force");

        return opts;
    }
};
const BUN_CREATE_DIR = ".bun-create";
var home_dir_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
pub const CreateCommand = struct {
pub fn exec(ctx: Command.Context, _: []const []const u8) !void {
@setCold(true);
Global.configureAllocator(.{ .long_running = false });
try HTTP.HTTPThread.init();
var create_options = try CreateOptions.parse(ctx, false);
const positionals = create_options.positionals;
if (positionals.len == 0) {
return try CreateListExamplesCommand.exec(ctx);
}
var filesystem = try fs.FileSystem.init(null);
var env_loader: DotEnv.Loader = brk: {
var map = try ctx.allocator.create(DotEnv.Map);
map.* = DotEnv.Map.init(ctx.allocator);
break :brk DotEnv.Loader.init(map, ctx.allocator);
};
env_loader.loadProcess();
var example_tag = Example.Tag.unknown;
var unsupported_packages = UnsupportedPackages{};
const template = brk: {
var positional = positionals[0];
if (!std.fs.path.isAbsolute(positional)) {
outer: {
if (env_loader.map.get("BUN_CREATE_DIR")) |home_dir| {
var parts = [_]string{ home_dir, positional };
var outdir_path = filesystem.absBuf(&parts, &home_dir_buf);
home_dir_buf[outdir_path.len] = 0;
var outdir_path_ = home_dir_buf[0..outdir_path.len :0];
std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer;
if (create_options.verbose) {
Output.prettyErrorln("reading from {s}", .{outdir_path});
}
example_tag = Example.Tag.local_folder;
break :brk outdir_path;
}
}
outer: {
var parts = [_]string{ filesystem.top_level_dir, BUN_CREATE_DIR, positional };
var outdir_path = filesystem.absBuf(&parts, &home_dir_buf);
home_dir_buf[outdir_path.len] = 0;
var outdir_path_ = home_dir_buf[0..outdir_path.len :0];
std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer;
if (create_options.verbose) {
Output.prettyErrorln("reading from {s}", .{outdir_path});
}
example_tag = Example.Tag.local_folder;
break :brk outdir_path;
}
outer: {
if (env_loader.map.get("HOME")) |home_dir| {
var parts = [_]string{ home_dir, BUN_CREATE_DIR, positional };
var outdir_path = filesystem.absBuf(&parts, &home_dir_buf);
home_dir_buf[outdir_path.len] = 0;
var outdir_path_ = home_dir_buf[0..outdir_path.len :0];
std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer;
if (create_options.verbose) {
Output.prettyErrorln("reading from {s}", .{outdir_path});
}
example_tag = Example.Tag.local_folder;
break :brk outdir_path;
}
}
if (std.fs.path.isAbsolute(positional)) {
example_tag = Example.Tag.local_folder;
break :brk positional;
}
var repo_begin: usize = std.math.maxInt(usize);
// "https://github.com/foo/bar"
if (strings.startsWith(positional, "github.com/")) {
repo_begin = "github.com/".len;
}
if (strings.startsWith(positional, "https://github.com/")) {
repo_begin = "https://github.com/".len;
}
if (repo_begin == std.math.maxInt(usize) and positional[0] != '/') {
if (std.mem.indexOfScalar(u8, positional, '/')) |first_slash_index| {
if (std.mem.indexOfScalar(u8, positional, '/')) |last_slash_index| {
if (first_slash_index == last_slash_index and
positional[last_slash_index..].len > 0 and
last_slash_index > 0)
{
repo_begin = 0;
}
}
}
}
if (repo_begin != std.math.maxInt(usize)) {
const remainder = positional[repo_begin..];
if (std.mem.indexOfScalar(u8, remainder, '/')) |i| {
if (i > 0 and remainder[i + 1 ..].len > 0) {
if (std.mem.indexOfScalar(u8, remainder[i + 1 ..], '/')) |last_slash| {
example_tag = Example.Tag.github_repository;
break :brk std.mem.trim(u8, remainder[0 .. i + 1 + last_slash], "# \r\t");
} else {
example_tag = Example.Tag.github_repository;
break :brk std.mem.trim(u8, remainder, "# \r\t");
}
}
}
}
}
example_tag = Example.Tag.official;
break :brk positional;
};
const dirname: string = brk: {
if (positionals.len == 1) {
break :brk std.fs.path.basename(template);
}
break :brk positionals[1];
};
const destination = try filesystem.dirname_store.append([]const u8, resolve_path.joinAbs(filesystem.top_level_dir, .auto, dirname));
var progress = std.Progress{};
var node = progress.start(try ProgressBuf.print("Loading {s}", .{template}), 0);
progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
// alacritty is fast
if (env_loader.map.get("ALACRITTY_LOG") != null) {
progress.refresh_rate_ns = std.time.ns_per_ms * 8;
if (create_options.verbose) {
Output.prettyErrorln("alacritty gets faster progress bars ", .{});
}
}
defer {
progress.refresh();
}
var package_json_contents: MutableString = undefined;
var package_json_file: ?std.fs.File = null;
if (create_options.verbose) {
Output.prettyErrorln("Downloading as {s}\n", .{@tagName(example_tag)});
}
switch (example_tag) {
Example.Tag.github_repository, Example.Tag.official => {
var tarball_bytes: MutableString = switch (example_tag) {
.official => Example.fetch(ctx, &env_loader, template, &progress, node) catch |err| {
switch (err) {
error.HTTPForbidden, error.ExampleNotFound => {
node.end();
progress.refresh();
Output.prettyError("\nerror:\"{s}\" was not found. Here are templates you can use:\n\n", .{
template,
});
Output.flush();
const examples = try Example.fetchAllLocalAndRemote(ctx, null, &env_loader, filesystem);
Example.print(examples.items, dirname);
Global.exit(1);
},
else => {
node.end();
progress.refresh();
Output.prettyErrorln("\n\n", .{});
return err;
},
}
},
.github_repository => Example.fetchFromGitHub(ctx, &env_loader, template, &progress, node) catch |err| {
switch (err) {
error.HTTPForbidden => {
node.end();
progress.refresh();
Output.prettyError("\nerror: GitHub returned 403. This usually means GitHub is rate limiting your requests.\nTo fix this, either:A) pass a GITHUB_ACCESS_TOKEN environment variable to bun\n B)Wait a little and try again\n", .{});
Global.crash();
},
error.GitHubRepositoryNotFound => {
node.end();
progress.refresh();
Output.prettyError("\nerror:\"{s}\" was not found on GitHub. Here are templates you can use:\n\n", .{
template,
});
Output.flush();
const examples = try Example.fetchAllLocalAndRemote(ctx, null, &env_loader, filesystem);
Example.print(examples.items, dirname);
Global.crash();
},
else => {
node.end();
progress.refresh();
Output.prettyErrorln("\n\n", .{});
return err;
},
}
},
else => unreachable,
};
node.name = try ProgressBuf.print("Decompressing {s}", .{template});
node.setCompletedItems(0);
node.setEstimatedTotalItems(0);
progress.refresh();
var file_buf = try ctx.allocator.alloc(u8, 16384);
var tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
var gunzip = try Zlib.ZlibReaderArrayList.init(tarball_bytes.list.items, &tarball_buf_list, ctx.allocator);
try gunzip.readAll();
gunzip.deinit();
node.name = try ProgressBuf.print("Extracting {s}", .{template});
node.setCompletedItems(0);
node.setEstimatedTotalItems(0);
progress.refresh();
var pluckers: [1]Archive.Plucker = if (!create_options.skip_package_json)
[1]Archive.Plucker{try Archive.Plucker.init("package.json", 2048, ctx.allocator)}
else
[1]Archive.Plucker{undefined};
var archive_context = Archive.Context{
.pluckers = pluckers[0..@as(usize, @intCast(@intFromBool(!create_options.skip_package_json)))],
.all_files = undefined,
.overwrite_list = bun.StringArrayHashMap(void).init(ctx.allocator),
};
if (!create_options.overwrite) {
try Archive.getOverwritingFileList(
tarball_buf_list.items,
destination,
&archive_context,
@TypeOf(filesystem.dirname_store),
filesystem.dirname_store,
1,
);
inline for (never_conflict) |never_conflict_path| {
_ = archive_context.overwrite_list.swapRemove(never_conflict_path);
}
if (archive_context.overwrite_list.count() > 0) {
node.end();
progress.refresh();
// Thank you create-react-app for this copy (and idea)
Output.prettyErrorln(
"\nerror: The directory {s}/ contains files that could conflict:\n\n",
.{
std.fs.path.basename(destination),
},
);
for (archive_context.overwrite_list.keys()) |path| {
if (strings.endsWith(path, std.fs.path.sep_str)) {
Output.prettyError("{s}", .{path[0 .. @max(path.len, 1) - 1]});
Output.prettyErrorln(std.fs.path.sep_str, .{});
} else {
Output.prettyErrorln(" {s}", .{path});
}
}
Output.prettyErrorln("\nTo download {s} anyway, use --force", .{template});
Global.exit(1);
}
}
_ = try Archive.extractToDisk(
tarball_buf_list.items,
destination,
&archive_context,
void,
{},
1,
false,
false,
);
if (!create_options.skip_package_json) {
var plucker = pluckers[0];
if (plucker.found and plucker.fd != 0) {
node.name = "Updating package.json";
progress.refresh();
package_json_contents = plucker.contents;
package_json_file = std.fs.File{ .handle = bun.fdcast(plucker.fd) };
}
}
},
.local_folder => {
var template_parts = [_]string{template};
node.name = "Copying files";
progress.refresh();
const template_dir = std.fs.cwd().openIterableDir(filesystem.abs(&template_parts), .{}) catch |err| {
node.end();
progress.refresh();
Output.prettyErrorln("{s}: opening dir {s}", .{ @errorName(err), template });
Global.exit(1);
};
std.fs.deleteTreeAbsolute(destination) catch {};
const destination_dir__ = std.fs.cwd().makeOpenPathIterable(destination, .{}) catch |err| {
node.end();
progress.refresh();
Output.prettyErrorln("{s}: creating dir {s}", .{ @errorName(err), destination });
Global.exit(1);
};
const destination_dir = destination_dir__.dir;
const Walker = @import("../walker_skippable.zig");
var walker_ = try Walker.walk(template_dir, ctx.allocator, skip_files, skip_dirs);
defer walker_.deinit();
// Copies every regular file produced by the walker into the destination
// directory, creating intermediate directories on demand and preserving
// the POSIX file mode (so executables stay executable).
const FileCopier = struct {
pub fn copy(
destination_dir_: std.fs.Dir,
walker: *Walker,
node_: *std.Progress.Node,
progress_: *std.Progress,
) !void {
while (try walker.next()) |entry| {
// Only regular files are copied; directories are created lazily below.
if (entry.kind != .file) continue;
// First createFile attempt may fail because the parent directory
// doesn't exist yet; make the path and retry once before giving up.
var outfile = destination_dir_.createFile(entry.path, .{}) catch brk: {
if (std.fs.path.dirname(entry.path)) |entry_dirname| {
destination_dir_.makePath(entry_dirname) catch {};
}
break :brk destination_dir_.createFile(entry.path, .{}) catch |err| {
node_.end();
progress_.refresh();
Output.prettyErrorln("{s}: copying file {s}", .{ @errorName(err), entry.path });
Global.exit(1);
};
};
defer outfile.close();
defer node_.completeOne();
var infile = try entry.dir.dir.openFile(entry.basename, .{ .mode = .read_only });
defer infile.close();
if (comptime Environment.isPosix) {
// Assumption: you only really care about making sure something that was executable is still executable
const stat = infile.stat() catch continue;
_ = C.fchmod(outfile.handle, stat.mode);
} else {
// Mode preservation is not implemented on non-POSIX targets yet.
bun.todo(@src(), void{});
}
CopyFile.copyFile(infile.handle, outfile.handle) catch |err| {
Output.prettyErrorln("{s}: copying file {s}", .{ @errorName(err), entry.path });
Global.exit(1);
};
}
}
};
try FileCopier.copy(destination_dir, &walker_, node, &progress);
package_json_file = destination_dir.openFile("package.json", .{ .mode = .read_write }) catch null;
read_package_json: {
if (package_json_file) |pkg| {
const size = brk: {
if (comptime Environment.isWindows) {
break :brk try pkg.getEndPos();
}
const stat = pkg.stat() catch |err| {
node.end();
progress.refresh();
package_json_file = null;
Output.prettyErrorln("Error reading package.json: {s}", .{@errorName(err)});
break :read_package_json;
};
if (stat.kind != .file or stat.size == 0) {
package_json_file = null;
node.end();
progress.refresh();
break :read_package_json;
}
break :brk stat.size;
};
package_json_contents = try MutableString.init(ctx.allocator, size);
package_json_contents.list.expandToCapacity();
_ = pkg.preadAll(package_json_contents.list.items, 0) catch |err| {
package_json_file = null;
node.end();
progress.refresh();
Output.prettyErrorln("Error reading package.json: {s}", .{@errorName(err)});
break :read_package_json;
};
// The printer doesn't truncate, so we must do so manually
std.os.ftruncate(pkg.handle, 0) catch {};
initializeStore();
}
}
},
else => unreachable,
}
node.end();
progress.refresh();
var is_nextjs = false;
var is_create_react_app = false;
var create_react_app_entry_point_path: string = "";
var preinstall_tasks = std.mem.zeroes(std.ArrayListUnmanaged([]const u8));
var postinstall_tasks = std.mem.zeroes(std.ArrayListUnmanaged([]const u8));
var has_dependencies: bool = false;
const PATH = env_loader.map.get("PATH") orelse "";
{
var parent_dir = try std.fs.openDirAbsolute(destination, .{});
defer parent_dir.close();
std.os.linkat(parent_dir.fd, "gitignore", parent_dir.fd, ".gitignore", 0) catch {};
std.os.unlinkat(
parent_dir.fd,
"gitignore",
0,
) catch {};
std.os.unlinkat(
parent_dir.fd,
".npmignore",
0,
) catch {};
}
var start_command: string = "bun dev";
process_package_json: {
if (create_options.skip_package_json) package_json_file = null;
if (package_json_file != null) {
initializeStore();
var source = logger.Source.initPathString("package.json", package_json_contents.list.items);
var package_json_expr = ParseJSON(&source, ctx.log, ctx.allocator) catch {
package_json_file = null;
break :process_package_json;
};
if (package_json_expr.data != .e_object) {
package_json_file = null;
break :process_package_json;
}
var properties_list = std.ArrayList(js_ast.G.Property).fromOwnedSlice(default_allocator, package_json_expr.data.e_object.properties.slice());
if (ctx.log.errors > 0) {
if (Output.enable_ansi_colors) {
try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true);
} else {
try ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false);
}
package_json_file = null;
break :process_package_json;
}
if (package_json_expr.asProperty("name")) |name_expr| {
if (name_expr.expr.data == .e_string) {
var basename = std.fs.path.basename(destination);
name_expr.expr.data.e_string.data = @as([*]u8, @ptrFromInt(@intFromPtr(basename.ptr)))[0..basename.len];
}
}
const Needs = struct {
bun_bun_for_nextjs: bool = false,
bun_macro_relay: bool = false,
bun_macro_relay_dependency: bool = false,
bun_framework_next: bool = false,
react_refresh: bool = false,
};
var needs = Needs{};
var has_relay = false;
var has_bun_framework_next = false;
var has_react_refresh = false;
var has_bun_macro_relay = false;
var has_react = false;
var has_react_scripts = false;
// Removes known-incompatible tooling packages (bundlers/transpilers bun
// replaces) from a package.json dependencies list, compacting in place.
const Prune = struct {
pub const packages = ComptimeStringMap(void, .{
.{ "@parcel/babel-preset", {} },
.{ "@parcel/core", {} },
.{ "@swc/cli", {} },
.{ "@swc/core", {} },
.{ "@webpack/cli", {} },
.{ "react-scripts", {} },
.{ "webpack-cli", {} },
.{ "webpack", {} },
// one of cosmic config's imports breaks stuff
.{ "cosmiconfig", {} },
});
// Running total of removed entries, reported to the user afterwards.
pub var prune_count: u16 = 0;
// Filters `list` in place, keeping only properties whose key is not in
// `packages`. Returns the shortened prefix of `list`; entries past the
// returned length are stale. Order of kept entries is preserved.
pub fn prune(list: []js_ast.G.Property) []js_ast.G.Property {
var i: usize = 0;
var out_i: usize = 0;
while (i < list.len) : (i += 1) {
const key = list[i].key.?.data.e_string.data;
const do_prune = packages.has(key);
prune_count += @as(u16, @intCast(@intFromBool(do_prune)));
if (!do_prune) {
list[out_i] = list[i];
out_i += 1;
}
}
return list[0..out_i];
}
};
var dev_dependencies: ?js_ast.Expr = null;
var dependencies: ?js_ast.Expr = null;
if (package_json_expr.asProperty("devDependencies")) |q| {
const property = q.expr;
if (property.data == .e_object and property.data.e_object.properties.len > 0) {
unsupported_packages.update(property);
has_react_scripts = has_react_scripts or property.hasAnyPropertyNamed(&.{"react-scripts"});
has_relay = has_relay or property.hasAnyPropertyNamed(&.{ "react-relay", "relay-runtime", "babel-plugin-relay" });
property.data.e_object.properties = js_ast.G.Property.List.init(Prune.prune(property.data.e_object.properties.slice()));
if (property.data.e_object.properties.len > 0) {
has_dependencies = true;
dev_dependencies = q.expr;
has_bun_framework_next = has_bun_framework_next or property.hasAnyPropertyNamed(&.{"bun-framework-next"});
has_react = has_react or property.hasAnyPropertyNamed(&.{ "react", "react-dom", "react-relay", "@emotion/react" });
has_bun_macro_relay = has_bun_macro_relay or property.hasAnyPropertyNamed(&.{"bun-macro-relay"});
has_react_refresh = has_react_refresh or property.hasAnyPropertyNamed(&.{"react-refresh"});
}
}
}
if (package_json_expr.asProperty("dependencies")) |q| {
const property = q.expr;
if (property.data == .e_object and property.data.e_object.properties.len > 0) {
unsupported_packages.update(property);
has_react_scripts = has_react_scripts or property.hasAnyPropertyNamed(&.{"react-scripts"});
has_relay = has_relay or property.hasAnyPropertyNamed(&.{ "react-relay", "relay-runtime", "babel-plugin-relay" });
property.data.e_object.properties = js_ast.G.Property.List.init(Prune.prune(property.data.e_object.properties.slice()));
if (property.data.e_object.properties.len > 0) {
has_dependencies = true;
dependencies = q.expr;
if (property.asProperty("next")) |next_q| {
is_nextjs = true;
needs.bun_bun_for_nextjs = true;
next_q.expr.data.e_string.data = constStrToU8(target_nextjs_version);
}
has_bun_framework_next = has_bun_framework_next or property.hasAnyPropertyNamed(&.{"bun-framework-next"});
has_react = has_react or is_nextjs or property.hasAnyPropertyNamed(&.{ "react", "react-dom", "react-relay", "@emotion/react" });
has_react_refresh = has_react_refresh or property.hasAnyPropertyNamed(&.{"react-refresh"});
has_bun_macro_relay = has_bun_macro_relay or property.hasAnyPropertyNamed(&.{"bun-macro-relay"});
}
}
}
needs.bun_macro_relay = !has_bun_macro_relay and has_relay;
needs.react_refresh = !has_react_refresh and has_react;
needs.bun_framework_next = is_nextjs and !has_bun_framework_next;
needs.bun_bun_for_nextjs = is_nextjs;
needs.bun_macro_relay_dependency = needs.bun_macro_relay;
var bun_bun_for_react_scripts = false;
var bun_macros_prop: ?js_ast.Expr = null;
var bun_prop: ?js_ast.Expr = null;
var bun_relay_prop: ?js_ast.Expr = null;
var needs_bun_prop = needs.bun_macro_relay or has_bun_macro_relay;
var needs_bun_macros_prop = needs_bun_prop;
if (needs_bun_macros_prop) {
if (package_json_expr.asProperty("bun")) |bun_| {
needs_bun_prop = false;
bun_prop = bun_.expr;
if (bun_.expr.asProperty("macros")) |macros_q| {
bun_macros_prop = macros_q.expr;
needs_bun_macros_prop = false;
if (macros_q.expr.asProperty("react-relay")) |react_relay_q| {
bun_relay_prop = react_relay_q.expr;
needs.bun_macro_relay = react_relay_q.expr.asProperty("graphql") == null;
}
if (macros_q.expr.asProperty("babel-plugin-relay/macro")) |react_relay_q| {
bun_relay_prop = react_relay_q.expr;
needs.bun_macro_relay = react_relay_q.expr.asProperty("graphql") == null;
}
}
}
}
if (Prune.prune_count > 0) {
Output.prettyErrorln("[package.json] Pruned {d} unnecessary packages", .{Prune.prune_count});
}
// if (create_options.verbose) {
if (needs.bun_macro_relay) {
Output.prettyErrorln("[package.json] Detected Relay -> added \"bun-macro-relay\"", .{});
}
if (needs.react_refresh) {
Output.prettyErrorln("[package.json] Detected React -> added \"react-refresh\"", .{});
}
if (needs.bun_framework_next) {
Output.prettyErrorln("[package.json] Detected Next -> added \"bun-framework-next\"", .{});
} else if (is_nextjs) {
Output.prettyErrorln("[package.json] Detected Next.js", .{});
}
// }
var needs_to_inject_dev_dependency = needs.react_refresh or needs.bun_macro_relay;
var needs_to_inject_dependency = needs.bun_framework_next;
const dependencies_to_inject_count = @as(usize, @intCast(@intFromBool(needs.bun_framework_next)));
const dev_dependencies_to_inject_count = @as(usize, @intCast(@intFromBool(needs.react_refresh))) +
@as(usize, @intCast(@intFromBool(needs.bun_macro_relay)));
const new_properties_count = @as(usize, @intCast(@intFromBool(needs_to_inject_dev_dependency and dev_dependencies == null))) +
@as(usize, @intCast(@intFromBool(needs_to_inject_dependency and dependencies == null))) +
@as(usize, @intCast(@intFromBool(needs_bun_prop)));
if (new_properties_count != 0) {
try properties_list.ensureUnusedCapacity(new_properties_count);
}
const E = js_ast.E;
const InjectionPrefill = struct {
const dependencies_string = "dependencies";
const dev_dependencies_string = "devDependencies";
const bun_string = "bun";
const macros_string = "macros";
const bun_macros_relay_path = "bun-macro-relay";
pub var dependencies_e_string = E.String.init(dependencies_string);
pub var devDependencies_e_string = E.String.init(dev_dependencies_string);
pub var bun_e_string = E.String.init(bun_string);
pub var macros_e_string = E.String.init(macros_string);
pub var react_relay_string = E.String.init("react-relay");
pub var bun_macros_relay_path_string = E.String.init("bun-macro-relay");
pub var babel_plugin_relay_macro = E.String.init("babel-plugin-relay/macro");
pub var babel_plugin_relay_macro_js = E.String.init("babel-plugin-relay/macro.js");
pub var graphql_string = E.String.init("graphql");
var npx_react_scripts_build_str = E.String.init("npx react-scripts build");
pub const npx_react_scripts_build = js_ast.Expr{ .data = .{ .e_string = &npx_react_scripts_build_str }, .loc = logger.Loc.Empty };
var bun_macro_relay_properties = [_]js_ast.G.Property{
js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &graphql_string,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_string = &bun_macros_relay_path_string,
},
.loc = logger.Loc.Empty,
},
},
};
var bun_macro_relay_object = js_ast.E.Object{
.properties = undefined,
};
var bun_macros_relay_object_properties = [_]js_ast.G.Property{
js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &react_relay_string,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_object = &bun_macro_relay_object,
},
.loc = logger.Loc.Empty,
},
},
js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &babel_plugin_relay_macro,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_object = &bun_macro_relay_object,
},
.loc = logger.Loc.Empty,
},
},
js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &babel_plugin_relay_macro_js,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_object = &bun_macro_relay_object,
},
.loc = logger.Loc.Empty,
},
},
};
pub var bun_macros_relay_object = E.Object{
.properties = undefined,
};
var bun_macros_relay_only_object_string = js_ast.E.String.init("macros");
pub var bun_macros_relay_only_object_properties = [_]js_ast.G.Property{
js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &bun_macros_relay_only_object_string,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_object = &bun_macros_relay_object,
},
.loc = logger.Loc.Empty,
},
},
};
pub var bun_macros_relay_only_object = E.Object{ .properties = undefined };
var bun_only_macros_string = js_ast.E.String.init("bun");
pub var bun_only_macros_relay_property = js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &bun_only_macros_string,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_object = &bun_macros_relay_only_object,
},
.loc = logger.Loc.Empty,
},
};
pub var bun_framework_next_string = js_ast.E.String.init("bun-framework-next");
pub var bun_framework_next_version = js_ast.E.String.init("latest");
pub var bun_framework_next_property = js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &bun_framework_next_string,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_string = &bun_framework_next_version,
},
.loc = logger.Loc.Empty,
},
};
pub var bun_macro_relay_dependency_string = js_ast.E.String.init("bun-macro-relay");
pub var bun_macro_relay_dependency_version = js_ast.E.String.init("latest");
pub var bun_macro_relay_dependency = js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &bun_macro_relay_dependency_string,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_string = &bun_macro_relay_dependency_version,
},
.loc = logger.Loc.Empty,
},
};
pub var refresh_runtime_string = js_ast.E.String.init("react-refresh");
pub var refresh_runtime_version = js_ast.E.String.init("0.10.0");
pub var react_refresh_dependency = js_ast.G.Property{
.key = js_ast.Expr{
.data = .{
.e_string = &refresh_runtime_string,
},
.loc = logger.Loc.Empty,
},
.value = js_ast.Expr{
.data = .{
.e_string = &refresh_runtime_version,
},
.loc = logger.Loc.Empty,
},
};
pub var dev_dependencies_key = js_ast.Expr{
.data = .{
.e_string = &devDependencies_e_string,
},
.loc = logger.Loc.Empty,
};
pub var dependencies_key = js_ast.Expr{
.data = .{ .e_string = &dependencies_e_string },
.loc = logger.Loc.Empty,
};
pub const bun_bun_for_nextjs_task: string = "bun bun --use next";
};
// Late wiring: these property Lists reference `*_properties` arrays that are
// presumably declared earlier in InjectionPrefill (not visible here) and
// cannot be referenced from the struct literal itself, so they are patched
// in at runtime before first use.
InjectionPrefill.bun_macro_relay_object.properties = js_ast.G.Property.List.init(InjectionPrefill.bun_macro_relay_properties[0..]);
InjectionPrefill.bun_macros_relay_object.properties = js_ast.G.Property.List.init(&InjectionPrefill.bun_macros_relay_object_properties);
InjectionPrefill.bun_macros_relay_only_object.properties = js_ast.G.Property.List.init(&InjectionPrefill.bun_macros_relay_only_object_properties);
// package.json needs a "devDependencies" object but has none: create an empty
// E.Object and append it to the root property list.
// NOTE(review): appendAssumeCapacity assumes spare capacity was reserved when
// properties_list was built — that happens outside this chunk; verify there.
if (needs_to_inject_dev_dependency and dev_dependencies == null) {
var e_object = try ctx.allocator.create(E.Object);
e_object.* = E.Object{};
const value = js_ast.Expr{ .data = .{ .e_object = e_object }, .loc = logger.Loc.Empty };
properties_list.appendAssumeCapacity(js_ast.G.Property{
.key = InjectionPrefill.dev_dependencies_key,
.value = value,
});
dev_dependencies = value;
}
// Same for a missing "dependencies" object.
if (needs_to_inject_dependency and dependencies == null) {
var e_object = try ctx.allocator.create(E.Object);
e_object.* = E.Object{};
const value = js_ast.Expr{ .data = .{ .e_object = e_object }, .loc = logger.Loc.Empty };
properties_list.appendAssumeCapacity(js_ast.G.Property{
.key = InjectionPrefill.dependencies_key,
.value = value,
});
dependencies = value;
}
// inject an object like this, handling each permutation of what may or may not exist:
// {
// "bun": {
// "macros": {
// "react-relay": {
// "graphql": "bun-macro-relay"
// }
// }
// }
// }
bun_section: {
// Case 1: "bun.macros.react-relay" already exists — replace the
// "react-relay" object wholesale with the prefilled { "graphql": "bun-macro-relay" }.
// "bun.macros.react-relay.graphql"
if (needs.bun_macro_relay and !needs_bun_prop and !needs_bun_macros_prop) {
// "graphql" is the only valid one for now, so anything else in this object is invalid.
bun_relay_prop.?.data.e_object = InjectionPrefill.bun_macros_relay_object.properties.ptr[0].value.?.data.e_object;
needs_bun_macros_prop = false;
needs_bun_prop = false;
needs.bun_macro_relay = false;
break :bun_section;
}
// Case 2: "bun" exists but "bun.macros" does not — prepend the prefilled
// macros property to the existing "bun" object's property list.
// "bun.macros"
if (needs_bun_macros_prop and !needs_bun_prop) {
var obj = bun_prop.?.data.e_object;
var properties = try std.ArrayList(js_ast.G.Property).initCapacity(
ctx.allocator,
obj.properties.len + InjectionPrefill.bun_macros_relay_object.properties.len,
);
// NOTE(review): this defer is registered *before* the insertSlice calls, so
// if either insertSlice errors, obj.properties is still overwritten with a
// partially-built list. The injection blocks further down register their
// update defer only after the first insertSlice — confirm which ordering
// is intended.
defer obj.properties.update(properties);
try properties.insertSlice(0, obj.properties.slice());
try properties.insertSlice(0, InjectionPrefill.bun_macros_relay_object.properties.slice());
needs_bun_macros_prop = false;
needs_bun_prop = false;
needs.bun_macro_relay = false;
break :bun_section;
}
// Case 3: there is no "bun" property at all — append a complete prefilled
// "bun": { "macros": { ... } } property to the package.json root.
// "bun"
if (needs_bun_prop) {
try properties_list.append(InjectionPrefill.bun_only_macros_relay_property);
needs_bun_macros_prop = false;
needs_bun_prop = false;
needs.bun_macro_relay = false;
break :bun_section;
}
}
// Prepend the existing "dependencies" entries into a new list sized for the
// extra injected entries, then append the prefilled ones.
if (needs_to_inject_dependency) {
defer needs_to_inject_dependency = false;
var obj = dependencies.?.data.e_object;
var properties = try std.ArrayList(js_ast.G.Property).initCapacity(
ctx.allocator,
obj.properties.len + dependencies_to_inject_count,
);
try properties.insertSlice(0, obj.properties.slice());
// Write the extended list back into the AST object on scope exit.
defer obj.properties.update(properties);
if (needs.bun_framework_next) {
// Capacity math: initCapacity reserved len + dependencies_to_inject_count
// and insertSlice consumed exactly len, so this append cannot overflow.
properties.appendAssumeCapacity(InjectionPrefill.bun_framework_next_property);
needs.bun_framework_next = false;
}
}
// Same pattern for "devDependencies": inject "bun-macro-relay" and/or
// "react-refresh" as needed.
if (needs_to_inject_dev_dependency) {
defer needs_to_inject_dev_dependency = false;
var obj = dev_dependencies.?.data.e_object;
var properties = try std.ArrayList(js_ast.G.Property).initCapacity(
ctx.allocator,
obj.properties.len + dev_dependencies_to_inject_count,
);
try properties.insertSlice(0, obj.properties.slice());
defer obj.properties.update(properties);
if (needs.bun_macro_relay_dependency) {
properties.appendAssumeCapacity(InjectionPrefill.bun_macro_relay_dependency);
needs.bun_macro_relay_dependency = false;
}
if (needs.react_refresh) {
properties.appendAssumeCapacity(InjectionPrefill.react_refresh_dependency);
needs.react_refresh = false;
}
}
// this is a little dicey
// The idea is:
// Before the closing </body> tag of Create React App's public/index.html
// Inject "<script type="module" async src="/src/index.js"></script>"
// Only do this for create-react-app
// Which we define as:
// 1. has a "public/index.html"
// 2. "react-scripts" in package.json dependencies or devDependencies
// 3. has a src/index.{jsx,tsx,ts,mts,mcjs}
// If at any point those expectations are not matched OR the string /src/index.js already exists in the HTML
// don't do it!
// Create React App detection + entry point discovery. Everything inside the
// :bail block is best-effort: any failure skips the HTML injection entirely.
if (has_react_scripts) {
bail: {
var public_index_html_parts = [_]string{ destination, "public/index.html" };
var public_index_html_path = filesystem.absBuf(&public_index_html_parts, &bun_path_buf);
// CRA requirement #1: public/index.html must exist and be writable.
const public_index_html_file = std.fs.openFileAbsolute(public_index_html_path, .{ .mode = .read_write }) catch break :bail;
defer public_index_html_file.close();
const file_extensions_to_try = [_]string{ ".tsx", ".ts", ".jsx", ".js", ".mts", ".mcjs" };
var found_file = false;
var entry_point_path: string = "";
var entry_point_file_parts = [_]string{ destination, "src/index" };
// absBuf writes "<destination>/src/index" into bun_path_buf; each candidate
// extension is then appended in place right after it, reusing the buffer.
var entry_point_file_path_base = filesystem.absBuf(&entry_point_file_parts, &bun_path_buf);
for (file_extensions_to_try) |ext| {
bun.copy(u8, bun_path_buf[entry_point_file_path_base.len..], ext);
entry_point_path = bun_path_buf[0 .. entry_point_file_path_base.len + ext.len];
// Keep the first extension whose file actually exists.
std.fs.accessAbsolute(entry_point_path, .{}) catch continue;
found_file = true;
break;
}
// CRA requirement #2: an entry point at src/index.* must exist.
if (!found_file) break :bail;
var public_index_file_contents = public_index_html_file.readToEndAlloc(ctx.allocator, public_index_html_file.getEndPos() catch break :bail) catch break :bail;
// If the HTML already references the entry point (e.g. "/src/index.tsx"),
// don't inject it a second time.
if (std.mem.indexOf(u8, public_index_file_contents, entry_point_path[destination.len..]) != null) {
break :bail;
}
// Find the last closing body tag so the script can be injected just before it.
// The needle here had been stripped to "" (HTML-tag mangling); searching for
// the empty string would match at the end of the file instead of at </body>.
// Bail out of the whole CRA injection if the HTML has no closing body tag.
var body_closing_tag: usize = std.mem.lastIndexOf(u8, public_index_file_contents, "</body>") orelse break :bail;
// Buffer for the rewritten index.html; start by copying everything up to the
// closing body tag.
var public_index_file_out = std.ArrayList(u8).initCapacity(ctx.allocator, public_index_file_contents.len) catch break :bail;
var html_writer = public_index_file_out.writer();
_ = html_writer.writeAll(public_index_file_contents[0..body_closing_tag]) catch break :bail;
// Build a "./src/index.tsx"-style project-relative entry point path from the
// absolute path by stripping the destination prefix and any leading slashes.
create_react_app_entry_point_path = std.fmt.allocPrint(
ctx.allocator,
"./{s}",
.{
std.mem.trimLeft(
u8,
entry_point_path[destination.len..],
"/",
),
},
) catch break :bail;
// Inject a module <script> tag for the detected entry point (path without the
// leading "./"), then emit the remainder of the original HTML starting at the
// closing body tag. The previous format string had been stripped of its
// markup down to "\n{s}", which left the second argument unused — std.fmt
// rejects unused format arguments at compile time.
html_writer.print(
    "<script type=\"module\" async src=\"/{s}\"></script>\n{s}",
    .{
        create_react_app_entry_point_path[2..],
        public_index_file_contents[body_closing_tag..],
    },
) catch break :bail;
// CRA templates reference assets via %PUBLIC_URL% placeholders; we serve from
// the site root, so strip the placeholder entirely (caller owns `outfile`).
var outfile = std.mem.replaceOwned(u8, ctx.allocator, public_index_file_out.items, "%PUBLIC_URL%", "") catch break :bail;
// don't do this actually
// it completely breaks when there is more than one CSS file loaded
// // bonus: check for an index.css file
// // inject it into the .html file statically if the file exists but isn't already in
// inject_css: {
// const head_i: usize = std.mem.indexOf(u8, outfile, "