-rw-r--r--   src/cli/run_command.zig   338
-rw-r--r--   src/install/install.zig    96
-rw-r--r--   src/install/lockfile.zig    69
3 files changed, 336 insertions, 167 deletions
diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig
index 5986781e4..9a56041b3 100644
--- a/src/cli/run_command.zig
+++ b/src/cli/run_command.zig
@@ -204,7 +204,7 @@ pub const RunCommand = struct {
}
pub fn runPackageScript(
- ctx: Command.Context,
+ allocator: std.mem.Allocator,
original_script: string,
name: string,
cwd: string,
@@ -215,7 +215,7 @@ pub const RunCommand = struct {
const shell_bin = findShell(env.map.get("PATH") orelse "", cwd) orelse return error.MissingShell;
var script = original_script;
- var copy_script = try std.ArrayList(u8).initCapacity(ctx.allocator, script.len);
+ var copy_script = try std.ArrayList(u8).initCapacity(allocator, script.len);
// We're going to do this slowly.
// Find exact matches of yarn, pnpm, npm
@@ -228,7 +228,7 @@ pub const RunCommand = struct {
for (passthrough) |p| {
combined_script_len += p.len + 1;
}
- var combined_script_buf = try ctx.allocator.alloc(u8, combined_script_len);
+ var combined_script_buf = try allocator.alloc(u8, combined_script_len);
std.mem.copy(u8, combined_script_buf, script);
var remaining_script_buf = combined_script_buf[script.len..];
for (passthrough) |p| {
@@ -240,14 +240,14 @@ pub const RunCommand = struct {
}
var argv = [_]string{ shell_bin, "-c", combined_script };
- var child_process = std.ChildProcess.init(&argv, ctx.allocator);
+ var child_process = std.ChildProcess.init(&argv, allocator);
if (!silent) {
Output.prettyErrorln("<r><d><magenta>$<r> <d><b>{s}<r>", .{combined_script});
Output.flush();
}
- var buf_map = try env.map.cloneToEnvMap(ctx.allocator);
+ var buf_map = try env.map.cloneToEnvMap(allocator);
child_process.env_map = &buf_map;
child_process.cwd = cwd;
@@ -337,6 +337,168 @@ pub const RunCommand = struct {
}
pub const Filter = enum { script, bin, all, bun_js, all_plus_bun_js, script_and_descriptions, script_exclude };
+ const DirInfo = @import("../resolver/dir_info.zig");
+ pub fn configureEnvForRun(
+ ctx: Command.Context,
+ this_bundler: *bundler.Bundler,
+ env: ?*DotEnv.Loader,
+ ORIGINAL_PATH: *string,
+ log_errors: bool,
+ ) !*DirInfo {
+ var args = ctx.args;
+ args.node_modules_bundle_path = null;
+ args.node_modules_bundle_path_server = null;
+ args.generate_node_module_bundle = false;
+
+ this_bundler.* = try bundler.Bundler.init(ctx.allocator, ctx.log, args, null, env);
+ this_bundler.options.env.behavior = Api.DotEnvBehavior.load_all;
+ this_bundler.options.env.prefix = "";
+
+ this_bundler.resolver.care_about_bin_folder = true;
+ this_bundler.resolver.care_about_scripts = true;
+ this_bundler.configureLinker();
+
+ var root_dir_info = this_bundler.resolver.readDirInfo(this_bundler.fs.top_level_dir) catch |err| {
+ if (!log_errors) return error.CouldntReadCurrentDirectory;
+ if (Output.enable_ansi_colors) {
+ ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
+ } else {
+ ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
+ }
+ Output.prettyErrorln("Error loading directory: \"{s}\"", .{@errorName(err)});
+ Output.flush();
+ return err;
+ } orelse {
+ if (Output.enable_ansi_colors) {
+ ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
+ } else {
+ ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
+ }
+ Output.prettyErrorln("Error loading current directory", .{});
+ Output.flush();
+ return error.CouldntReadCurrentDirectory;
+ };
+
+ var package_json_dir: string = "";
+
+ if (env == null) {
+ this_bundler.env.loadProcess();
+
+ if (this_bundler.env.map.get("NODE_ENV")) |node_env| {
+ if (strings.eqlComptime(node_env, "production")) {
+ this_bundler.options.production = true;
+ }
+ }
+
+ // Run .env in the root dir
+ this_bundler.runEnvLoader() catch {};
+
+ if (root_dir_info.getEntries()) |dir| {
+
+ // Run .env again if it exists in a parent dir
+ if (this_bundler.options.production) {
+ this_bundler.env.load(&this_bundler.fs.fs, dir, false) catch {};
+ } else {
+ this_bundler.env.load(&this_bundler.fs.fs, dir, true) catch {};
+ }
+ }
+ }
+
+ var bin_dirs = this_bundler.resolver.binDirs();
+
+ if (root_dir_info.enclosing_package_json) |package_json| {
+ if (root_dir_info.package_json == null) {
+ // no trailing slash
+ package_json_dir = std.mem.trimRight(u8, package_json.source.path.name.dir, "/");
+ }
+ }
+
+ var PATH = this_bundler.env.map.get("PATH") orelse "";
+ ORIGINAL_PATH.* = PATH;
+
+ if (bin_dirs.len > 0 or package_json_dir.len > 0) {
+ var new_path_len: usize = PATH.len + 2;
+ for (bin_dirs) |bin| {
+ new_path_len += bin.len + 1;
+ }
+
+ if (package_json_dir.len > 0) {
+ new_path_len += package_json_dir.len + 1;
+ }
+
+ var new_path = try std.ArrayList(u8).initCapacity(ctx.allocator, new_path_len);
+
+ {
+ var needs_colon = false;
+ if (package_json_dir.len > 0) {
+ defer needs_colon = true;
+ if (needs_colon) {
+ try new_path.append(':');
+ }
+ try new_path.appendSlice(package_json_dir);
+ }
+
+ var bin_dir_i: i32 = @intCast(i32, bin_dirs.len) - 1;
+ // Iterate in reverse order
+ // Directories are added to bin_dirs in top-down order
+ // That means the parent-most node_modules/.bin will be first
+ while (bin_dir_i >= 0) : (bin_dir_i -= 1) {
+ defer needs_colon = true;
+ if (needs_colon) {
+ try new_path.append(':');
+ }
+ try new_path.appendSlice(bin_dirs[@intCast(usize, bin_dir_i)]);
+ }
+
+ if (needs_colon) {
+ try new_path.append(':');
+ }
+ try new_path.appendSlice(PATH);
+ }
+
+ this_bundler.env.map.put("PATH", new_path.items) catch unreachable;
+ PATH = new_path.items;
+ }
+
+ this_bundler.env.loadNodeJSConfig(this_bundler.fs) catch {};
+ this_bundler.env.map.putDefault("npm_config_local_prefix", this_bundler.fs.top_level_dir) catch unreachable;
+
+ // we have no way of knowing what version they're expecting without running the node executable
+ // running the node executable is too slow
+ // so we will just hardcode it to LTS
+ this_bundler.env.map.putDefault(
+ "npm_config_user_agent",
+ // the use of npm/? is copying yarn
+ // e.g.
+ // > "yarn/1.22.4 npm/? node/v12.16.3 darwin x64",
+ "bun/" ++ Global.package_json_version ++ " npm/? node/v16.14.0 " ++ Global.os_name ++ " " ++ Global.arch_name,
+ ) catch unreachable;
+
+ if (this_bundler.env.get("npm_execpath") == null) {
+ // we don't care if this fails
+ if (std.fs.selfExePathAlloc(ctx.allocator)) |self_exe_path| {
+ this_bundler.env.map.putDefault("npm_execpath", self_exe_path) catch unreachable;
+ } else |_| {}
+ }
+
+ if (root_dir_info.enclosing_package_json) |package_json| {
+ if (package_json.name.len > 0) {
+ if (this_bundler.env.map.get(NpmArgs.package_name) == null) {
+ this_bundler.env.map.put(NpmArgs.package_name, package_json.name) catch unreachable;
+ }
+ }
+
+ this_bundler.env.map.putDefault("npm_package_json", package_json.source.path.text) catch unreachable;
+
+ if (package_json.version.len > 0) {
+ if (this_bundler.env.map.get(NpmArgs.package_version) == null) {
+ this_bundler.env.map.put(NpmArgs.package_version, package_json.version) catch unreachable;
+ }
+ }
+ }
+
+ return root_dir_info;
+ }
pub fn completions(ctx: Command.Context, default_completions: ?[]const string, reject_list: []const string, comptime filter: Filter) !ShellCompletions {
var shell_out = ShellCompletions{};
@@ -664,164 +826,11 @@ pub const RunCommand = struct {
Global.configureAllocator(.{ .long_running = false });
- var args = ctx.args;
- args.node_modules_bundle_path = null;
- args.node_modules_bundle_path_server = null;
- args.generate_node_module_bundle = false;
-
- var this_bundler = try bundler.Bundler.init(ctx.allocator, ctx.log, args, null, null);
- this_bundler.options.env.behavior = Api.DotEnvBehavior.load_all;
- this_bundler.options.env.prefix = "";
- this_bundler.env.quiet = true;
-
- this_bundler.resolver.care_about_bin_folder = true;
- this_bundler.resolver.care_about_scripts = true;
- defer {
- this_bundler.resolver.care_about_bin_folder = false;
- this_bundler.resolver.care_about_scripts = false;
- }
- this_bundler.configureLinker();
-
- var root_dir_info = this_bundler.resolver.readDirInfo(this_bundler.fs.top_level_dir) catch |err| {
- if (!log_errors) return false;
- if (Output.enable_ansi_colors) {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
- } else {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
- }
- Output.prettyErrorln("Error loading directory: \"{s}\"", .{@errorName(err)});
- Output.flush();
- return err;
- } orelse {
- if (Output.enable_ansi_colors) {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {};
- } else {
- ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {};
- }
- Output.prettyErrorln("Error loading current directory", .{});
- Output.flush();
- return error.CouldntReadCurrentDirectory;
- };
-
- var package_json_dir: string = "";
-
- {
- this_bundler.env.loadProcess();
-
- if (this_bundler.env.map.get("NODE_ENV")) |node_env| {
- if (strings.eqlComptime(node_env, "production")) {
- this_bundler.options.production = true;
- }
- }
-
- // Run .env in the root dir
- this_bundler.runEnvLoader() catch {};
-
- if (root_dir_info.getEntries()) |dir| {
-
- // Run .env again if it exists in a parent dir
- if (this_bundler.options.production) {
- this_bundler.env.load(&this_bundler.fs.fs, dir, false) catch {};
- } else {
- this_bundler.env.load(&this_bundler.fs.fs, dir, true) catch {};
- }
- }
- }
-
- var bin_dirs = this_bundler.resolver.binDirs();
-
- if (root_dir_info.enclosing_package_json) |package_json| {
- if (root_dir_info.package_json == null) {
- // no trailing slash
- package_json_dir = std.mem.trimRight(u8, package_json.source.path.name.dir, "/");
- }
- }
-
- var PATH = this_bundler.env.map.get("PATH") orelse "";
-
- var ORIGINAL_PATH = PATH;
-
- if (bin_dirs.len > 0 or package_json_dir.len > 0) {
- var new_path_len: usize = PATH.len + 2;
- for (bin_dirs) |bin| {
- new_path_len += bin.len + 1;
- }
-
- if (package_json_dir.len > 0) {
- new_path_len += package_json_dir.len + 1;
- }
-
- var new_path = try std.ArrayList(u8).initCapacity(ctx.allocator, new_path_len);
-
- {
- var needs_colon = false;
- if (package_json_dir.len > 0) {
- defer needs_colon = true;
- if (needs_colon) {
- try new_path.append(':');
- }
- try new_path.appendSlice(package_json_dir);
- }
-
- var bin_dir_i: i32 = @intCast(i32, bin_dirs.len) - 1;
- // Iterate in reverse order
- // Directories are added to bin_dirs in top-down order
- // That means the parent-most node_modules/.bin will be first
- while (bin_dir_i >= 0) : (bin_dir_i -= 1) {
- defer needs_colon = true;
- if (needs_colon) {
- try new_path.append(':');
- }
- try new_path.appendSlice(bin_dirs[@intCast(usize, bin_dir_i)]);
- }
-
- if (needs_colon) {
- try new_path.append(':');
- }
- try new_path.appendSlice(PATH);
- }
-
- this_bundler.env.map.put("PATH", new_path.items) catch unreachable;
- PATH = new_path.items;
- }
-
- this_bundler.env.loadNodeJSConfig(this_bundler.fs) catch {};
- this_bundler.env.map.putDefault("npm_config_local_prefix", this_bundler.fs.top_level_dir) catch unreachable;
-
- // we have no way of knowing what version they're expecting without running the node executable
- // running the node executable is too slow
- // so we will just hardcode it to LTS
- this_bundler.env.map.putDefault(
- "npm_config_user_agent",
- // the use of npm/? is copying yarn
- // e.g.
- // > "yarn/1.22.4 npm/? node/v12.16.3 darwin x64",
- "bun/" ++ Global.package_json_version ++ " npm/? node/v16.14.0 " ++ Global.os_name ++ " " ++ Global.arch_name,
- ) catch unreachable;
-
- if (this_bundler.env.get("npm_execpath") == null) {
- // we don't care if this fails
- if (std.fs.selfExePathAlloc(ctx.allocator)) |self_exe_path| {
- this_bundler.env.map.putDefault("npm_execpath", self_exe_path) catch unreachable;
- } else |_| {}
- }
-
var did_print = false;
+ var ORIGINAL_PATH: string = "";
+ var this_bundler: bundler.Bundler = undefined;
+ var root_dir_info = try configureEnvForRun(ctx, &this_bundler, null, &ORIGINAL_PATH, log_errors);
if (root_dir_info.enclosing_package_json) |package_json| {
- if (package_json.name.len > 0) {
- if (this_bundler.env.map.get(NpmArgs.package_name) == null) {
- this_bundler.env.map.put(NpmArgs.package_name, package_json.name) catch unreachable;
- }
- }
-
- this_bundler.env.map.putDefault("npm_package_json", package_json.source.path.text) catch unreachable;
-
- if (package_json.version.len > 0) {
- if (this_bundler.env.map.get(NpmArgs.package_version) == null) {
- this_bundler.env.map.put(NpmArgs.package_version, package_json.version) catch unreachable;
- }
- }
-
if (package_json.scripts) |scripts| {
switch (script_name_to_search.len) {
0 => {
@@ -861,7 +870,7 @@ pub const RunCommand = struct {
if (scripts.get(temp_script_buffer[1..])) |prescript| {
if (!try runPackageScript(
- ctx,
+ ctx.allocator,
prescript,
temp_script_buffer[1..],
this_bundler.fs.top_level_dir,
@@ -874,7 +883,7 @@ pub const RunCommand = struct {
}
if (!try runPackageScript(
- ctx,
+ ctx.allocator,
script_content,
script_name_to_search,
this_bundler.fs.top_level_dir,
@@ -887,7 +896,7 @@ pub const RunCommand = struct {
if (scripts.get(temp_script_buffer)) |postscript| {
if (!try runPackageScript(
- ctx,
+ ctx.allocator,
postscript,
temp_script_buffer,
this_bundler.fs.top_level_dir,
@@ -915,6 +924,7 @@ pub const RunCommand = struct {
return false;
}
+ const PATH = this_bundler.env.map.get("PATH") orelse "";
var path_for_which = PATH;
if (comptime bin_dirs_only) {
path_for_which = "";
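
The new configureEnvForRun above centralizes the environment setup that `bun run` previously did inline so that `bun install` can reuse it: it loads the relevant .env files, prepends the enclosing package.json directory and every ancestor node_modules/.bin (innermost first, so the nearest bin wins) to the inherited PATH, and hands the untouched value back through the ORIGINAL_PATH out-parameter. A rough standalone sketch of just the PATH-joining step, not part of the patch; the helper name buildRunPath is invented for illustration, and the std.ArrayList usage assumes the same Zig version as the surrounding code:

const std = @import("std");

// Sketch only: joins the package.json dir, each node_modules/.bin (innermost
// first, mirroring the reverse iteration over bin_dirs above), and the
// original PATH into one colon-separated string.
fn buildRunPath(
    allocator: std.mem.Allocator,
    package_json_dir: []const u8,
    bin_dirs: []const []const u8, // collected top-down: parent-most first
    original_path: []const u8,
) ![]u8 {
    var new_path = std.ArrayList(u8).init(allocator);
    errdefer new_path.deinit();

    if (package_json_dir.len > 0) {
        try new_path.appendSlice(package_json_dir);
        try new_path.append(':');
    }

    // Walk bin_dirs backwards so the deepest node_modules/.bin is searched first.
    var i = bin_dirs.len;
    while (i > 0) {
        i -= 1;
        try new_path.appendSlice(bin_dirs[i]);
        try new_path.append(':');
    }

    try new_path.appendSlice(original_path);
    return new_path.toOwnedSlice();
}
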
diff --git a/src/install/install.zig b/src/install/install.zig
index 7373cf90d..5fc2c71c8 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -47,7 +47,7 @@ const ExtractTarball = @import("./extract_tarball.zig");
const Npm = @import("./npm.zig");
const Bitset = @import("./bit_set.zig").DynamicBitSetUnmanaged;
const z_allocator = @import("../memory_allocator.zig").z_allocator;
-
+const RunCommand = @import("../cli/run_command.zig").RunCommand;
threadlocal var initialized_store = false;
pub const Lockfile = @import("./lockfile.zig");
@@ -2941,6 +2941,10 @@ pub const PackageManager = struct {
this.local_package_features.dev_dependencies = false;
}
+ if (cli.global or cli.ignore_scripts) {
+ this.do.run_scripts = false;
+ }
+
this.local_package_features.optional_dependencies = !cli.omit.optional;
const disable_progress_bar = default_disable_progress_bar or cli.no_progress;
@@ -3006,6 +3010,7 @@ pub const PackageManager = struct {
save_lockfile: bool = true,
load_lockfile: bool = true,
install_packages: bool = true,
+ run_scripts: bool = true,
save_yarn_lock: bool = false,
print_meta_hash_string: bool = false,
verify_integrity: bool = true,
@@ -3406,6 +3411,7 @@ pub const PackageManager = struct {
clap.parseParam("--verbose Excessively verbose logging") catch unreachable,
clap.parseParam("--no-progress Disable the progress bar") catch unreachable,
clap.parseParam("--no-verify Skip verifying integrity of newly downloaded packages") catch unreachable,
+ clap.parseParam("--ignore-scripts Skip lifecycle scripts in the project's package.json (dependency scripts are never run)") catch unreachable,
clap.parseParam("-g, --global Install globally") catch unreachable,
clap.parseParam("--cwd <STR> Set a specific cwd") catch unreachable,
clap.parseParam("--backend <STR> Platform-specific optimizations for installing dependencies. For macOS, \"clonefile\" (default), \"copyfile\"") catch unreachable,
@@ -3454,6 +3460,7 @@ pub const PackageManager = struct {
verbose: bool = false,
no_progress: bool = false,
no_verify: bool = false,
+ ignore_scripts: bool = false,
link_native_bins: []const string = &[_]string{},
@@ -3516,6 +3523,7 @@ pub const PackageManager = struct {
cli.no_cache = args.flag("--no-cache");
cli.silent = args.flag("--silent");
cli.verbose = args.flag("--verbose");
+ cli.ignore_scripts = args.flag("--ignore-scripts");
if (args.option("--config")) |opt| {
cli.config = opt;
@@ -4710,6 +4718,7 @@ pub const PackageManager = struct {
.is_main = true,
.check_for_duplicate_dependencies = true,
.peer_dependencies = false,
+ .scripts = true,
},
);
@@ -4834,6 +4843,7 @@ pub const PackageManager = struct {
.is_main = true,
.check_for_duplicate_dependencies = true,
.peer_dependencies = false,
+ .scripts = true,
},
);
@@ -4972,6 +4982,7 @@ pub const PackageManager = struct {
manager.progress.refresh();
}
+
manager.lockfile.saveToDisk(manager.options.save_lockfile_path);
if (comptime log_level.showProgress()) {
node.end();
@@ -4985,6 +4996,38 @@ pub const PackageManager = struct {
}
}
+ // Install script order for npm 8.3.0:
+ // 1. preinstall
+ // 2. install
+ // 3. postinstall
+ // 4. preprepare
+ // 5. prepare
+ // 6. postprepare
+
+ const run_lifecycle_scripts = manager.options.do.run_scripts and manager.lockfile.scripts.hasAny() and manager.options.do.install_packages;
+ const has_pre_lifecycle_scripts = manager.lockfile.scripts.preinstall.items.len > 0;
+ const needs_configure_bundler_for_run = run_lifecycle_scripts and !has_pre_lifecycle_scripts;
+
+ if (run_lifecycle_scripts and has_pre_lifecycle_scripts) {
+ // We need to figure out the PATH and other environment variables.
+ // To do that, we reuse the code from bun run.
+ // This is expensive: it traverses the entire directory tree up to the root,
+ // so we only want to do it when strictly necessary.
+ {
+ var this_bundler: bundler.Bundler = undefined;
+ var ORIGINAL_PATH: string = "";
+ _ = try RunCommand.configureEnvForRun(
+ ctx,
+ &this_bundler,
+ manager.env,
+ &ORIGINAL_PATH,
+ log_level != .silent,
+ );
+ }
+
+ try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "preinstall");
+ }
+
var install_summary = PackageInstall.Summary{};
if (manager.options.do.install_packages) {
install_summary = try manager.installPackages(
@@ -5091,6 +5134,57 @@ pub const PackageManager = struct {
Output.prettyln("<r> Failed to install <red><b>{d}<r> packages\n", .{install_summary.fail});
}
+ if (run_lifecycle_scripts and install_summary.fail == 0) {
+ // We need to figure out the PATH and other environment variables.
+ // To do that, we reuse the code from bun run.
+ // This is expensive: it traverses the entire directory tree up to the root,
+ // so we only want to do it when strictly necessary.
+ if (needs_configure_bundler_for_run) {
+ var this_bundler: bundler.Bundler = undefined;
+ var ORIGINAL_PATH: string = "";
+ _ = try RunCommand.configureEnvForRun(
+ ctx,
+ &this_bundler,
+ manager.env,
+ &ORIGINAL_PATH,
+ log_level != .silent,
+ );
+ } else {
+ // bun install may have installed new bins, so we need to update the PATH
+ // this can happen if node_modules/.bin didn't previously exist
+ // note: it is harmless to have the same directory in the PATH multiple times
+ const current_path = manager.env.map.get("PATH");
+
+ // TODO: windows
+ const cwd_without_trailing_slash = if (Fs.FileSystem.instance.top_level_dir.len > 1 and Fs.FileSystem.instance.top_level_dir[Fs.FileSystem.instance.top_level_dir.len - 1] == '/')
+ Fs.FileSystem.instance.top_level_dir[0 .. Fs.FileSystem.instance.top_level_dir.len - 1]
+ else
+ Fs.FileSystem.instance.top_level_dir;
+
+ try manager.env.map.put("PATH", try std.fmt.allocPrint(
+ ctx.allocator,
+ "{s}:{s}/node_modules/.bin",
+ .{
+ current_path,
+ cwd_without_trailing_slash,
+ },
+ ));
+ }
+
+ // 1. preinstall
+ // 2. install
+ // 3. postinstall
+ try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "install");
+ try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "postinstall");
+
+ // 4. preprepare
+ // 5. prepare
+ // 6. postprepare
+ try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "preprepare");
+ try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "prepare");
+ try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "postprepare");
+ }
+
if (!printed_timestamp) {
Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp());
Output.prettyln("<d> done<r>", .{});
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 27269c690..eb53bb763 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -106,9 +106,40 @@ string_pool: StringPool,
allocator: std.mem.Allocator,
scratch: Scratch = Scratch{},
+scripts: Scripts = .{},
+
const Stream = std.io.FixedBufferStream([]u8);
pub const default_filename = "bun.lockb";
+pub const Scripts = struct {
+ const StringArrayList = std.ArrayListUnmanaged(string);
+ const RunCommand = @import("../cli/run_command.zig").RunCommand;
+
+ preinstall: StringArrayList = .{},
+ install: StringArrayList = .{},
+ postinstall: StringArrayList = .{},
+ preprepare: StringArrayList = .{},
+ prepare: StringArrayList = .{},
+ postprepare: StringArrayList = .{},
+
+ pub fn hasAny(this: Scripts) bool {
+ return (this.preinstall.items.len +
+ this.install.items.len +
+ this.postinstall.items.len +
+ this.preprepare.items.len +
+ this.prepare.items.len +
+ this.postprepare.items.len) > 0;
+ }
+
+ pub fn run(this: Scripts, allocator: std.mem.Allocator, env: *DotEnv.Loader, silent: bool, comptime hook: []const u8) !void {
+ for (@field(this, hook).items) |script| {
+ std.debug.assert(Fs.FileSystem.instance_loaded);
+ const cwd = Fs.FileSystem.instance.top_level_dir;
+ _ = try RunCommand.runPackageScript(allocator, script, hook, cwd, env, &.{}, silent);
+ }
+ }
+};
+
pub fn isEmpty(this: *const Lockfile) bool {
return this.packages.len == 0 or this.packages.len == 1 or this.packages.get(0).resolutions.len == 0;
}
@@ -155,6 +186,7 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: std.mem.Allocator, l
this.workspace_path = "";
this.format = FormatVersion.current;
+ this.scripts = .{};
Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| {
return LoadFromDiskResult{ .err = .{ .step = .parse_file, .value = err } };
@@ -547,7 +579,7 @@ fn preprocessUpdateRequests(old: *Lockfile, updates: []PackageManager.UpdateRequ
}
pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile {
-
+ const old_scripts = old.scripts;
// We will only shrink the number of packages here.
// never grow
@@ -649,7 +681,7 @@ pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile
}
}
}
-
+ new.scripts = old_scripts;
return new;
}
@@ -1441,6 +1473,7 @@ pub fn initEmpty(this: *Lockfile, allocator: std.mem.Allocator) !void {
.string_pool = StringPool.init(allocator),
.allocator = allocator,
.scratch = Scratch.init(allocator),
+ .scripts = .{},
};
}
@@ -2229,6 +2262,38 @@ pub const Package = extern struct {
}
}
+ if (comptime features.scripts) {
+ if (json.asProperty("scripts")) |scripts_prop| {
+ if (scripts_prop.expr.data == .e_object) {
+ const scripts = .{
+ "install",
+ "postinstall",
+ "postprepare",
+ "preinstall",
+ "prepare",
+ "preprepare",
+ };
+
+ inline for (scripts) |script_name| {
+ if (scripts_prop.expr.get(script_name)) |script| {
+ if (script.asString(allocator)) |input| {
+ var list = @field(lockfile.scripts, script_name);
+ if (list.capacity == 0) {
+ list.capacity = 1;
+ list.items = try allocator.alloc(string, 1);
+ list.items[0] = input;
+ } else {
+ try list.append(allocator, input);
+ }
+
+ @field(lockfile.scripts, script_name) = list;
+ }
+ }
+ }
+ }
+ }
+ }
+
if (comptime ResolverContext != void) {
resolver.count(*Lockfile.StringBuilder, &string_builder, json);
}
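
Finally, lockfile.zig collects script bodies at parse time into per-hook lists on Lockfile.Scripts, carries them across clean() via old_scripts, and executes each entry through runPackageScript. The collection leans on Zig's inline for over a tuple of comptime-known names, with @field selecting the matching list. A self-contained illustration of that pattern, not part of the patch; the struct and values below are simplified stand-ins for the real lockfile types:

const std = @import("std");

const Scripts = struct {
    preinstall: std.ArrayListUnmanaged([]const u8) = .{},
    install: std.ArrayListUnmanaged([]const u8) = .{},
    postinstall: std.ArrayListUnmanaged([]const u8) = .{},
};

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    var scripts = Scripts{};
    const names = .{ "preinstall", "install", "postinstall" };

    inline for (names) |name| {
        // @field resolves the struct field from the comptime-known name,
        // so one loop body serves every hook list.
        try @field(scripts, name).append(allocator, "echo " ++ name);
    }

    std.debug.print("{d} install script(s) collected\n", .{scripts.install.items.len});
}
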