aboutsummaryrefslogtreecommitdiff
path: root/src/resolver
diff options
context:
space:
mode:
authorGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-11-06 21:42:05 -0800
committerGravatar GitHub <noreply@github.com> 2022-11-06 21:42:05 -0800
commite45f72e8e422191adeb4fd1bad896dc6a47c76b3 (patch)
tree3a76da8b343c081dba84e0ac95f3c2cc2423106a /src/resolver
parent645cf903350a7fe5f5076100b7c4a6bc8cd1b431 (diff)
downloadbun-e45f72e8e422191adeb4fd1bad896dc6a47c76b3.tar.gz
bun-e45f72e8e422191adeb4fd1bad896dc6a47c76b3.tar.zst
bun-e45f72e8e422191adeb4fd1bad896dc6a47c76b3.zip
Automatically install npm packages when running a script in Bun's runtime (#1459)
* Update bundler.zig * WIP * Update README.md * Update README.md * wip * Support running scripts without package.json * Add `--no-auto-install` and `--prefer-offline` flags * WIP * wip * Update headers-handwritten.h * WIP * Build fixes * Fix UAF * Update install.zig * Must call .allocate() * Micro-optimization: only call .timestamp() once per tick when installing packages * Support progress bar * Extend the timestamp for package staleness checks to 1 day * Add `--prefer-latest`, `-i` CLI Flags * Fix crash * Support line text manually being set on an Error instance * Add a few more fields for error messages * Fix bug when counting 8 character strings in string builder * Implement error handling for automatic package installs! * Fix crash * Make it say module when there's a slash * Update module_loader.zig * Ban dependency versions in import specifiers when a package.json is present * Remove unused field * Update README.md * Update README.md * Update README.md * Update README.md Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Diffstat (limited to 'src/resolver')
-rw-r--r--src/resolver/dir_info.zig27
-rw-r--r--src/resolver/package_json.zig326
-rw-r--r--src/resolver/resolver.zig1100
3 files changed, 1213 insertions, 240 deletions
diff --git a/src/resolver/dir_info.zig b/src/resolver/dir_info.zig
index 86d165df9..0d1bac6a7 100644
--- a/src/resolver/dir_info.zig
+++ b/src/resolver/dir_info.zig
@@ -1,4 +1,5 @@
const bun = @import("../global.zig");
+const std = @import("std");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
@@ -35,17 +36,37 @@ enclosing_tsconfig_json: ?*const TSConfigJSON = null,
/// https://github.com/oven-sh/bun/issues/229
enclosing_package_json: ?*PackageJSON = null,
+package_json_for_dependencies: ?*PackageJSON = null,
+
abs_path: string = "",
entries: Index = undefined,
-has_node_modules: bool = false, // Is there a "node_modules" subdirectory?
-is_node_modules: bool = false, // Is this a "node_modules" directory?
package_json: ?*PackageJSON = null, // Is there a "package.json" file?
tsconfig_json: ?*TSConfigJSON = null, // Is there a "tsconfig.json" file in this directory or a parent directory?
abs_real_path: string = "", // If non-empty, this is the real absolute path resolving any symlinks
+flags: Flags.Set = Flags.Set{},
+
+/// Is there a "node_modules" subdirectory?
+pub inline fn hasNodeModules(this: *const DirInfo) bool {
+ return this.flags.contains(.has_node_modules);
+}
+/// Is this a "node_modules" directory?
+pub inline fn isNodeModules(this: *const DirInfo) bool {
+ return this.flags.contains(.is_node_modules);
+}
+
+pub const Flags = enum {
+ /// This directory is a node_modules directory
+ is_node_modules,
+ /// This directory has a node_modules subdirectory
+ has_node_modules,
+
+ pub const Set = std.enums.EnumSet(Flags);
+};
+
pub fn hasParentPackage(this: *const DirInfo) bool {
const parent = this.getParent() orelse return false;
- return !parent.is_node_modules;
+ return !parent.isNodeModules();
}
pub fn getFileDescriptor(dirinfo: *const DirInfo) StoredFileDescriptorType {
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index e33c5ac60..034debe59 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -28,6 +28,26 @@ pub const MacroImportReplacementMap = std.StringArrayHashMap(string);
pub const MacroMap = std.StringArrayHashMapUnmanaged(MacroImportReplacementMap);
const ScriptsMap = std.StringArrayHashMap(string);
+const Semver = @import("../install/semver.zig");
+const Dependency = @import("../install/dependency.zig");
+const String = @import("../install/semver.zig").String;
+const Version = Semver.Version;
+const Install = @import("../install/install.zig");
+const FolderResolver = @import("../install/resolvers/folder_resolver.zig");
+
+const Architecture = @import("../install/npm.zig").Architecture;
+const OperatingSystem = @import("../install/npm.zig").OperatingSystem;
+pub const DependencyMap = struct {
+ map: HashMap = .{},
+ source_buf: []const u8 = "",
+
+ pub const HashMap = std.ArrayHashMapUnmanaged(
+ String,
+ Dependency,
+ String.ArrayHashContext,
+ false,
+ );
+};
pub const PackageJSON = struct {
pub const LoadFramework = enum {
@@ -85,6 +105,12 @@ pub const PackageJSON = struct {
scripts: ?*ScriptsMap = null,
+ arch: Architecture = Architecture.all,
+ os: OperatingSystem = OperatingSystem.all,
+
+ package_manager_package_id: Install.PackageID = Install.invalid_package_id,
+ dependencies: DependencyMap = .{},
+
// Present if the "browser" field is present. This field is intended to be
// used by bundlers and lets you redirect the paths of certain 3rd-party
// modules that don't work in the browser to other modules that shim that
@@ -538,12 +564,13 @@ pub const PackageJSON = struct {
}
pub fn parse(
- comptime ResolverType: type,
- r: *ResolverType,
+ r: *resolver.Resolver,
input_path: string,
dirname_fd: StoredFileDescriptorType,
- comptime generate_hash: bool,
+ package_id: ?Install.PackageID,
comptime include_scripts: bool,
+ comptime include_dependencies: @Type(.EnumLiteral),
+ comptime generate_hash: bool,
) ?PackageJSON {
// TODO: remove this extra copy
@@ -566,7 +593,7 @@ pub const PackageJSON = struct {
};
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The file \"{s}\" exists", .{package_json_path}) catch unreachable;
+ debug.addNoteFmt("The file \"{s}\" exists", .{package_json_path});
}
const key_path = fs.Path.init(package_json_path);
@@ -716,6 +743,160 @@ pub const PackageJSON = struct {
}
}
+ if (comptime include_dependencies == .main or include_dependencies == .local) {
+ update_dependencies: {
+ if (package_id) |pkg| {
+ package_json.package_manager_package_id = pkg;
+ break :update_dependencies;
+ }
+
+ // if there is a name & version, check if the lockfile has the package
+ if (package_json.name.len > 0 and package_json.version.len > 0) {
+ if (r.package_manager) |pm| {
+ const tag = Dependency.Version.Tag.infer(package_json.version);
+
+ if (tag == .npm) {
+ const sliced = Semver.SlicedString.init(package_json.version, package_json.version);
+ if (Dependency.parseWithTag(r.allocator, package_json.version, .npm, &sliced, r.log)) |dependency_version| {
+ if (dependency_version.value.npm.isExact()) {
+ if (pm.lockfile.resolve(package_json.name, dependency_version)) |resolved| {
+ package_json.package_manager_package_id = resolved;
+ if (resolved > 0) {
+ break :update_dependencies;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ if (json.get("cpu")) |os_field| {
+ var first = true;
+ if (os_field.asArray()) |*array| {
+ while (array.next()) |item| {
+ if (item.asString(bun.default_allocator)) |str| {
+ if (first) {
+ package_json.arch = Architecture.none;
+ first = false;
+ }
+ package_json.arch = package_json.arch.apply(str);
+ }
+ }
+ }
+ }
+
+ if (json.get("os")) |os_field| {
+ var first = true;
+ if (os_field.asArray()) |*array| {
+ while (array.next()) |item| {
+ if (item.asString(bun.default_allocator)) |str| {
+ if (first) {
+ package_json.os = OperatingSystem.none;
+ first = false;
+ }
+ package_json.os = package_json.os.apply(str);
+ }
+ }
+ }
+ }
+
+ const DependencyGroup = Install.Lockfile.Package.DependencyGroup;
+ const features = .{
+ .dependencies = true,
+ .dev_dependencies = include_dependencies == .main,
+ .optional_dependencies = false,
+ .peer_dependencies = false,
+ };
+
+ const dependency_groups = comptime brk: {
+ var out_groups: [
+ @as(usize, @boolToInt(features.dependencies)) +
+ @as(usize, @boolToInt(features.dev_dependencies)) +
+ @as(usize, @boolToInt(features.optional_dependencies)) +
+ @as(usize, @boolToInt(features.peer_dependencies))
+ ]DependencyGroup = undefined;
+ var out_group_i: usize = 0;
+ if (features.dependencies) {
+ out_groups[out_group_i] = DependencyGroup.dependencies;
+ out_group_i += 1;
+ }
+
+ if (features.dev_dependencies) {
+ out_groups[out_group_i] = DependencyGroup.dev;
+ out_group_i += 1;
+ }
+ if (features.optional_dependencies) {
+ out_groups[out_group_i] = DependencyGroup.optional;
+ out_group_i += 1;
+ }
+
+ if (features.peer_dependencies) {
+ out_groups[out_group_i] = DependencyGroup.peer;
+ out_group_i += 1;
+ }
+
+ break :brk out_groups;
+ };
+
+ var total_dependency_count: usize = 0;
+ inline for (dependency_groups) |group| {
+ if (json.get(group.field)) |group_json| {
+ if (group_json.data == .e_object) {
+ total_dependency_count += group_json.data.e_object.properties.len;
+ }
+ }
+ }
+
+ if (total_dependency_count > 0) {
+ package_json.dependencies.map = DependencyMap.HashMap{};
+ package_json.dependencies.source_buf = json_source.contents;
+ const ctx = String.ArrayHashContext{
+ .a_buf = json_source.contents,
+ .b_buf = json_source.contents,
+ };
+ package_json.dependencies.map.ensureTotalCapacityContext(
+ r.allocator,
+ total_dependency_count,
+ ctx,
+ ) catch unreachable;
+
+ inline for (dependency_groups) |group| {
+ if (json.get(group.field)) |group_json| {
+ if (group_json.data == .e_object) {
+ var group_obj = group_json.data.e_object;
+ for (group_obj.properties.slice()) |*prop| {
+ const name = prop.key orelse continue;
+ const name_str = name.asString(r.allocator) orelse continue;
+ const version_value = prop.value orelse continue;
+ const version_str = version_value.asString(r.allocator) orelse continue;
+ const sliced_str = Semver.SlicedString.init(version_str, version_str);
+
+ if (Dependency.parse(
+ r.allocator,
+ version_str,
+ &sliced_str,
+ r.log,
+ )) |dependency_version| {
+ const dependency = Dependency{
+ .name = String.init(name_str, name_str),
+ .version = dependency_version,
+ .name_hash = bun.hash(name_str),
+ .behavior = group.behavior,
+ };
+ package_json.dependencies.map.putAssumeCapacityContext(
+ dependency.name,
+ dependency,
+ ctx,
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
// used by `bun run`
if (include_scripts) {
read_scripts: {
@@ -1043,8 +1224,49 @@ pub const ESModule = struct {
pub const Package = struct {
name: string,
+ version: string = "",
subpath: string,
+ pub const External = struct {
+ name: Semver.String = .{},
+ version: Semver.String = .{},
+ subpath: Semver.String = .{},
+ };
+
+ pub fn count(this: Package, builder: *Semver.String.Builder) void {
+ builder.count(this.name);
+ builder.count(this.version);
+ builder.count(this.subpath);
+ }
+
+ pub fn clone(this: Package, builder: *Semver.String.Builder) External {
+ return .{
+ .name = builder.appendUTF8WithoutPool(Semver.String, this.name, 0),
+ .version = builder.appendUTF8WithoutPool(Semver.String, this.version, 0),
+ .subpath = builder.appendUTF8WithoutPool(Semver.String, this.subpath, 0),
+ };
+ }
+
+ pub fn toExternal(this: Package, buffer: []const u8) External {
+ return .{
+ .name = Semver.String.init(buffer, this.name),
+ .version = Semver.String.init(buffer, this.version),
+ .subpath = Semver.String.init(buffer, this.subpath),
+ };
+ }
+
+ pub fn withAutoVersion(this: Package) Package {
+ if (this.version.len == 0) {
+ return .{
+ .name = this.name,
+ .subpath = this.subpath,
+ .version = ">=0.0.0",
+ };
+ }
+
+ return this;
+ }
+
pub fn parseName(specifier: string) ?string {
var slash = strings.indexOfCharNeg(specifier, '/');
if (!strings.startsWithChar(specifier, '@')) {
@@ -1059,6 +1281,27 @@ pub const ESModule = struct {
}
}
+ pub fn parseVersion(specifier_after_name: string) ?string {
+ if (strings.indexOfChar(specifier_after_name, '/')) |slash| {
+ // "foo@/bar" is not a valid specifier
+ // "foo@/" is not a valid specifier
+ // "foo/@/bar" is not a valid specifier
+ // "foo@1/bar" is a valid specifier
+ // "foo@^123.2.3+ba-ab/bar" is a valid specifier
+ // ^^^^^^^^^^^^^^
+ // this is the version
+
+ const remainder = specifier_after_name[0..slash];
+ if (remainder.len > 0 and remainder[0] == '@') {
+ return remainder[1..];
+ }
+
+ return remainder;
+ }
+
+ return null;
+ }
+
pub fn parse(specifier: string, subpath_buf: []u8) ?Package {
if (specifier.len == 0) return null;
var package = Package{ .name = parseName(specifier) orelse return null, .subpath = "" };
@@ -1066,11 +1309,30 @@ pub const ESModule = struct {
if (strings.startsWith(package.name, ".") or strings.indexAnyComptime(package.name, "\\%") != null)
return null;
- std.mem.copy(u8, subpath_buf[1..], specifier[package.name.len..]);
- subpath_buf[0] = '.';
- package.subpath = subpath_buf[0 .. specifier[package.name.len..].len + 1];
+ const offset: usize = if (package.name.len == 0 or package.name[0] != '@') 0 else 1;
+ if (strings.indexOfChar(specifier[offset..], '@')) |at| {
+ package.version = parseVersion(specifier[offset..][at..]) orelse "";
+ if (package.version.len == 0) {
+ package.version = specifier[offset..][at..];
+ if (package.version.len > 0 and package.version[0] == '@') {
+ package.version = package.version[1..];
+ }
+ }
+ package.name = specifier[0 .. at + offset];
+
+ parseSubpath(&package.subpath, specifier[@minimum(package.name.len + package.version.len + 1, specifier.len)..], subpath_buf);
+ } else {
+ parseSubpath(&package.subpath, specifier[package.name.len..], subpath_buf);
+ }
+
return package;
}
+
+ pub fn parseSubpath(subpath: *[]const u8, specifier: string, subpath_buf: []u8) void {
+ std.mem.copy(u8, subpath_buf[1..], specifier);
+ subpath_buf[0] = '.';
+ subpath.* = subpath_buf[0 .. specifier.len + 1];
+ }
};
const ReverseKind = enum { exact, pattern, prefix };
@@ -1170,7 +1432,7 @@ pub const ESModule = struct {
) Resolution {
if (exports.data == .invalid) {
if (r.debug_logs) |logs| {
- logs.addNote("Invalid package configuration") catch unreachable;
+ logs.addNote("Invalid package configuration");
}
return Resolution{ .status = .InvalidPackageConfiguration, .debug = .{ .token = exports.first_token } };
@@ -1210,7 +1472,7 @@ pub const ESModule = struct {
}
if (r.debug_logs) |logs| {
- logs.addNoteFmt("The path \"{s}\" was not exported", .{subpath}) catch unreachable;
+ logs.addNoteFmt("The path \"{s}\" was not exported", .{subpath});
}
return Resolution{ .status = .PackagePathNotExported, .debug = .{ .token = exports.first_token } };
@@ -1224,13 +1486,13 @@ pub const ESModule = struct {
package_url: string,
) Resolution {
if (r.debug_logs) |logs| {
- logs.addNoteFmt("Checking object path map for \"{s}\"", .{match_key}) catch unreachable;
+ logs.addNoteFmt("Checking object path map for \"{s}\"", .{match_key});
}
if (!strings.endsWithChar(match_key, '.')) {
if (match_obj.valueForKey(match_key)) |target| {
if (r.debug_logs) |log| {
- log.addNoteFmt("Found \"{s}\"", .{match_key}) catch unreachable;
+ log.addNoteFmt("Found \"{s}\"", .{match_key});
}
return r.resolveTarget(package_url, target, "", is_imports, false);
@@ -1248,7 +1510,7 @@ pub const ESModule = struct {
const target = expansion.value;
const subpath = match_key[expansion.key.len - 1 ..];
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath }) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath });
}
return r.resolveTarget(package_url, target, subpath, is_imports, true);
@@ -1259,7 +1521,7 @@ pub const ESModule = struct {
const target = expansion.value;
const subpath = match_key[expansion.key.len..];
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath }) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath });
}
var result = r.resolveTarget(package_url, target, subpath, is_imports, false);
@@ -1273,13 +1535,13 @@ pub const ESModule = struct {
}
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" did not match", .{expansion.key}) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" did not match", .{expansion.key});
}
}
}
if (r.debug_logs) |log| {
- log.addNoteFmt("No keys matched \"{s}\"", .{match_key}) catch unreachable;
+ log.addNoteFmt("No keys matched \"{s}\"", .{match_key});
}
return Resolution{
@@ -1301,12 +1563,12 @@ pub const ESModule = struct {
switch (target.data) {
.string => |str| {
if (r.debug_logs) |log| {
- log.addNoteFmt("Checking path \"{s}\" against target \"{s}\"", .{ subpath, str }) catch unreachable;
- log.increaseIndent() catch unreachable;
+ log.addNoteFmt("Checking path \"{s}\" against target \"{s}\"", .{ subpath, str });
+ log.increaseIndent();
}
defer {
if (r.debug_logs) |log| {
- log.decreaseIndent() catch unreachable;
+ log.decreaseIndent();
}
}
@@ -1315,7 +1577,7 @@ pub const ESModule = struct {
if (comptime !pattern) {
if (subpath.len > 0 and !strings.endsWithChar(str, '/')) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it doesn't end with a \"/\"", .{str}) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it doesn't end with a \"/\"", .{str});
}
return Resolution{ .path = str, .status = .InvalidModuleSpecifier, .debug = .{ .token = target.first_token } };
@@ -1325,7 +1587,7 @@ pub const ESModule = struct {
// If target does not start with "./", then...
if (!strings.startsWith(str, "./")) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it doesn't start with a \"./\"", .{str}) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it doesn't start with a \"./\"", .{str});
}
if (internal and !strings.hasPrefixComptime(str, "../") and !strings.hasPrefix(str, "/")) {
@@ -1335,7 +1597,7 @@ pub const ESModule = struct {
_ = std.mem.replace(u8, str, "*", subpath, &resolve_target_buf2);
const result = resolve_target_buf2[0..len];
if (r.debug_logs) |log| {
+ log.addNoteFmt("Substituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, str, result });
+ log.addNoteFmt("Subsituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, str, result });
}
return Resolution{ .path = result, .status = .PackageResolve, .debug = .{ .token = target.first_token } };
@@ -1343,7 +1605,7 @@ pub const ESModule = struct {
var parts2 = [_]string{ str, subpath };
const result = resolve_path.joinStringBuf(&resolve_target_buf2, parts2, .auto);
if (r.debug_logs) |log| {
- log.addNoteFmt("Resolved \".{s}\" to \".{s}\"", .{ str, result }) catch unreachable;
+ log.addNoteFmt("Resolved \".{s}\" to \".{s}\"", .{ str, result });
}
return Resolution{ .path = result, .status = .PackageResolve, .debug = .{ .token = target.first_token } };
@@ -1357,7 +1619,7 @@ pub const ESModule = struct {
// segments after the first segment, throw an Invalid Package Target error.
if (findInvalidSegment(str)) |invalid| {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid }) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid });
}
return Resolution{ .path = str, .status = .InvalidPackageTarget, .debug = .{ .token = target.first_token } };
@@ -1371,7 +1633,7 @@ pub const ESModule = struct {
// segments after the first segment, throw an Invalid Package Target error.
if (findInvalidSegment(resolved_target)) |invalid| {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid }) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid });
}
return Resolution{ .path = str, .status = .InvalidModuleSpecifier, .debug = .{ .token = target.first_token } };
@@ -1383,7 +1645,7 @@ pub const ESModule = struct {
_ = std.mem.replace(u8, resolved_target, "*", subpath, &resolve_target_buf2);
const result = resolve_target_buf2[0..len];
if (r.debug_logs) |log| {
+ log.addNoteFmt("Substituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result });
+ log.addNoteFmt("Subsituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result });
}
return Resolution{ .path = result, .status = .Exact, .debug = .{ .token = target.first_token } };
@@ -1391,7 +1653,7 @@ pub const ESModule = struct {
var parts2 = [_]string{ package_url, str, subpath };
const result = resolve_path.joinStringBuf(&resolve_target_buf2, parts2, .auto);
if (r.debug_logs) |log| {
- log.addNoteFmt("Substituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result }) catch unreachable;
+ log.addNoteFmt("Substituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result });
}
return Resolution{ .path = result, .status = .Exact, .debug = .{ .token = target.first_token } };
@@ -1406,7 +1668,7 @@ pub const ESModule = struct {
for (keys) |key, i| {
if (strings.eqlComptime(key, "default") or r.conditions.contains(key)) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" matched", .{key}) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" matched", .{key});
}
var result = r.resolveTarget(package_url, slice.items(.value)[i], subpath, internal, pattern);
@@ -1420,12 +1682,12 @@ pub const ESModule = struct {
}
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" did not match", .{key}) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" did not match", .{key});
}
}
if (r.debug_logs) |log| {
- log.addNoteFmt("No keys matched", .{}) catch unreachable;
+ log.addNoteFmt("No keys matched", .{});
}
var return_target = target;
@@ -1489,7 +1751,7 @@ pub const ESModule = struct {
.array => |array| {
if (array.len == 0) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The path \"{s}\" is an empty array", .{subpath}) catch unreachable;
+ log.addNoteFmt("The path \"{s}\" is an empty array", .{subpath});
}
return Resolution{ .path = "", .status = .Null, .debug = .{ .token = target.first_token } };
@@ -1517,7 +1779,7 @@ pub const ESModule = struct {
},
.@"null" => {
if (r.debug_logs) |log| {
- log.addNoteFmt("The path \"{s}\" is null", .{subpath}) catch unreachable;
+ log.addNoteFmt("The path \"{s}\" is null", .{subpath});
}
return Resolution{ .path = "", .status = .Null, .debug = .{ .token = target.first_token } };
@@ -1526,7 +1788,7 @@ pub const ESModule = struct {
}
if (r.debug_logs) |logs| {
- logs.addNoteFmt("Invalid package target for path \"{s}\"", .{subpath}) catch unreachable;
+ logs.addNoteFmt("Invalid package target for path \"{s}\"", .{subpath});
}
return Resolution{ .status = .InvalidPackageTarget, .debug = .{ .token = target.first_token } };
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 2c14089ee..a6e6f9b94 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -38,6 +38,14 @@ const allocators = @import("../allocators.zig");
const Msg = logger.Msg;
const Path = Fs.Path;
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
+const PackageManager = @import("../install/install.zig").PackageManager;
+const Dependency = @import("../install/dependency.zig");
+const Install = @import("../install/install.zig");
+const Lockfile = @import("../install/lockfile.zig").Lockfile;
+const Package = @import("../install/lockfile.zig").Package;
+const Resolution = @import("../install/resolution.zig").Resolution;
+const Semver = @import("../install/semver.zig");
+const DotEnv = @import("../env_loader.zig");
pub fn isPackagePath(path: string) bool {
// this could probably be flattened into something more optimized
@@ -126,6 +134,13 @@ pub const Result = struct {
file_fd: StoredFileDescriptorType = 0,
import_kind: ast.ImportKind = undefined,
+ pub const Union = union(enum) {
+ success: Result,
+ failure: anyerror,
+ pending: PendingResolution,
+ not_found: void,
+ };
+
pub fn path(this: *Result) ?*Path {
if (!this.path_pair.primary.is_disabled)
return &this.path_pair.primary;
@@ -235,6 +250,7 @@ threadlocal var remap_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
threadlocal var load_as_file_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
threadlocal var remap_path_trailing_slash: [bun.MAX_PATH_BYTES]u8 = undefined;
threadlocal var tsconfig_paths_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var path_in_global_disk_cache_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
pub const DebugLogs = struct {
what: string = "",
@@ -256,33 +272,33 @@ pub const DebugLogs = struct {
// d.indent.deinit();
}
- pub fn increaseIndent(d: *DebugLogs) !void {
+ pub fn increaseIndent(d: *DebugLogs) void {
@setCold(true);
- try d.indent.append(" ");
+ d.indent.append(" ") catch unreachable;
}
- pub fn decreaseIndent(d: *DebugLogs) !void {
+ pub fn decreaseIndent(d: *DebugLogs) void {
@setCold(true);
d.indent.list.shrinkRetainingCapacity(d.indent.list.items.len - 1);
}
- pub fn addNote(d: *DebugLogs, _text: string) !void {
+ pub fn addNote(d: *DebugLogs, _text: string) void {
@setCold(true);
var text = _text;
const len = d.indent.len();
if (len > 0) {
- var __text = try d.notes.allocator.alloc(u8, text.len + len);
+ var __text = d.notes.allocator.alloc(u8, text.len + len) catch unreachable;
std.mem.copy(u8, __text, d.indent.list.items);
std.mem.copy(u8, __text[len..__text.len], _text);
d.notes.allocator.free(_text);
}
- try d.notes.append(logger.rangeData(null, logger.Range.None, text));
+ d.notes.append(logger.rangeData(null, logger.Range.None, text)) catch unreachable;
}
- pub fn addNoteFmt(d: *DebugLogs, comptime fmt: string, args: anytype) !void {
+ pub fn addNoteFmt(d: *DebugLogs, comptime fmt: string, args: anytype) void {
@setCold(true);
- return try d.addNote(try std.fmt.allocPrint(d.notes.allocator, fmt, args));
+ return d.addNote(std.fmt.allocPrint(d.notes.allocator, fmt, args) catch unreachable);
}
};
@@ -294,6 +310,62 @@ pub const MatchResult = struct {
package_json: ?*PackageJSON = null,
diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
dir_info: ?*DirInfo = null,
+
+ pub const Union = union(enum) {
+ not_found: void,
+ success: MatchResult,
+ pending: PendingResolution,
+ failure: anyerror,
+ };
+};
+
+pub const PendingResolution = struct {
+ esm: ESModule.Package.External = .{},
+ dependency: Dependency.Version = .{},
+ resolution_id: Install.PackageID = Install.invalid_package_id,
+ root_dependency_id: Install.PackageID = Install.invalid_package_id,
+ import_record_id: u32 = std.math.maxInt(u32),
+ string_buf: []u8 = "",
+ tag: Tag,
+
+ pub const List = std.MultiArrayList(PendingResolution);
+
+ pub fn deinitListItems(list_: List, allocator: std.mem.Allocator) void {
+ var list = list_;
+ var dependencies = list.items(.dependency);
+ var string_bufs = list.items(.string_buf);
+ for (dependencies) |*dependency| {
+ dependency.deinit();
+ }
+
+ for (string_bufs) |string_buf| {
+ allocator.free(string_buf);
+ }
+ }
+
+ pub fn deinit(this: *PendingResolution, allocator: std.mem.Allocator) void {
+ this.dependency.deinit();
+ allocator.free(this.string_buf);
+ }
+
+ pub const Tag = enum {
+ download,
+ resolve,
+ done,
+ };
+
+ pub fn init(
+ allocator: std.mem.Allocator,
+ esm: ESModule.Package,
+ dependency: Dependency.Version,
+ resolution_id: Install.PackageID,
+ ) !PendingResolution {
+ return PendingResolution{
+ .esm = try esm.copy(allocator),
+ .dependency = dependency,
+ .resolution_id = resolution_id,
+ };
+ }
};
pub const LoadResult = struct {
@@ -358,6 +430,11 @@ pub const Resolver = struct {
caches: CacheSet,
+ package_manager: ?*PackageManager = null,
+ onWakePackageManager: PackageManager.WakeHandler = .{},
+ main_file_for_package_manager: []const u8 = "",
+ env_loader: ?*DotEnv.Loader = null,
+
// These are sets that represent various conditions for the "exports" field
// in package.json.
// esm_conditions_default: std.StringHashMap(bool),
@@ -402,6 +479,27 @@ pub const Resolver = struct {
// all parent directories
dir_cache: *DirInfo.HashMap,
+ pub fn getPackageManager(this: *Resolver) *PackageManager {
+ if (this.package_manager != null) {
+ return this.package_manager.?;
+ }
+ bun.HTTPThead.init() catch unreachable;
+ this.package_manager = PackageManager.initWithRuntime(
+ this.log,
+ this.opts.install,
+ this.allocator,
+ .{},
+ this.env_loader.?,
+ ) catch @panic("Failed to initialize package manager");
+ this.package_manager.?.onWake = this.onWakePackageManager;
+
+ return this.package_manager.?;
+ }
+
+ pub inline fn usePackageManager(self: *const ThisResolver) bool {
+ return self.opts.global_cache.isEnabled();
+ }
+
pub fn init1(
allocator: std.mem.Allocator,
log: *logger.Log,
@@ -590,7 +688,13 @@ pub const Resolver = struct {
}
}
- pub fn resolve(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result {
+ pub fn resolveAndAutoInstall(
+ r: *ThisResolver,
+ source_dir: string,
+ import_path: string,
+ kind: ast.ImportKind,
+ global_cache: GlobalCache,
+ ) Result.Union {
const original_order = r.extension_order;
defer r.extension_order = original_order;
r.extension_order = switch (kind) {
@@ -613,10 +717,10 @@ pub const Resolver = struct {
r.debug_logs.?.deinit();
}
- r.debug_logs = try DebugLogs.init(r.allocator);
+ r.debug_logs = DebugLogs.init(r.allocator) catch unreachable;
}
- if (import_path.len == 0) return error.ModuleNotFound;
+ if (import_path.len == 0) return .{ .not_found = {} };
// Certain types of URLs default to being external for convenience
if (r.isExternalPattern(import_path) or
@@ -633,17 +737,19 @@ pub const Resolver = struct {
strings.startsWith(import_path, "//"))
{
if (r.debug_logs) |*debug| {
- try debug.addNote("Marking this path as implicitly external");
+ debug.addNote("Marking this path as implicitly external");
r.flushDebugLogs(.success) catch {};
}
- return Result{
- .import_kind = kind,
- .path_pair = PathPair{
- .primary = Path.init(import_path),
+ return .{
+ .success = Result{
+ .import_kind = kind,
+ .path_pair = PathPair{
+ .primary = Path.init(import_path),
+ },
+ .is_external = true,
+ .module_type = .esm,
},
- .is_external = true,
- .module_type = .esm,
};
}
@@ -653,22 +759,26 @@ pub const Resolver = struct {
// "@import 'data:text/css,body{background:white}';"
if (data_url.decode_mime_type() != .Unsupported) {
if (r.debug_logs) |*debug| {
- debug.addNote("Putting this path in the \"dataurl\" namespace") catch {};
+ debug.addNote("Putting this path in the \"dataurl\" namespace");
r.flushDebugLogs(.success) catch {};
}
- return Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } };
+ return .{
+ .success = Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } },
+ };
}
// "background: url(data:image/png;base64,iVBORw0KGgo=);"
if (r.debug_logs) |*debug| {
- debug.addNote("Marking this \"dataurl\" as external") catch {};
+ debug.addNote("Marking this \"dataurl\" as external");
r.flushDebugLogs(.success) catch {};
}
- return Result{
- .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") },
+ .is_external = true,
+ },
};
}
@@ -676,27 +786,48 @@ pub const Resolver = struct {
// virtual modules (e.g. stdin) if a resolve directory is not specified.
if (source_dir.len == 0) {
if (r.debug_logs) |*debug| {
- debug.addNote("Cannot resolve this path without a directory") catch {};
+ debug.addNote("Cannot resolve this path without a directory");
r.flushDebugLogs(.fail) catch {};
}
- return error.MissingResolveDir;
+ return .{ .failure = error.MissingResolveDir };
}
// r.mutex.lock();
// defer r.mutex.unlock();
errdefer (r.flushDebugLogs(.fail) catch {});
- var result = (try r.resolveWithoutSymlinks(source_dir, import_path, kind)) orelse {
- r.flushDebugLogs(.fail) catch {};
- return error.ModuleNotFound;
- };
- if (!strings.eqlComptime(result.path_pair.primary.namespace, "node"))
- try r.finalizeResult(&result, kind);
+ switch (r.resolveWithoutSymlinks(source_dir, import_path, kind, global_cache)) {
+ .success => |*result| {
+ if (!strings.eqlComptime(result.path_pair.primary.namespace, "node"))
+ r.finalizeResult(result, kind) catch |err| return .{ .failure = err };
- r.flushDebugLogs(.success) catch {};
- result.import_kind = kind;
- return result;
+ r.flushDebugLogs(.success) catch {};
+ result.import_kind = kind;
+ return .{ .success = result.* };
+ },
+ .failure => |e| {
+ r.flushDebugLogs(.fail) catch {};
+ return .{ .failure = e };
+ },
+ .pending => |pending| {
+ r.flushDebugLogs(.fail) catch {};
+ return .{ .pending = pending };
+ },
+ .not_found => {
+ r.flushDebugLogs(.fail) catch {};
+ return .{ .not_found = {} };
+ },
+ }
+ }
+
+ pub fn resolve(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result {
+ switch (r.resolveAndAutoInstall(source_dir, import_path, kind, GlobalCache.disable)) {
+ .success => |result| return result,
+ .pending, .not_found => return error.ModuleNotFound,
+
+ .failure => |e| return e,
+ }
}
const ModuleTypeMap = bun.ComptimeStringMap(options.ModuleType, .{
@@ -738,7 +869,7 @@ pub const Resolver = struct {
if (result.file_fd == 0) result.file_fd = query.entry.cache.fd;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.text, symlink_path }) catch {};
+ debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.text, symlink_path });
}
} else if (dir.abs_real_path.len > 0) {
var parts = [_]string{ dir.abs_real_path, query.entry.base() };
@@ -776,7 +907,7 @@ pub const Resolver = struct {
const symlink = try Fs.FileSystem.FilenameStore.instance.append(@TypeOf(out), out);
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text }) catch {};
+ debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text });
}
query.entry.cache.symlink = PathString.init(symlink);
if (result.file_fd == 0) result.file_fd = query.entry.cache.fd;
@@ -796,7 +927,13 @@ pub const Resolver = struct {
result.module_type = module_type;
}
- pub fn resolveWithoutSymlinks(r: *ThisResolver, source_dir: string, import_path_: string, kind: ast.ImportKind) !?Result {
+ pub fn resolveWithoutSymlinks(
+ r: *ThisResolver,
+ source_dir: string,
+ import_path_: string,
+ kind: ast.ImportKind,
+ global_cache: GlobalCache,
+ ) Result.Union {
var import_path = import_path_;
// This implements the module resolution algorithm from node.js, which is
@@ -819,7 +956,7 @@ pub const Resolver = struct {
// users will not be able to accidentally make use of these paths.
if (strings.startsWith(import_path, "/") or std.fs.path.isAbsolutePosix(import_path)) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path}) catch {};
+ debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path});
}
// First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
@@ -830,13 +967,15 @@ pub const Resolver = struct {
if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
// We don't set the directory fd here because it might remap an entirely different directory
- return Result{
- .path_pair = res.path_pair,
- .diff_case = res.diff_case,
- .package_json = res.package_json,
- .dirname_fd = res.dirname_fd,
- .file_fd = res.file_fd,
- .jsx = tsconfig.mergeJSX(result.jsx),
+ return .{
+ .success = Result{
+ .path_pair = res.path_pair,
+ .diff_case = res.diff_case,
+ .package_json = res.package_json,
+ .dirname_fd = res.dirname_fd,
+ .file_fd = res.file_fd,
+ .jsx = tsconfig.mergeJSX(result.jsx),
+ },
};
}
}
@@ -849,28 +988,32 @@ pub const Resolver = struct {
// That way we preserve the literal text in the output and don't generate
// a relative path from the output directory to that path.
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path}) catch {};
+ debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path});
}
- return Result{
- .path_pair = .{ .primary = Path.init(import_path) },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = .{ .primary = Path.init(import_path) },
+ .is_external = true,
+ },
};
}
// Run node's resolution rules (e.g. adding ".js")
if (r.loadAsFileOrDirectory(import_path, kind)) |entry| {
- return Result{
- .dirname_fd = entry.dirname_fd,
- .path_pair = entry.path_pair,
- .diff_case = entry.diff_case,
- .package_json = entry.package_json,
- .file_fd = entry.file_fd,
- .jsx = r.opts.jsx,
+ return .{
+ .success = Result{
+ .dirname_fd = entry.dirname_fd,
+ .path_pair = entry.path_pair,
+ .diff_case = entry.diff_case,
+ .package_json = entry.package_json,
+ .file_fd = entry.file_fd,
+ .jsx = r.opts.jsx,
+ },
};
}
- return null;
+ return .{ .not_found = {} };
}
// Check both relative and package paths for CSS URL tokens, with relative
@@ -889,12 +1032,14 @@ pub const Resolver = struct {
// That way we preserve the literal text in the output and don't generate
// a relative path from the output directory to that path.
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path}) catch {};
+ debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path});
}
- return Result{
- .path_pair = .{ .primary = Path.init(r.fs.dirname_store.append(@TypeOf(abs_path), abs_path) catch unreachable) },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = .{ .primary = Path.init(r.fs.dirname_store.append(@TypeOf(abs_path), abs_path) catch unreachable) },
+ .is_external = true,
+ },
};
}
@@ -912,23 +1057,28 @@ pub const Resolver = struct {
if (remap.len == 0) {
var _path = Path.init(r.fs.dirname_store.append(string, abs_path) catch unreachable);
_path.is_disabled = true;
- return Result{
- .path_pair = PathPair{
- .primary = _path,
+ return .{
+ .success = Result{
+ .path_pair = PathPair{
+ .primary = _path,
+ },
},
};
}
- if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| {
- result = Result{
- .path_pair = _result.path_pair,
- .diff_case = _result.diff_case,
- .dirname_fd = _result.dirname_fd,
- .package_json = pkg,
- .jsx = r.opts.jsx,
- };
- check_relative = false;
- check_package = false;
+ switch (r.resolveWithoutRemapping(import_dir_info, remap, kind, global_cache)) {
+ .success => |_result| {
+ result = Result{
+ .path_pair = _result.path_pair,
+ .diff_case = _result.diff_case,
+ .dirname_fd = _result.dirname_fd,
+ .package_json = pkg,
+ .jsx = r.opts.jsx,
+ };
+ check_relative = false;
+ check_package = false;
+ },
+ else => {},
}
}
}
@@ -945,7 +1095,7 @@ pub const Resolver = struct {
.jsx = r.opts.jsx,
};
} else if (!check_package) {
- return null;
+ return .{ .not_found = {} };
}
}
}
@@ -966,7 +1116,7 @@ pub const Resolver = struct {
result.module_type = .cjs;
result.package_json = @intToPtr(*PackageJSON, @ptrToInt(fallback_module.package_json));
result.is_from_node_modules = true;
- return result;
+ return .{ .success = result };
// "node:*
// "fs"
// "fs/*"
@@ -982,7 +1132,7 @@ pub const Resolver = struct {
result.module_type = .cjs;
result.path_pair.primary.is_disabled = true;
result.is_from_node_modules = true;
- return result;
+ return .{ .success = result };
}
}
@@ -992,11 +1142,13 @@ pub const Resolver = struct {
while (true) {
if (r.opts.external.node_modules.contains(query)) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query}) catch {};
+ debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query});
}
- return Result{
- .path_pair = .{ .primary = Path.init(query) },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = .{ .primary = Path.init(query) },
+ .is_external = true,
+ },
};
}
@@ -1007,7 +1159,7 @@ pub const Resolver = struct {
}
}
- var source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return null;
+ var source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return .{ .not_found = {} };
// Support remapping one package path to another via the "browser" field
if (source_dir_info.getEnclosingBrowserScope()) |browser_scope| {
@@ -1020,30 +1172,37 @@ pub const Resolver = struct {
if (remapped.len == 0) {
// "browser": {"module": false}
// does the module exist in the filesystem?
- if (r.loadNodeModules(import_path, kind, source_dir_info, false)) |node_module| {
- var pair = node_module.path_pair;
- pair.primary.is_disabled = true;
- if (pair.secondary != null) {
- pair.secondary.?.is_disabled = true;
- }
- return Result{
- .path_pair = pair,
- .dirname_fd = node_module.dirname_fd,
- .diff_case = node_module.diff_case,
- .package_json = package_json,
- .jsx = r.opts.jsx,
- };
- } else {
- // "browser": {"module": false}
- // the module doesn't exist and it's disabled
- // so we should just not try to load it
- var primary = Path.init(import_path);
- primary.is_disabled = true;
- return Result{
- .path_pair = PathPair{ .primary = primary },
- .diff_case = null,
- .jsx = r.opts.jsx,
- };
+ switch (r.loadNodeModules(import_path, kind, source_dir_info, global_cache, false)) {
+ .success => |node_module| {
+ var pair = node_module.path_pair;
+ pair.primary.is_disabled = true;
+ if (pair.secondary != null) {
+ pair.secondary.?.is_disabled = true;
+ }
+ return .{
+ .success = Result{
+ .path_pair = pair,
+ .dirname_fd = node_module.dirname_fd,
+ .diff_case = node_module.diff_case,
+ .package_json = package_json,
+ .jsx = r.opts.jsx,
+ },
+ };
+ },
+ else => {
+ // "browser": {"module": false}
+ // the module doesn't exist and it's disabled
+ // so we should just not try to load it
+ var primary = Path.init(import_path);
+ primary.is_disabled = true;
+ return .{
+ .success = Result{
+ .path_pair = PathPair{ .primary = primary },
+ .diff_case = null,
+ .jsx = r.opts.jsx,
+ },
+ };
+ },
}
}
@@ -1053,54 +1212,59 @@ pub const Resolver = struct {
}
}
- if (r.resolveWithoutRemapping(source_dir_info, import_path, kind)) |res| {
- result.path_pair = res.path_pair;
- result.dirname_fd = res.dirname_fd;
- result.file_fd = res.file_fd;
- result.package_json = res.package_json;
- result.diff_case = res.diff_case;
- result.is_from_node_modules = result.is_from_node_modules or res.is_node_module;
- result.jsx = r.opts.jsx;
+ switch (r.resolveWithoutRemapping(source_dir_info, import_path, kind, global_cache)) {
+ .success => |res| {
+ result.path_pair = res.path_pair;
+ result.dirname_fd = res.dirname_fd;
+ result.file_fd = res.file_fd;
+ result.package_json = res.package_json;
+ result.diff_case = res.diff_case;
+ result.is_from_node_modules = result.is_from_node_modules or res.is_node_module;
+ result.jsx = r.opts.jsx;
- if (res.path_pair.primary.is_disabled and res.path_pair.secondary == null) {
- return result;
- }
+ if (res.path_pair.primary.is_disabled and res.path_pair.secondary == null) {
+ return .{ .success = result };
+ }
- if (res.package_json != null) {
- var base_dir_info = res.dir_info orelse (r.readDirInfo(res.path_pair.primary.name.dir) catch null) orelse return result;
- if (base_dir_info.getEnclosingBrowserScope()) |browser_scope| {
- if (r.checkBrowserMap(
- browser_scope,
- res.path_pair.primary.text,
- .AbsolutePath,
- )) |remap| {
- if (remap.len == 0) {
- result.path_pair.primary.is_disabled = true;
- result.path_pair.primary = Fs.Path.initWithNamespace(remap, "file");
- } else {
- if (r.resolveWithoutRemapping(browser_scope, remap, kind)) |remapped| {
- result.path_pair = remapped.path_pair;
- result.dirname_fd = remapped.dirname_fd;
- result.file_fd = remapped.file_fd;
- result.package_json = remapped.package_json;
- result.diff_case = remapped.diff_case;
-
- result.is_from_node_modules = result.is_from_node_modules or remapped.is_node_module;
- return result;
+ if (res.package_json != null) {
+ var base_dir_info = res.dir_info orelse (r.readDirInfo(res.path_pair.primary.name.dir) catch null) orelse return .{ .success = result };
+ if (base_dir_info.getEnclosingBrowserScope()) |browser_scope| {
+ if (r.checkBrowserMap(
+ browser_scope,
+ res.path_pair.primary.text,
+ .AbsolutePath,
+ )) |remap| {
+ if (remap.len == 0) {
+ result.path_pair.primary.is_disabled = true;
+ result.path_pair.primary = Fs.Path.initWithNamespace(remap, "file");
+ } else {
+ switch (r.resolveWithoutRemapping(browser_scope, remap, kind, global_cache)) {
+ .success => |remapped| {
+ result.path_pair = remapped.path_pair;
+ result.dirname_fd = remapped.dirname_fd;
+ result.file_fd = remapped.file_fd;
+ result.package_json = remapped.package_json;
+ result.diff_case = remapped.diff_case;
+
+ result.is_from_node_modules = result.is_from_node_modules or remapped.is_node_module;
+ return .{ .success = result };
+ },
+ else => {},
+ }
}
}
}
}
- }
- return result;
- } else {
- // Note: node's "self references" are not currently supported
- return null;
+ return .{ .success = result };
+ },
+ .pending => |p| return .{ .pending = p },
+ .failure => |p| return .{ .failure = p },
+ else => return .{ .not_found = {} },
}
}
- return result;
+ return .{ .success = result };
}
pub fn packageJSONForResolvedNodeModule(
@@ -1201,17 +1365,18 @@ pub const Resolver = struct {
import_path: string,
kind: ast.ImportKind,
_dir_info: *DirInfo,
+ global_cache: GlobalCache,
forbid_imports: bool,
- ) ?MatchResult {
+ ) MatchResult.Union {
var dir_info = _dir_info;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path }) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path });
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -1221,7 +1386,7 @@ pub const Resolver = struct {
// Try path substitutions first
if (tsconfig.paths.count() > 0) {
if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
- return res;
+ return .{ .success = res };
}
}
@@ -1232,7 +1397,7 @@ pub const Resolver = struct {
const abs = r.fs.absBuf(&paths, &load_as_file_or_directory_via_tsconfig_base_path);
if (r.loadAsFileOrDirectory(abs, kind)) |res| {
- return res;
+ return .{ .success = res };
}
// r.allocator.free(abs);
}
@@ -1248,9 +1413,9 @@ pub const Resolver = struct {
if (import_path.len == 1 or strings.hasPrefix(import_path, "#/")) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" must not equal \"#\" and must not start with \"#/\"", .{import_path}) catch {};
+ debug.addNoteFmt("The path \"{s}\" must not equal \"#\" and must not start with \"#/\"", .{import_path});
}
- return null;
+ return .{ .not_found = {} };
}
const esmodule = ESModule{
@@ -1269,24 +1434,34 @@ pub const Resolver = struct {
esm_resolution.path,
kind,
dir_info,
+ global_cache,
true,
);
- return r.handleESMResolution(esm_resolution, package_json.source.path.name.dir, kind, package_json);
+ if (r.handleESMResolution(esm_resolution, package_json.source.path.name.dir, kind, package_json)) |result| {
+ return .{ .success = result };
+ }
+
+ return .{ .not_found = {} };
}
}
}
}
+ var source_dir_info = dir_info;
+ var any_node_modules_folder = false;
+ const use_node_module_resolver = global_cache != .force;
+
// Then check for the package in any enclosing "node_modules" directories
- while (true) {
+ while (use_node_module_resolver) {
// Skip directories that are themselves called "node_modules", since we
// don't ever want to search for "node_modules/node_modules"
- if (dir_info.has_node_modules) {
+ if (dir_info.hasNodeModules()) {
+ any_node_modules_folder = true;
var _paths = [_]string{ dir_info.abs_path, "node_modules", import_path };
const abs_path = r.fs.absBuf(&_paths, &node_modules_check_buf);
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path}) catch {};
+ debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path});
}
if (esm_) |esm| {
@@ -1318,25 +1493,448 @@ pub const Resolver = struct {
// directory path accidentally being interpreted as URL escapes.
const esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root);
- return r.handleESMResolution(esm_resolution, abs_package_path, kind, package_json);
+ if (r.handleESMResolution(esm_resolution, abs_package_path, kind, package_json)) |result| {
+ return .{ .success = result };
+ }
+
+ return .{ .not_found = {} };
}
}
}
}
if (r.loadAsFileOrDirectory(abs_path, kind)) |res| {
- return res;
+ return .{ .success = res };
}
- // r.allocator.free(abs_path);
}
dir_info = dir_info.getParent() orelse break;
}
+ dir_info = source_dir_info;
+
+ // this is the magic!
+ if (global_cache.canUse(any_node_modules_folder) and r.usePackageManager() and esm_ != null) {
+ const esm = esm_.?.withAutoVersion();
+ load_module_from_cache: {
+
+ // If the source directory doesn't have a node_modules directory, we can
+ // check the global cache directory for a package.json file.
+ var manager = r.getPackageManager();
+ var dependency_version: Dependency.Version = .{};
+ var dependency_behavior = @intToEnum(Dependency.Behavior, Dependency.Behavior.normal);
+ // const initial_pending_tasks = manager.pending_tasks;
+ var resolved_package_id: Install.PackageID = brk: {
+ // check if the package.json in the source directory was already added to the lockfile
+ // and try to look up the dependency from there
+ if (dir_info.package_json_for_dependencies) |package_json| {
+ var dependencies_list: []const Dependency = &[_]Dependency{};
+ var string_buf: []const u8 = "";
+ const resolve_from_lockfile = package_json.package_manager_package_id != Install.invalid_package_id;
+
+ if (resolve_from_lockfile) {
+ const dependencies = &manager.lockfile.packages.items(.dependencies)[package_json.package_manager_package_id];
+
+ // try to find this package name in the dependencies of the enclosing package
+ dependencies_list = dependencies.get(manager.lockfile.buffers.dependencies.items);
+ string_buf = manager.lockfile.buffers.string_bytes.items;
+ } else if (esm_.?.version.len == 0) {
+ // If you don't specify a version, default to the one chosen in your package.json
+ dependencies_list = package_json.dependencies.map.values();
+ string_buf = package_json.dependencies.source_buf;
+ }
+
+ var hash: u64 = std.math.maxInt(u64);
+
+ for (dependencies_list) |dependency, dependency_id| {
+ const dep_name_ = &dependency.name;
+ const dep_name = dep_name_.slice(string_buf);
+ if (dep_name.len == esm.name.len) {
+ if (hash == std.math.maxInt(u64)) {
+ hash = bun.hash(dep_name);
+ }
+
+ if (hash != dependency.name_hash) {
+ continue;
+ }
+
+ std.debug.assert(strings.eql(dep_name, esm.name));
+
+ dependency_version = dependency.version;
+ dependency_behavior = dependency.behavior;
+
+ if (resolve_from_lockfile) {
+ const resolutions = &manager.lockfile.packages.items(.resolutions)[package_json.package_manager_package_id];
+
+ // found it!
+ break :brk resolutions.get(manager.lockfile.buffers.resolutions.items)[dependency_id];
+ }
+
+ break;
+ }
+ }
+ }
+
+ // check if the lockfile already resolved this package somewhere
+ {
+ if (dependency_version.tag == .uninitialized) {
+ const sliced_string = Semver.SlicedString.init(esm.version, esm.version);
+ if (esm_.?.version.len > 0 and dir_info.enclosing_package_json != null and global_cache.allowVersionSpecifier()) {
+ return .{ .failure = error.VersionSpecifierNotAllowedHere };
+ }
+ dependency_version = Dependency.parse(
+ r.allocator,
+ esm.version,
+ &sliced_string,
+ r.log,
+ ) orelse break :load_module_from_cache;
+ }
+
+ // first we check if the lockfile already has a version of this package somewhere at all
+ if (manager.lockfile.resolve(esm.name, dependency_version)) |id| {
+ break :brk id;
+ }
+ }
+
+ // If we get here, it means that the lockfile doesn't have this package at all.
+ // we know nothing
+ break :brk Install.invalid_package_id;
+ };
+
+ // Now, there are two possible states:
+ // 1) We have resolved the package ID, either from the
+ // lockfile globally OR from the particular package.json
+ // dependencies list
+ //
+ // 2) We parsed the Dependency.Version but there is no
+ // existing resolved package ID
+
+ // If its an exact version, we can just immediately look it up in the global cache and resolve from there
+ // If the resolved package ID is _not_ invalid, we can just check
+
+ // If this returns null, then it means we need to *resolve* the package
+ // Even after resolution, we might still need to download the package
+ // There are two steps here! Two steps!
+ const resolution: Resolution = brk: {
+ if (resolved_package_id != Install.invalid_package_id) {
+ break :brk manager.lockfile.packages.items(.resolution)[resolved_package_id];
+ }
+
+ // unsupported or not found dependency, we might need to install it to the cache
+ switch (r.enqueueDependencyToResolve(
+ dir_info.package_json_for_dependencies orelse dir_info.package_json,
+ esm,
+ dependency_behavior,
+ &resolved_package_id,
+ dependency_version,
+ )) {
+ .resolution => |res| break :brk res,
+ .pending => |pending| return .{ .pending = pending },
+ .failure => |err| return .{ .failure = err },
+ // this means we looked it up in the registry and the package doesn't exist or the version doesn't exist
+ .not_found => return .{ .not_found = {} },
+ }
+ };
+
+ const dir_path_for_resolution = manager.pathForResolution(resolved_package_id, resolution, &path_in_global_disk_cache_buf) catch |err| {
+ // if it's missing, we need to install it
+ if (err == error.FileNotFound) {
+ switch (manager.getPreinstallState(resolved_package_id, manager.lockfile)) {
+ .done => {
+ var path = Fs.Path.init(import_path);
+ path.is_disabled = true;
+ // this might mean the package is disabled
+ return .{
+ .success = .{
+ .path_pair = .{
+ .primary = path,
+ },
+ },
+ };
+ },
+ .extract, .extracting => |st| {
+ if (!global_cache.canInstall()) {
+ return .{ .not_found = {} };
+ }
+ var builder = Semver.String.Builder{};
+ esm.count(&builder);
+ builder.allocate(manager.allocator) catch unreachable;
+ const cloned = esm.clone(&builder);
+
+ if (st == .extract)
+ manager.enqueuePackageForDownload(
+ esm.name,
+ resolved_package_id,
+ resolution.value.npm.version,
+ manager.lockfile.str(resolution.value.npm.url),
+ .{
+ .root_request_id = 0,
+ },
+ );
+
+ return .{
+ .pending = .{
+ .esm = cloned,
+ .dependency = dependency_version,
+ .resolution_id = resolved_package_id,
+
+ .string_buf = builder.allocatedSlice(),
+ .tag = .download,
+ },
+ };
+ },
+ else => {},
+ }
+ }
+
+ return .{ .failure = err };
+ };
+
+ if (r.dirInfoForResolution(dir_path_for_resolution, resolved_package_id)) |dir_info_to_use_| {
+ if (dir_info_to_use_) |pkg_dir_info| {
+ const abs_package_path = pkg_dir_info.abs_path;
+
+ if (pkg_dir_info.package_json) |package_json| {
+ if (package_json.exports) |exports_map| {
+ // The condition set is determined by the kind of import
+ const esmodule = ESModule{
+ .conditions = switch (kind) {
+ ast.ImportKind.require,
+ ast.ImportKind.require_resolve,
+ => r.opts.conditions.require,
+ else => r.opts.conditions.import,
+ },
+ .allocator = r.allocator,
+ .debug_logs = if (r.debug_logs) |*debug|
+ debug
+ else
+ null,
+ };
+
+ // Resolve against the path "/", then join it with the absolute
+ // directory path. This is done because ESM package resolution uses
+ // URLs while our path resolution uses file system paths. We don't
+ // want problems due to Windows paths, which are very unlike URL
+ // paths. We also want to avoid any "%" characters in the absolute
+ // directory path accidentally being interpreted as URL escapes.
+ const esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root);
+
+ if (r.handleESMResolution(esm_resolution, abs_package_path, kind, package_json)) |*result| {
+ result.is_node_module = true;
+ return .{ .success = result.* };
+ }
+
+ return .{ .not_found = {} };
+ }
+ }
+
+ var _paths = [_]string{ pkg_dir_info.abs_path, esm.subpath };
+ const abs_path = r.fs.absBuf(&_paths, &node_modules_check_buf);
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path});
+ }
+
+ if (r.loadAsFileOrDirectory(abs_path, kind)) |*res| {
+ res.is_node_module = true;
+ return .{ .success = res.* };
+ }
+ }
+ } else |err| {
+ return .{ .failure = err };
+ }
+ }
+ }
+
// Mostly to cut scope, we don't resolve `NODE_PATH` environment variable.
// But also: https://github.com/nodejs/node/issues/38128#issuecomment-814969356
+ return .{ .not_found = {} };
+ }
+ fn dirInfoForResolution(
+ r: *ThisResolver,
+ dir_path: []const u8,
+ package_id: Install.PackageID,
+ ) !?*DirInfo {
+ std.debug.assert(r.package_manager != null);
- return null;
+ var dir_cache_info_result = r.dir_cache.getOrPut(dir_path) catch unreachable;
+ if (dir_cache_info_result.status == .exists) {
+ // we've already looked up this package before
+ return r.dir_cache.atIndex(dir_cache_info_result.index).?;
+ }
+ var rfs = &r.fs.fs;
+ var cached_dir_entry_result = rfs.entries.getOrPut(dir_path) catch unreachable;
+
+ var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined;
+ var needs_iter: bool = true;
+ var open_dir = std.fs.openDirAbsolute(dir_path, .{ .iterate = true }) catch |err| {
+ switch (err) {
+ error.FileNotFound => unreachable,
+ else => {
+ // TODO: handle this error better
+ r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Unable to open directory: {s}", .{std.mem.span(@errorName(err))}) catch unreachable;
+ return err;
+ },
+ }
+ };
+
+ if (rfs.entries.atIndex(cached_dir_entry_result.index)) |cached_entry| {
+ if (cached_entry.* == .entries) {
+ dir_entries_option = cached_entry;
+ needs_iter = false;
+ }
+ }
+
+ if (needs_iter) {
+ const allocator = r.fs.allocator;
+ dir_entries_option = rfs.entries.put(&cached_dir_entry_result, .{
+ .entries = Fs.FileSystem.DirEntry.init(dir_path),
+ }) catch unreachable;
+
+ if (FeatureFlags.store_file_descriptors) {
+ Fs.FileSystem.setMaxFd(open_dir.fd);
+ dir_entries_option.entries.fd = open_dir.fd;
+ }
+ var dir_iterator = open_dir.iterate();
+ while (dir_iterator.next() catch null) |_value| {
+ dir_entries_option.entries.addEntry(_value, allocator, void, void{}) catch unreachable;
+ }
+ }
+
+ // We must initialize it as empty so that the result index is correct.
+ // This is important so that browser_scope has a valid index.
+ var dir_info_ptr = r.dir_cache.put(&dir_cache_info_result, DirInfo{}) catch unreachable;
+
+ try r.dirInfoUncached(
+ dir_info_ptr,
+ dir_path,
+ dir_entries_option,
+ dir_cache_info_result,
+ cached_dir_entry_result.index,
+ // Packages in the global disk cache are top-level, we shouldn't try
+ // to check for a parent package.json
+ null,
+ allocators.NotFound,
+ open_dir.fd,
+ package_id,
+ );
+ return dir_info_ptr;
+ }
+
+ const DependencyToResolve = union(enum) {
+ not_found: void,
+ pending: PendingResolution,
+ failure: anyerror,
+ resolution: Resolution,
+ };
+
+ fn enqueueDependencyToResolve(
+ r: *ThisResolver,
+ package_json_: ?*PackageJSON,
+ esm: ESModule.Package,
+ behavior: Dependency.Behavior,
+ input_package_id_: *Install.PackageID,
+ version: Dependency.Version,
+ ) DependencyToResolve {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Enqueueing pending dependency \"{s}@{s}\"", .{ esm.name, esm.version });
+ }
+
+ const input_package_id = input_package_id_.*;
+ var pm = r.getPackageManager();
+ if (comptime Environment.allow_assert) {
+ // we should never be trying to resolve a dependency that is already resolved
+ std.debug.assert(pm.lockfile.resolve(esm.name, version) == null);
+ }
+
+ // Add the containing package to the lockfile
+
+ var package: Package = .{};
+
+ if (pm.lockfile.packages.len == 0 and input_package_id == Install.invalid_package_id) {
+ if (package_json_) |package_json| {
+ package = Package.fromPackageJSON(
+ pm.allocator,
+ pm.lockfile,
+ r.log,
+ package_json,
+ Install.Features{
+ .dev_dependencies = true,
+ .is_main = true,
+ .dependencies = true,
+ .optional_dependencies = true,
+ },
+ ) catch |err| {
+ return .{ .failure = err };
+ };
+
+ package.resolution = .{
+ .tag = .root,
+ .value = .{ .root = {} },
+ };
+
+ package = pm.lockfile.appendPackage(package) catch |err| {
+ return .{ .failure = err };
+ };
+ package_json.package_manager_package_id = package.meta.id;
+ } else {
+ // we're resolving an unknown package
+ // the unknown package is the root package
+ package = Package{
+ .name = Semver.String.init("", ""),
+ };
+ package.resolution = .{
+ .tag = .root,
+ .value = .{ .root = {} },
+ };
+ package = pm.lockfile.appendPackage(package) catch |err| {
+ return .{ .failure = err };
+ };
+ }
+ }
+
+ if (r.opts.prefer_offline_install) {
+ if (pm.resolveFromDiskCache(esm.name, version)) |package_id| {
+ input_package_id_.* = package_id;
+ return .{ .resolution = pm.lockfile.packages.items(.resolution)[package_id] };
+ }
+ }
+
+ if (input_package_id == Install.invalid_package_id or input_package_id == 0) {
+
+ // All packages are enqueued to the root
+ // because we download all the npm package dependencies
+ switch (pm.enqueueDependencyToRoot(esm.name, esm.version, version, behavior)) {
+ .resolution => |result| {
+ input_package_id_.* = result.package_id;
+ return .{ .resolution = result.resolution };
+ },
+ .pending => |id| {
+ var builder = Semver.String.Builder{};
+ esm.count(&builder);
+ builder.allocate(pm.allocator) catch unreachable;
+ const cloned = esm.clone(&builder);
+
+ return .{
+ .pending = .{
+ .esm = cloned,
+ .dependency = version,
+ .resolution_id = Install.invalid_package_id,
+ .root_dependency_id = id,
+ .string_buf = builder.allocatedSlice(),
+ .tag = .resolve,
+ },
+ };
+ },
+ .not_found => {
+ return .{ .not_found = {} };
+ },
+ .failure => |err| {
+ return .{ .failure = err };
+ },
+ }
+ }
+
+ bun.unreachablePanic("TODO: implement enqueueDependencyToResolve for non-root packages", .{});
}
fn handleESMResolution(r: *ThisResolver, esm_resolution_: ESModule.Resolution, abs_package_path: string, kind: ast.ImportKind, package_json: *PackageJSON) ?MatchResult {
@@ -1409,13 +2007,22 @@ pub const Resolver = struct {
}
}
- pub fn resolveWithoutRemapping(r: *ThisResolver, source_dir_info: *DirInfo, import_path: string, kind: ast.ImportKind) ?MatchResult {
+ pub fn resolveWithoutRemapping(
+ r: *ThisResolver,
+ source_dir_info: *DirInfo,
+ import_path: string,
+ kind: ast.ImportKind,
+ global_cache: GlobalCache,
+ ) MatchResult.Union {
if (isPackagePath(import_path)) {
- return r.loadNodeModules(import_path, kind, source_dir_info, false);
+ return r.loadNodeModules(import_path, kind, source_dir_info, global_cache, false);
} else {
const paths = [_]string{ source_dir_info.abs_path, import_path };
var resolved = r.fs.absBuf(&paths, &resolve_without_remapping_buf);
- return r.loadAsFileOrDirectory(resolved, kind);
+ if (r.loadAsFileOrDirectory(resolved, kind)) |result| {
+ return .{ .success = result };
+ }
+ return .{ .not_found = {} };
}
}
@@ -1469,12 +2076,34 @@ pub const Resolver = struct {
return bin_folders.constSlice();
}
- pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON {
+ pub fn parsePackageJSON(
+ r: *ThisResolver,
+ file: string,
+ dirname_fd: StoredFileDescriptorType,
+ package_id: ?Install.PackageID,
+ comptime allow_dependencies: bool,
+ ) !?*PackageJSON {
var pkg: PackageJSON = undefined;
if (!r.care_about_scripts) {
- pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, false) orelse return null;
+ pkg = PackageJSON.parse(
+ r,
+ file,
+ dirname_fd,
+ package_id,
+ true,
+ if (allow_dependencies) .local else .none,
+ false,
+ ) orelse return null;
} else {
- pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, true) orelse return null;
+ pkg = PackageJSON.parse(
+ r,
+ file,
+ dirname_fd,
+ package_id,
+ true,
+ if (allow_dependencies) .local else .none,
+ true,
+ ) orelse return null;
}
var _pkg = try r.allocator.create(PackageJSON);
@@ -1755,6 +2384,7 @@ pub const Resolver = struct {
r.dir_cache.atIndex(top_parent.index),
top_parent.index,
open_dir.fd,
+ null,
);
if (queue_slice.len == 0) {
@@ -1779,7 +2409,7 @@ pub const Resolver = struct {
// official TypeScript compiler
pub fn matchTSConfigPaths(r: *ThisResolver, tsconfig: *const TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path }) catch unreachable;
+ debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path });
}
var abs_base_url = tsconfig.base_url_for_paths;
@@ -1792,7 +2422,7 @@ pub const Resolver = struct {
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url}) catch unreachable;
+ debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url});
}
// Check for exact matches first
@@ -1857,7 +2487,7 @@ pub const Resolver = struct {
// prefix. This matches the behavior of the TypeScript compiler.
if (longest_match_prefix_length > -1) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix }) catch unreachable;
+ debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix });
}
for (longest_match.original_paths) |original_path| {
@@ -1920,7 +2550,7 @@ pub const Resolver = struct {
std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. cleaned.len + ext.len], ext);
const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. cleaned.len + ext.len];
// if (r.debug_logs) |*debug| {
- // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
+ // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path});
// }
if (map.get(new_path)) |_remapped| {
this.remapped = _remapped;
@@ -1950,7 +2580,7 @@ pub const Resolver = struct {
std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[index_path.len .. index_path.len + ext.len], ext);
const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. index_path.len + ext.len];
// if (r.debug_logs) |*debug| {
- // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
+ // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path});
// }
if (map.get(new_path)) |_remapped| {
this.remapped = _remapped;
@@ -2032,7 +2662,7 @@ pub const Resolver = struct {
// package and the parent package.
const isInSamePackage = brk: {
const parent = dir_info.getParent() orelse break :brk true;
- break :brk !parent.is_node_modules;
+ break :brk !parent.isNodeModules();
};
if (isInSamePackage) {
@@ -2054,13 +2684,13 @@ pub const Resolver = struct {
var field_rel_path = _field_rel_path;
// Is this a directory?
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path }) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path });
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -2142,7 +2772,7 @@ pub const Resolver = struct {
};
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable;
+ debug.addNoteFmt("Found file: \"{s}\"", .{out_buf});
}
if (dir_info.package_json) |package_json| {
@@ -2165,7 +2795,7 @@ pub const Resolver = struct {
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base }) catch unreachable;
+ debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base });
}
}
@@ -2264,13 +2894,13 @@ pub const Resolver = struct {
// Is this a directory?
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path}) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path});
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -2290,13 +2920,13 @@ pub const Resolver = struct {
const auto_main = r.opts.main_fields.ptr == options.Platform.DefaultMainFields.get(r.opts.platform).ptr;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text}) catch {};
+ debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text});
}
for (main_field_keys) |key| {
const field_rel_path = (main_field_values.get(key)) orelse {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Did not find main field \"{s}\"", .{key}) catch {};
+ debug.addNoteFmt("Did not find main field \"{s}\"", .{key});
}
continue;
};
@@ -2331,9 +2961,9 @@ pub const Resolver = struct {
// same time.
if (kind != ast.ImportKind.require) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text }) catch {};
+ debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text });
- debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text}) catch {};
+ debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text});
}
return MatchResult{
@@ -2352,7 +2982,7 @@ pub const Resolver = struct {
auto_main_result.path_pair.primary.text,
key,
pkg_json.source.key_path.text,
- }) catch {};
+ });
}
var _auto_main_result = auto_main_result;
_auto_main_result.package_json = package_json;
@@ -2380,12 +3010,12 @@ pub const Resolver = struct {
var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path}) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path});
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -2420,13 +3050,13 @@ pub const Resolver = struct {
// Try the plain path without any extensions
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for file \"{s}\" ", .{base}) catch {};
+ debug.addNoteFmt("Checking for file \"{s}\" ", .{base});
}
if (entries.get(base)) |query| {
if (query.entry.kind(rfs) == .file) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {};
+ debug.addNoteFmt("Found file \"{s}\" ", .{base});
}
const abs_path = brk: {
@@ -2455,13 +3085,13 @@ pub const Resolver = struct {
const file_name = buffer[path.len - base.len .. buffer.len];
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for file \"{s}\" ", .{buffer}) catch {};
+ debug.addNoteFmt("Checking for file \"{s}\" ", .{buffer});
}
if (entries.get(file_name)) |query| {
if (query.entry.kind(rfs) == .file) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file \"{s}\" ", .{buffer}) catch {};
+ debug.addNoteFmt("Found file \"{s}\" ", .{buffer});
}
// now that we've found it, we allocate it.
@@ -2513,7 +3143,7 @@ pub const Resolver = struct {
if (entries.get(buffer)) |query| {
if (query.entry.kind(rfs) == .file) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer}) catch {};
+ debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer});
}
return LoadResult{
@@ -2538,14 +3168,14 @@ pub const Resolver = struct {
}
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base}) catch {};
+ debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base});
}
}
}
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to find \"{s}\" ", .{path}) catch {};
+ debug.addNoteFmt("Failed to find \"{s}\" ", .{path});
}
if (comptime FeatureFlags.watch_directories) {
@@ -2568,6 +3198,7 @@ pub const Resolver = struct {
parent: ?*DirInfo,
parent_index: allocators.IndexType,
fd: FileDescriptorType,
+ package_id: ?Install.PackageID,
) anyerror!void {
var result = _result;
@@ -2587,18 +3218,18 @@ pub const Resolver = struct {
// base must
if (base.len > 1 and base[base.len - 1] == std.fs.path.sep) base = base[0 .. base.len - 1];
- info.is_node_modules = strings.eqlComptime(base, "node_modules");
+ info.flags.setPresent(.is_node_modules, strings.eqlComptime(base, "node_modules"));
// if (entries != null) {
- if (!info.is_node_modules) {
+ if (!info.isNodeModules()) {
if (entries.getComptimeQuery("node_modules")) |entry| {
- info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
+ info.flags.setPresent(.has_node_modules, (entry.entry.kind(rfs)) == .dir);
}
}
if (r.care_about_bin_folder) {
append_bin_dir: {
- if (info.has_node_modules) {
+ if (info.hasNodeModules()) {
if (entries.hasComptimeQuery("node_modules")) {
if (!bin_folders_loaded) {
bin_folders_loaded = true;
@@ -2622,7 +3253,7 @@ pub const Resolver = struct {
}
}
- if (info.is_node_modules) {
+ if (info.isNodeModules()) {
if (entries.getComptimeQuery(".bin")) |q| {
if (q.entry.kind(rfs) == .dir) {
if (!bin_folders_loaded) {
@@ -2663,9 +3294,14 @@ pub const Resolver = struct {
if (parent_package_json.name.len > 0 or r.care_about_bin_folder) {
info.enclosing_package_json = parent_package_json;
}
+
+ if (parent_package_json.dependencies.map.count() > 0 or parent_package_json.package_manager_package_id != Install.invalid_package_id) {
+ info.package_json_for_dependencies = parent_package_json;
+ }
}
info.enclosing_package_json = info.enclosing_package_json orelse parent.?.enclosing_package_json;
+ info.package_json_for_dependencies = info.package_json_for_dependencies orelse parent.?.package_json_for_dependencies;
// Make sure "absRealPath" is the real path of the directory (resolving any symlinks)
if (!r.opts.preserve_symlinks) {
@@ -2677,7 +3313,7 @@ pub const Resolver = struct {
var symlink = entry.symlink(rfs);
if (symlink.len > 0) {
if (r.debug_logs) |*logs| {
- try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+ logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
}
info.abs_real_path = symlink;
} else if (parent.?.abs_real_path.len > 0) {
@@ -2686,7 +3322,7 @@ pub const Resolver = struct {
symlink = r.fs.dirname_store.append(string, r.fs.absBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
if (r.debug_logs) |*logs| {
- try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+ logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
}
lookup.entry.cache.symlink = PathString.init(symlink);
info.abs_real_path = symlink;
@@ -2700,7 +3336,10 @@ pub const Resolver = struct {
if (entries.getComptimeQuery("package.json")) |lookup| {
const entry = lookup.entry;
if (entry.kind(rfs) == .file) {
- info.package_json = r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0) catch null;
+ info.package_json = if (r.usePackageManager() and !info.hasNodeModules() and !info.isNodeModules())
+ r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0, package_id, true) catch null
+ else
+ r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0, null, false) catch null;
if (info.package_json) |pkg| {
if (pkg.browser_map.count() > 0) {
@@ -2711,10 +3350,13 @@ pub const Resolver = struct {
if (pkg.name.len > 0 or r.care_about_bin_folder)
info.enclosing_package_json = pkg;
+ if (pkg.dependencies.map.count() > 0 or pkg.package_manager_package_id != Install.invalid_package_id)
+ info.package_json_for_dependencies = pkg;
+
if (r.debug_logs) |*logs| {
logs.addNoteFmt("Resolved package.json in \"{s}\"", .{
path,
- }) catch unreachable;
+ });
}
}
}
@@ -2836,3 +3478,51 @@ pub const RootPathPair = struct {
base_path: string,
package_json: *const PackageJSON,
};
+
+/// Policy modes for Bun's global package cache during module resolution.
+/// Controls whether the resolver may read from the cache and whether it may
+/// install packages into it (see `canUse` / `canInstall` below).
+pub const GlobalCache = enum {
+ allow_install,
+ read_only,
+ auto,
+ force,
+ fallback,
+ disable,
+
+ /// Parse a mode from its string name. Note: only four of the six modes
+ /// are listed here, so `allow_install` and `read_only` are internal-only
+ /// and cannot be selected by name — presumably intentional for the CLI;
+ /// TODO(review): confirm against the flag-parsing call site.
+ pub const Map = bun.ComptimeStringMap(GlobalCache, .{
+ .{ "auto", GlobalCache.auto },
+ .{ "force", GlobalCache.force },
+ .{ "disable", GlobalCache.disable },
+ .{ "fallback", GlobalCache.fallback },
+ });
+
+ /// Version specifiers (in import paths) are only honored in `.force` mode.
+ pub fn allowVersionSpecifier(this: GlobalCache) bool {
+ return this == .force;
+ }
+
+ /// Whether this mode permits consulting the global cache at all, given
+ /// whether the importing project has its own node_modules folder.
+ pub fn canUse(this: GlobalCache, has_a_node_modules_folder: bool) bool {
+ // When there is a node_modules folder, we default to false
+ // When there is NOT a node_modules folder, we default to true
+ // That is the difference between these two branches.
+ if (has_a_node_modules_folder) {
+ return switch (this) {
+ // Only `.auto` flips between the branches: it defers to a local
+ // node_modules folder when one exists.
+ .fallback, .allow_install, .force => true,
+ .read_only, .disable, .auto => false,
+ };
+ } else {
+ return switch (this) {
+ .fallback, .allow_install, .auto, .force => true,
+ .read_only, .disable => false,
+ };
+ }
+ }
+
+ /// Every mode except `.disable` counts as enabled.
+ pub fn isEnabled(this: GlobalCache) bool {
+ return this != .disable;
+ }
+
+ /// Whether this mode may trigger an installation (write) into the cache,
+ /// as opposed to only reading from it (`.read_only`) or ignoring it
+ /// entirely (`.disable`).
+ pub fn canInstall(this: GlobalCache) bool {
+ return switch (this) {
+ .auto, .allow_install, .force, .fallback => true,
+ else => false,
+ };
+ }
+};