author     2023-01-14 17:14:48 +0200
committer  2023-01-14 07:14:48 -0800
commit     7fa023b8b55e7726eba90f462661c1e2c4641951 (patch)
tree       b3ab4b2fa9ca7e74b1fc942dbc593f27d447e32c
parent     aa9e56edfeee3089dc9a3d820d021be20542d3da (diff)
support installation of NPM workspaces (#1764)
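This change teaches bun install to read the `workspaces` array in the root package.json, parse each member's package.json for its name, and link members into node_modules instead of fetching them from the registry. As a quick orientation before the diff, a minimal sketch of the layout this enables (names, versions, and paths are illustrative, mirroring the test fixtures at the end of this commit):

    // package.json (workspace root)
    {
      "name": "Foo",
      "version": "0.0.1",
      "workspaces": ["bar", "packages/baz"]
    }

    // bar/package.json (member; installed as a symlink at node_modules/Bar)
    {
      "name": "Bar",
      "version": "0.0.2"
    }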
-rw-r--r--  packages/bun-types/fs/promises.d.ts         10
-rw-r--r--  src/install/dependency.zig                 174
-rw-r--r--  src/install/install.zig                    162
-rw-r--r--  src/install/lockfile.zig                   405
-rw-r--r--  src/install/resolution.zig                   6
-rw-r--r--  src/install/resolvers/folder_resolver.zig   61
-rw-r--r--  src/resolver/resolver.zig                    8
-rw-r--r--  test/bun.js/install/bun-install.test.ts    344
8 files changed, 833 insertions, 337 deletions
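A note on the resolution model, offered as an editor's reading of the parser changes below rather than official documentation: the old `workspace://` specifier prefix is removed from `Version.Tag.infer`, so members are declared only by path in the root `workspaces` array. When a member depends on a sibling it keeps an ordinary version specifier; because the sibling's name is registered in the lockfile's new `workspace_paths` map, the dependency is tagged `.workspace` at parse time and resolved to the sibling's folder. For example:

    // bar/package.json: "Baz" resolves to the sibling workspace folder,
    // not the registry, because "Baz" is present in workspace_paths
    {
      "name": "Bar",
      "version": "0.0.2",
      "dependencies": { "Baz": "0.0.3" }
    }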
diff --git a/packages/bun-types/fs/promises.d.ts b/packages/bun-types/fs/promises.d.ts
index c4356da5c..dc0bce6d0 100644
--- a/packages/bun-types/fs/promises.d.ts
+++ b/packages/bun-types/fs/promises.d.ts
@@ -24,6 +24,7 @@ declare module "fs/promises" {
     WriteFileOptions,
     SimlinkType,
     Abortable,
+    RmOptions,
   } from "node:fs";
 
   interface FlagAndOpenMode {
@@ -670,6 +671,15 @@ declare module "fs/promises" {
       | BufferEncoding
       | null,
   ): Promise<string | Buffer>;
+  /**
+   * Asynchronously removes files and directories (modeled on the standard
+   * POSIX `rm` utility). The returned promise fulfills upon success.
+   * @since v14.14.0
+   */
+  export function rm(
+    path: PathLike,
+    options?: RmOptions,
+  ): Promise<void>;
 }
 
 declare module "node:fs/promises" {
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index e85d7cb85..b03d49e4a 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -235,6 +235,7 @@ pub const Version = struct {
             .folder, .dist_tag => lhs.literal.eql(rhs.literal, lhs_buf, rhs_buf),
             .tarball => lhs.value.tarball.eql(rhs.value.tarball, lhs_buf, rhs_buf),
             .symlink => lhs.value.symlink.eql(rhs.value.symlink, lhs_buf, rhs_buf),
+            .workspace => lhs.value.workspace.eql(rhs.value.workspace, lhs_buf, rhs_buf),
             else => true,
         };
     }
@@ -259,7 +260,6 @@ pub const Version = struct {
         /// https://stackoverflow.com/questions/51954956/whats-the-difference-between-yarn-link-and-npm-link
         symlink = 5,
 
-        /// TODO:
         workspace = 6,
 
         /// TODO:
         git = 7,
@@ -271,20 +271,24 @@ pub const Version = struct {
         }
 
         pub inline fn isGitHubRepoPath(dependency: string) bool {
-            var slash_count: u8 = 0;
+            if (dependency.len < 3) return false;
+            if (dependency[0] == '/') return false;
 
-            for (dependency) |c| {
-                slash_count += @as(u8, @boolToInt(c == '/'));
-                if (slash_count > 1 or c == '#') break;
+            var slash_index: usize = 0;
+            for (dependency) |c, i| {
                 // Must be alphanumeric
                 switch (c) {
-                    '\\', '/', 'a'...'z', 'A'...'Z', '0'...'9', '%' => {},
+                    '/' => {
+                        if (slash_index > 0) return false;
+                        slash_index = i;
+                    },
+                    '\\', 'a'...'z', 'A'...'Z', '0'...'9', '%' => {},
                     else => return false,
                 }
             }
 
-            return (slash_count == 1);
+            return slash_index > 0 and slash_index != dependency.len - 1;
        }
 
         // this won't work for query string params
@@ -331,32 +335,17 @@ pub const Version = struct {
                 // git://, git@, git+ssh
                 'g' => {
-                    if (strings.eqlComptime(
-                        dependency[0..@min("git://".len, dependency.len)],
-                        "git://",
-                    ) or strings.eqlComptime(
-                        dependency[0..@min("git@".len, dependency.len)],
-                        "git@",
-                    ) or strings.eqlComptime(
-                        dependency[0..@min("git+ssh".len, dependency.len)],
-                        "git+ssh",
-                    ) or strings.eqlComptime(
-                        dependency[0..@min("git+file".len, dependency.len)],
-                        "git+file",
-                    ) or strings.eqlComptime(
-                        dependency[0..@min("git+http".len, dependency.len)],
-                        "git+http",
-                    ) or strings.eqlComptime(
-                        dependency[0..@min("git+https".len, dependency.len)],
-                        "git+https",
-                    )) {
+                    if (strings.hasPrefixComptime(dependency, "git://") or
+                        strings.hasPrefixComptime(dependency, "git@") or
+                        strings.hasPrefixComptime(dependency, "git+ssh") or
+                        strings.hasPrefixComptime(dependency, "git+file") or
+                        strings.hasPrefixComptime(dependency, "git+http") or
+                        strings.hasPrefixComptime(dependency, "git+https"))
+                    {
                         return .git;
                     }
 
-                    if (strings.eqlComptime(
-                        dependency[0..@min("github".len, dependency.len)],
-                        "github",
-                    ) or isGitHubRepoPath(dependency)) {
+                    if (strings.hasPrefixComptime(dependency, "github:") or isGitHubRepoPath(dependency)) {
                         return .github;
                     }
@@ -378,24 +367,15 @@ pub const Version = struct {
                     }
 
                     var remainder = dependency;
-                    if (strings.eqlComptime(
-                        remainder[0..@min("https://".len, remainder.len)],
-                        "https://",
-                    )) {
+                    if (strings.hasPrefixComptime(remainder, "https://")) {
                         remainder = remainder["https://".len..];
                     }
-                    if (strings.eqlComptime(
-                        remainder[0..@min("http://".len, remainder.len)],
-                        "http://",
-                    )) {
+                    if (strings.hasPrefixComptime(remainder, "http://")) {
                         remainder = remainder["http://".len..];
                     }
-                    if (strings.eqlComptime(
-                        remainder[0..@min("github".len, remainder.len)],
-                        "github",
-                    ) or isGitHubRepoPath(remainder)) {
+                    if (strings.hasPrefixComptime(remainder, "github.com/") or isGitHubRepoPath(remainder)) {
                         return .github;
                     }
 
@@ -422,10 +402,7 @@ pub const Version = struct {
                     if (isTarball(dependency))
                         return .tarball;
 
-                    if (strings.eqlComptime(
-                        dependency[0..@min("file:".len, dependency.len)],
-                        "file:",
-                    )) {
+                    if (strings.hasPrefixComptime(dependency, "file:")) {
                         return .folder;
                     }
 
@@ -441,10 +418,7 @@ pub const Version = struct {
                     if (isTarball(dependency))
                         return .tarball;
 
-                    if (strings.eqlComptime(
-                        dependency[0..@min("link:".len, dependency.len)],
-                        "link:",
-                    )) {
+                    if (strings.hasPrefixComptime(dependency, "link:")) {
                         return .symlink;
                     }
 
@@ -455,25 +429,6 @@ pub const Version = struct {
                     return .dist_tag;
                 },
 
-                // workspace://
-                'w' => {
-                    if (strings.eqlComptime(
-                        dependency[0..@min("workspace://".len, dependency.len)],
-                        "workspace://",
-                    )) {
-                        return .workspace;
-                    }
-
-                    if (isTarball(dependency))
-                        return .tarball;
-
-                    if (isGitHubRepoPath(dependency)) {
-                        return .github;
-                    }
-
-                    return .dist_tag;
-                },
-
                 else => {},
             }
 
@@ -499,8 +454,7 @@ pub const Version = struct {
         /// Equivalent to npm link
         symlink: String,
 
-        /// Unsupported, but still parsed so an error can be thrown
-        workspace: void,
+        workspace: String,
         /// Unsupported, but still parsed so an error can be thrown
         git: void,
         /// Unsupported, but still parsed so an error can be thrown
@@ -525,16 +479,26 @@ pub fn eqlResolved(a: Dependency, b: Dependency) bool {
     return @as(Dependency.Version.Tag, a.version) == @as(Dependency.Version.Tag, b.version) and a.resolution == b.resolution;
 }
 
-pub fn parse(
+pub inline fn parse(
+    allocator: std.mem.Allocator,
+    dependency: string,
+    sliced: *const SlicedString,
+    log: ?*logger.Log,
+) ?Version {
+    return parseWithOptionalTag(allocator, dependency, null, sliced, log);
+}
+
+pub fn parseWithOptionalTag(
     allocator: std.mem.Allocator,
     dependency_: string,
+    tag_or_null: ?Dependency.Version.Tag,
     sliced: *const SlicedString,
     log: ?*logger.Log,
 ) ?Version {
     var dependency = std.mem.trimLeft(u8, dependency_, " \t\n\r");
 
     if (dependency.len == 0) return null;
-    const tag = Version.Tag.infer(dependency);
+    const tag = tag_or_null orelse Version.Tag.infer(dependency);
 
     if (tag == .npm and strings.hasPrefixComptime(dependency, "npm:")) {
         dependency = dependency[4..];
@@ -652,7 +616,14 @@ pub fn parseWithTag(
                 .literal = sliced.value(),
             };
         },
-        .workspace, .git, .github => {
+        .workspace => {
+            return Version{
+                .value = .{ .workspace = sliced.value() },
+                .tag = .workspace,
+                .literal = sliced.value(),
+            };
+        },
+        .git, .github => {
             if (log_) |log| log.addErrorFmt(null, logger.Loc.Empty, allocator, "Support for dependency type \"{s}\" is not implemented yet (\"{s}\")", .{ @tagName(tag), dependency }) catch unreachable;
             return null;
         },
@@ -667,6 +638,11 @@ pub const Behavior = enum(u8) {
     pub const optional: u8 = 1 << 2;
     pub const dev: u8 = 1 << 3;
     pub const peer: u8 = 1 << 4;
+    pub const workspace: u8 = 1 << 5;
+
+    pub inline fn isNormal(this: Behavior) bool {
+        return (@enumToInt(this) & Behavior.normal) != 0;
+    }
 
     pub inline fn isOptional(this: Behavior) bool {
         return (@enumToInt(this) & Behavior.optional) != 0 and !this.isPeer();
@@ -680,16 +656,48 @@ pub const Behavior = enum(u8) {
         return (@enumToInt(this) & Behavior.peer) != 0;
     }
 
-    pub inline fn isNormal(this: Behavior) bool {
-        return (@enumToInt(this) & Behavior.normal) != 0;
+    pub inline fn isWorkspace(this: Behavior) bool {
+        return (@enumToInt(this) & Behavior.workspace) != 0;
+    }
+
+    pub inline fn setNormal(this: Behavior, value: bool) Behavior {
+        if (value) {
+            return @intToEnum(Behavior, @enumToInt(this) | Behavior.normal);
+        } else {
+            return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.normal);
+        }
     }
 
     pub inline fn setOptional(this: Behavior, value: bool) Behavior {
-        return @intToEnum(Behavior, @enumToInt(this) | (@as(u8, @boolToInt(value))) << 2);
+        if (value) {
+            return @intToEnum(Behavior, @enumToInt(this) | Behavior.optional);
+        } else {
+            return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.optional);
+        }
     }
 
     pub inline fn setDev(this: Behavior, value: bool) Behavior {
-        return @intToEnum(Behavior, @enumToInt(this) | (@as(u8, @boolToInt(value))) << 2);
+        if (value) {
+            return @intToEnum(Behavior, @enumToInt(this) | Behavior.dev);
+        } else {
+            return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.dev);
+        }
+    }
+
+    pub inline fn setPeer(this: Behavior, value: bool) Behavior {
+        if (value) {
+            return @intToEnum(Behavior, @enumToInt(this) | Behavior.peer);
+        } else {
+            return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.peer);
+        }
+    }
+
+    pub inline fn setWorkspace(this: Behavior, value: bool) Behavior {
+        if (value) {
+            return @intToEnum(Behavior, @enumToInt(this) | Behavior.workspace);
+        } else {
+            return @intToEnum(Behavior, @enumToInt(this) & ~Behavior.workspace);
+        }
     }
 
     pub inline fn cmp(lhs: Behavior, rhs: Behavior) std.math.Order {
@@ -725,6 +733,13 @@ pub const Behavior = enum(u8) {
                 .lt;
         }
 
+        if (lhs.isWorkspace() != rhs.isWorkspace()) {
+            return if (lhs.isWorkspace())
+                .gt
+            else
+                .lt;
+        }
+
         return .eq;
     }
 
@@ -734,8 +749,9 @@ pub const Behavior = enum(u8) {
 
     pub fn isEnabled(this: Behavior, features: Features) bool {
         return this.isNormal() or
+            (features.optional_dependencies and this.isOptional()) or
             (features.dev_dependencies and this.isDev()) or
             (features.peer_dependencies and this.isPeer()) or
-            (features.optional_dependencies and this.isOptional());
+            this.isWorkspace();
     }
 };
diff --git a/src/install/install.zig b/src/install/install.zig
index a8b471058..a1a8d69a0 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -429,12 +429,13 @@ pub const Origin = enum(u8) {
 };
 
 pub const Features = struct {
-    optional_dependencies: bool = false,
+    dependencies: bool = true,
     dev_dependencies: bool = false,
-    scripts: bool = false,
-    peer_dependencies: bool = true,
     is_main: bool = false,
-    dependencies: bool = true,
+    optional_dependencies: bool = false,
+    peer_dependencies: bool = true,
+    scripts: bool = false,
+    workspaces: bool = false,
 
     check_for_duplicate_dependencies: bool = false,
 
@@ -444,25 +445,25 @@ pub const Features = struct {
         out |= @as(u8, @boolToInt(this.optional_dependencies)) << 2;
         out |= @as(u8, @boolToInt(this.dev_dependencies)) << 3;
         out |= @as(u8, @boolToInt(this.peer_dependencies)) << 4;
+        out |= @as(u8, @boolToInt(this.workspaces)) << 5;
         return @intToEnum(Behavior, out);
     }
 
     pub const folder = Features{
+        .dev_dependencies = true,
         .optional_dependencies = true,
+    };
+
+    pub const workspace = Features{
         .dev_dependencies = true,
-        .scripts = false,
-        .peer_dependencies = true,
-        .is_main = false,
-        .dependencies = true,
+        .optional_dependencies = true,
+        .scripts = true,
+        .workspaces = true,
     };
 
     pub const link = Features{
-        .optional_dependencies = false,
-        .dev_dependencies = false,
-        .scripts = false,
-        .peer_dependencies = false,
-        .is_main = false,
         .dependencies = false,
+        .peer_dependencies = false,
     };
 
     pub const npm = Features{
@@ -1265,34 +1266,58 @@ const PackageInstall = struct {
     }
 
     pub fn installFromLink(this: *PackageInstall, skip_delete: bool) Result {
-
+        const dest_path = this.destination_dir_subpath;
         // If this fails, we don't care.
         // we'll catch it the next error
-        if (!skip_delete and !strings.eqlComptime(this.destination_dir_subpath, ".")) this.uninstall() catch {};
+        if (!skip_delete and !strings.eqlComptime(dest_path, ".")) this.uninstall() catch {};
 
-        // cache_dir_subpath in here is actually the full path to the symlink pointing to the linked package
-        const symlinked_path = this.cache_dir_subpath;
-
-        std.os.symlinkatZ(symlinked_path, this.destination_dir.dir.fd, this.destination_dir_subpath) catch |err| {
-            return Result{
+        const subdir = std.fs.path.dirname(dest_path);
+        var dest_dir = if (subdir) |dir| brk: {
+            break :brk this.destination_dir.dir.makeOpenPath(dir, .{}) catch |err| return Result{
                 .fail = .{
                     .err = err,
                     .step = .linking,
                 },
             };
+        } else this.destination_dir.dir;
+        defer {
+            if (subdir != null) dest_dir.close();
+        }
+
+        var dest_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+        const dest_dir_path = dest_dir.realpath(".", &dest_buf) catch |err| return Result{
+            .fail = .{
+                .err = err,
+                .step = .linking,
+            },
         };
 
+        // cache_dir_subpath in here is actually the full path to the symlink pointing to the linked package
+        const symlinked_path = this.cache_dir_subpath;
+        var to_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+        const to_path = this.cache_dir.dir.realpath(symlinked_path, &to_buf) catch |err| return Result{
+            .fail = .{
+                .err = err,
+                .step = .linking,
+            },
+        };
+        const target = Path.relative(dest_dir_path, to_path);
 
-        if (isDanglingSymlink(symlinked_path)) {
-            return Result{
-                .fail = .{
-                    .err = error.DanglingSymlink,
-                    .step = .linking,
-                },
-            };
-        }
+        std.os.symlinkat(target, dest_dir.fd, std.fs.path.basename(dest_path)) catch |err| return Result{
+            .fail = .{
+                .err = err,
+                .step = .linking,
+            },
+        };
+
+        if (isDanglingSymlink(symlinked_path)) return Result{
+            .fail = .{
+                .err = error.DanglingSymlink,
+                .step = .linking,
+            },
+        };
 
         return Result{
-            .success = void{},
+            .success = {},
         };
     }
@@ -1498,7 +1523,6 @@ pub const PackageManager = struct {
     const PreallocatedNetworkTasks = std.BoundedArray(NetworkTask, 1024);
     const NetworkTaskQueue = std.HashMapUnmanaged(u64, void, IdentityContext(u64), 80);
-    const PackageIndex = std.AutoHashMapUnmanaged(u64, *Package);
     pub var verbose_install = false;
 
     const PackageDedupeList = std.HashMapUnmanaged(
@@ -2349,7 +2373,24 @@ pub const PackageManager = struct {
             },
             .folder => {
                 // relative to cwd
-                const res = FolderResolution.getOrPut(.{ .relative = void{} }, version, version.value.folder.slice(this.lockfile.buffers.string_bytes.items), this);
+                const res = FolderResolution.getOrPut(.{ .relative = .folder }, version, version.value.folder.slice(this.lockfile.buffers.string_bytes.items), this);
+
+                switch (res) {
+                    .err => |err| return err,
+                    .package_id => |package_id| {
+                        successFn(this, dependency_id, package_id);
+                        return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id) };
+                    },
+
+                    .new_package_id => |package_id| {
+                        successFn(this, dependency_id, package_id);
+                        return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id), .is_first_time = true };
+                    },
+                }
+            },
+            .workspace => {
+                // relative to cwd
+                const res = FolderResolution.getOrPut(.{ .relative = .workspace }, version, version.value.workspace.slice(this.lockfile.buffers.string_bytes.items), this);
 
                 switch (res) {
                     .err => |err| return err,
@@ -2370,12 +2411,12 @@ pub const PackageManager = struct {
                 switch (res) {
                     .err => |err| return err,
                     .package_id => |package_id| {
-                        this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
+                        successFn(this, dependency_id, package_id);
                         return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id) };
                     },
 
                     .new_package_id => |package_id| {
-                        this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
+                        successFn(this, dependency_id, package_id);
                         return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id), .is_first_time = true };
                     },
                 }
@@ -2431,14 +2472,6 @@ pub const PackageManager = struct {
         return &task.threadpool_task;
     }
 
-    pub inline fn enqueueDependency(this: *PackageManager, id: u32, dependency: Dependency, resolution: PackageID) !void {
-        return try this.enqueueDependencyWithMain(id, dependency, resolution, false);
-    }
-
-    pub inline fn enqueueMainDependency(this: *PackageManager, id: u32, dependency: Dependency, resolution: PackageID) !void {
-        return try this.enqueueDependencyWithMain(id, dependency, resolution, true);
-    }
-
     pub fn dynamicRootDependencies(this: *PackageManager) *std.ArrayList(Dependency.Pair) {
         if (this.dynamic_root_dependencies == null) {
             const root_deps = this.lockfile.rootPackage().?.dependencies.get(this.lockfile.buffers.dependencies.items);
@@ -2542,15 +2575,14 @@ pub const PackageManager = struct {
             // it might really be main
             if (!this.isRootDependency(id))
                 if (!dependency.behavior.isEnabled(switch (dependency.version.tag) {
-                    .folder => this.options.remote_package_features,
-                    .dist_tag, .npm => this.options.remote_package_features,
+                    .dist_tag, .folder, .npm => this.options.remote_package_features,
                     else => Features{},
                 }))
                     return;
         }
 
         switch (dependency.version.tag) {
-            .folder, .npm, .dist_tag => {
+            .dist_tag, .folder, .npm => {
                 retry_from_manifests_ptr: while (true) {
                     var resolve_result_ = this.getOrPutResolvedPackage(
                         name_hash,
@@ -2734,7 +2766,7 @@ pub const PackageManager = struct {
                 }
                 return;
             },
-            .symlink => {
+            .symlink, .workspace => {
                 const _result = this.getOrPutResolvedPackage(
                     name_hash,
                     name,
@@ -2917,10 +2949,11 @@ pub const PackageManager = struct {
                     const dependency = this.lockfile.buffers.dependencies.items[dependency_id];
                     const resolution = this.lockfile.buffers.resolutions.items[dependency_id];
 
-                    try this.enqueueDependency(
+                    try this.enqueueDependencyWithMain(
                         dependency_id,
                         dependency,
                         resolution,
+                        false,
                     );
                 },
 
@@ -3472,8 +3505,14 @@ pub const PackageManager = struct {
         positionals: []const string = &[_]string{},
         update: Update = Update{},
         dry_run: bool = false,
-        remote_package_features: Features = Features{ .peer_dependencies = false, .optional_dependencies = true },
-        local_package_features: Features = Features{ .peer_dependencies = false, .dev_dependencies = true },
+        remote_package_features: Features = Features{
+            .optional_dependencies = true,
+            .peer_dependencies = false,
+        },
+        local_package_features: Features = Features{
+            .dev_dependencies = true,
+            .peer_dependencies = false,
+        },
         // The idea here is:
         // 1. package has a platform-specific binary to install
         // 2. To prevent downloading & installing incompatible versions, they stick the "real" one in optionalDependencies
@@ -5657,13 +5696,24 @@ pub const PackageManager = struct {
                     // "mineflayer": "file:."
                     if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) {
                         installer.cache_dir_subpath = ".";
-                        installer.cache_dir = .{ .dir = std.fs.cwd() };
                     } else {
                         @memcpy(&this.folder_path_buf, folder.ptr, folder.len);
                         this.folder_path_buf[folder.len] = 0;
                         installer.cache_dir_subpath = std.meta.assumeSentinel(this.folder_path_buf[0..folder.len], 0);
-                        installer.cache_dir = .{ .dir = std.fs.cwd() };
                     }
+                    installer.cache_dir = .{ .dir = std.fs.cwd() };
+                },
+                .workspace => {
+                    const folder = resolution.value.workspace.slice(buf);
+                    // Handle when a package depends on itself
+                    if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) {
+                        installer.cache_dir_subpath = ".";
+                    } else {
+                        @memcpy(&this.folder_path_buf, folder.ptr, folder.len);
+                        this.folder_path_buf[folder.len] = 0;
+                        installer.cache_dir_subpath = std.meta.assumeSentinel(this.folder_path_buf[0..folder.len], 0);
+                    }
+                    installer.cache_dir = .{ .dir = std.fs.cwd() };
                 },
                 .symlink => {
                     const directory = this.manager.globalLinkDir() catch |err| {
@@ -5725,7 +5775,7 @@ pub const PackageManager = struct {
 
             if (needs_install) {
                 const result: PackageInstall.Result = switch (resolution.tag) {
-                    .symlink => installer.installFromLink(this.skip_delete),
+                    .symlink, .workspace => installer.installFromLink(this.skip_delete),
                     else => installer.install(this.skip_delete),
                 };
                 switch (result) {
@@ -6284,12 +6334,13 @@ pub const PackageManager = struct {
                 ctx.log,
                 package_json_source,
                 Features{
-                    .optional_dependencies = true,
+                    .check_for_duplicate_dependencies = true,
                     .dev_dependencies = true,
                     .is_main = true,
-                    .check_for_duplicate_dependencies = true,
+                    .optional_dependencies = true,
                     .peer_dependencies = false,
                     .scripts = true,
+                    .workspaces = true,
                 },
             );
             manager.lockfile.scripts = lockfile.scripts;
@@ -6409,12 +6460,13 @@ pub const PackageManager = struct {
             ctx.log,
             package_json_source,
             Features{
-                .optional_dependencies = true,
+                .check_for_duplicate_dependencies = true,
                 .dev_dependencies = true,
                 .is_main = true,
-                .check_for_duplicate_dependencies = true,
+                .optional_dependencies = true,
                 .peer_dependencies = false,
                 .scripts = true,
+                .workspaces = true,
             },
         );
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 91b0d4c53..4e008ac1e 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -92,10 +92,6 @@ pub const SmallExternalStringList = ExternalSlice(String);
 /// The version of the lockfile format, intended to prevent data corruption for format changes.
 format: FormatVersion = .v1,
 
-/// Not used yet.
-/// Eventually, this will be a relative path to a parent lockfile
-workspace_path: string = "",
-
 meta_hash: MetaHash = zero_hash,
 
 packages: Lockfile.Package.List = Lockfile.Package.List{},
@@ -110,6 +106,7 @@ allocator: std.mem.Allocator,
 scratch: Scratch = Scratch{},
 
 scripts: Scripts = .{},
+workspace_paths: std.ArrayHashMapUnmanaged(u32, String, ArrayIdentityContext, false) = .{},
 
 const Stream = std.io.FixedBufferStream([]u8);
 pub const default_filename = "bun.lockb";
@@ -187,9 +184,9 @@ pub fn loadFromDisk(this: *Lockfile, allocator: std.mem.Allocator, log: *logger.
 pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: std.mem.Allocator, log: *logger.Log) LoadFromDiskResult {
     var stream = Stream{ .buffer = buf, .pos = 0 };
-    this.workspace_path = "";
 
     this.format = FormatVersion.current;
     this.scripts = .{};
+    this.workspace_paths = .{};
 
     Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| {
         return LoadFromDiskResult{ .err = .{ .step = .parse_file, .value = err } };
@@ -424,7 +421,7 @@ pub const Tree = struct {
         const name_hashes: []const PackageNameHash = builder.name_hashes;
         const max_package_id = name_hashes.len;
 
-        const dependencies: []const Dependency = builder.dependencies[resolution_list.off .. resolution_list.off + resolution_list.len];
+        const dependencies: []const Dependency = builder.dependencies[resolution_list.off..][0..resolution_list.len];
 
         for (resolutions) |pid, j| {
             // Do not download/install "peerDependencies"
@@ -1477,6 +1474,7 @@ pub fn initEmpty(this: *Lockfile, allocator: std.mem.Allocator) !void {
         .allocator = allocator,
         .scratch = Scratch.init(allocator),
         .scripts = .{},
+        .workspace_paths = .{},
     };
 }
@@ -1604,7 +1602,7 @@ pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Pack
     return try appendPackageWithID(this, package_, id);
 }
 
-pub fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) !Lockfile.Package {
+fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) !Lockfile.Package {
     defer {
         if (comptime Environment.isDebug) {
             std.debug.assert(this.getPackageID(package_.name_hash, null, package_.resolution) != null);
@@ -1804,6 +1802,14 @@ pub const StringBuffer = std.ArrayListUnmanaged(u8);
 pub const ExternalStringBuffer = std.ArrayListUnmanaged(ExternalString);
 
 pub const Package = extern struct {
+    name: String = String{},
+    name_hash: PackageNameHash = 0,
+    resolution: Resolution = Resolution{},
+    dependencies: DependencySlice = DependencySlice{},
+    resolutions: PackageIDSlice = PackageIDSlice{},
+    meta: Meta = Meta{},
+    bin: Bin = Bin{},
+
     pub const DependencyGroup = struct {
         prop: string,
         field: string,
@@ -1813,6 +1819,7 @@ pub const Package = extern struct {
         pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = @intToEnum(Behavior, Behavior.dev) };
         pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = @intToEnum(Behavior, Behavior.optional) };
         pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = @intToEnum(Behavior, Behavior.peer) };
+        pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = @intToEnum(Behavior, Behavior.workspace) };
     };
 
     pub inline fn isDisabled(this: *const Lockfile.Package) bool {
@@ -2348,7 +2355,7 @@ pub const Package = extern struct {
         source: logger.Source,
         comptime features: Features,
     ) !void {
-        return try parse(lockfile, package, allocator, log, source, void, void{}, features);
+        return parse(lockfile, package, allocator, log, source, void, void{}, features);
     }
 
     pub fn parse(
@@ -2390,7 +2397,123 @@ pub const Package = extern struct {
         );
     }
 
-    pub fn parseWithJSON(
+    fn parseDependency(
+        lockfile: *Lockfile,
+        allocator: std.mem.Allocator,
+        log: *logger.Log,
+        source: logger.Source,
+        comptime group: DependencyGroup,
+        string_builder: *StringBuilder,
+        comptime features: Features,
+        package_dependencies: []Dependency,
+        dependencies: []Dependency,
+        in_workspace: bool,
+        tag: ?Dependency.Version.Tag,
+        workspace_path: ?String,
+        external_name: ExternalString,
+        version: string,
+        key: Expr,
+        value: Expr,
+    ) !?Dependency {
+        const external_version = string_builder.append(String, version);
+        var buf = lockfile.buffers.string_bytes.items;
+        const sliced = external_version.sliced(buf);
+
+        var dependency_version = Dependency.parseWithOptionalTag(
+            allocator,
+            sliced.slice,
+            tag,
+            &sliced,
+            log,
+        ) orelse Dependency.Version{};
+
+        switch (dependency_version.tag) {
+            .folder => {
+                dependency_version.value.folder = string_builder.append(
+                    String,
+                    Path.relative(
+                        FileSystem.instance.top_level_dir,
+                        Path.joinAbsString(
+                            FileSystem.instance.top_level_dir,
+                            &[_]string{
+                                source.path.name.dir,
+                                dependency_version.value.folder.slice(buf),
+                            },
+                            .posix,
+                        ),
+                    ),
+                );
+            },
+            .workspace => if (workspace_path) |path| {
+                dependency_version.value.workspace = path;
+            } else {
+                const path = string_builder.append(
+                    String,
+                    Path.relative(
+                        FileSystem.instance.top_level_dir,
+                        Path.joinAbsString(
+                            FileSystem.instance.top_level_dir,
+                            &[_]string{
+                                source.path.name.dir,
+                                dependency_version.value.workspace.slice(buf),
+                            },
+                            .posix,
+                        ),
+                    ),
+                );
+                dependency_version.value.workspace = path;
+                var workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, @truncate(u32, external_name.hash));
+                if (workspace_entry.found_existing) return error.@"Workspace name already exists";
+                workspace_entry.value_ptr.* = path;
+            },
+            else => {},
+        }
+
+        const this_dep = Dependency{
+            .behavior = if (group.behavior.isPeer()) group.behavior else group.behavior.setWorkspace(in_workspace),
+            .name = external_name.value,
+            .name_hash = external_name.hash,
+            .version = dependency_version,
+        };
+
+        if (comptime features.check_for_duplicate_dependencies) {
+            var entry = lockfile.scratch.duplicate_checker_map.getOrPutAssumeCapacity(external_name.hash);
+            if (entry.found_existing) {
+                // duplicate dependencies are allowed in optionalDependencies
+                if (comptime group.behavior.isOptional()) {
+                    for (package_dependencies[0 .. package_dependencies.len - dependencies.len]) |package_dep, j| {
+                        if (package_dep.name_hash == this_dep.name_hash) {
+                            package_dependencies[j] = this_dep;
+                            break;
+                        }
+                    }
+                    return null;
+                } else {
+                    var notes = try allocator.alloc(logger.Data, 1);
+
+                    notes[0] = logger.Data{
+                        .text = try std.fmt.allocPrint(lockfile.allocator, "\"{s}\" originally specified here", .{external_name.slice(buf)}),
+                        .location = logger.Location.init_or_nil(&source, source.rangeOfString(entry.value_ptr.*)),
+                    };
+
+                    try log.addRangeErrorFmtWithNotes(
+                        &source,
+                        source.rangeOfString(key.loc),
+                        lockfile.allocator,
+                        notes,
+                        "Duplicate dependency: \"{s}\" specified in package.json",
+                        .{external_name.slice(buf)},
+                    );
+                }
+            }
+
+            entry.value_ptr.* = value.loc;
+        }
+
+        return this_dep;
+    }
+
+    fn parseWithJSON(
         package: *Lockfile.Package,
         lockfile: *Lockfile,
         allocator: std.mem.Allocator,
@@ -2500,7 +2623,8 @@ pub const Package = extern struct {
             @as(usize, @boolToInt(features.dependencies)) +
                 @as(usize, @boolToInt(features.dev_dependencies)) +
                 @as(usize, @boolToInt(features.optional_dependencies)) +
-                @as(usize, @boolToInt(features.peer_dependencies))
+                @as(usize, @boolToInt(features.peer_dependencies)) +
+                @as(usize, @boolToInt(features.workspaces))
         ]DependencyGroup = undefined;
         var out_group_i: usize = 0;
         if (features.dependencies) {
@@ -2522,25 +2646,73 @@ pub const Package = extern struct {
             out_group_i += 1;
         }
 
+        if (features.workspaces) {
+            out_groups[out_group_i] = DependencyGroup.workspaces;
+            out_group_i += 1;
+        }
+
         break :brk out_groups;
     };
 
+    var workspace_names: []string = &.{};
     inline for (dependency_groups) |group| {
-        if (json.asProperty(group.prop)) |dependencies_q| {
-            if (dependencies_q.expr.data == .e_object) {
-                for (dependencies_q.expr.data.e_object.properties.slice()) |item| {
-                    const key = item.key.?.asString(allocator) orelse "";
-                    const value = item.value.?.asString(allocator) orelse "";
+        if (json.asProperty(group.prop)) |dependencies_q| brk: {
+            switch (dependencies_q.expr.data) {
+                .e_array => |arr| {
+                    if (arr.items.len == 0) break :brk;
+
+                    workspace_names = try allocator.alloc(string, arr.items.len);
+
+                    var workspace_log = logger.Log.init(allocator);
+                    defer log.deinit();
+
+                    var workspace_buf = try allocator.alloc(u8, 1024);
+                    defer allocator.free(workspace_buf);
+
+                    for (arr.slice()) |item, i| {
+                        const path = item.asString(allocator) orelse return error.InvalidPackageJSON;
+
+                        var workspace_dir = try std.fs.cwd().openDir(path, .{});
+                        defer workspace_dir.close();
+
+                        var workspace_file = try workspace_dir.openFile("package.json", .{ .mode = .read_only });
+                        defer workspace_file.close();
 
-                    string_builder.count(key);
-                    string_builder.count(value);
+                        const workspace_read = try workspace_file.preadAll(workspace_buf, 0);
+                        const workspace_source = logger.Source.initPathString(path, workspace_buf[0..workspace_read]);
 
-                    // If it's a folder, pessimistically assume we will need a maximum path
-                    if (Dependency.Version.Tag.infer(value) == .folder) {
+                        initializeStore();
+
+                        var workspace_json = try json_parser.PackageJSONVersionChecker.init(allocator, &workspace_source, &workspace_log);
+
+                        _ = try workspace_json.parseExpr();
+                        if (!workspace_json.has_found_name) return error.InvalidPackageJSON;
+
+                        const workspace_name = workspace_json.found_name;
+
+                        string_builder.count(workspace_name);
+                        string_builder.count(path);
                         string_builder.cap += bun.MAX_PATH_BYTES;
+                        workspace_names[i] = try allocator.dupe(u8, workspace_name);
                     }
-                }
-                total_dependencies_count += @truncate(u32, dependencies_q.expr.data.e_object.properties.len);
+                    total_dependencies_count += @truncate(u32, arr.items.len);
+                },
+                .e_object => |obj| {
+                    for (obj.properties.slice()) |item| {
+                        const key = item.key.?.asString(allocator) orelse return error.InvalidPackageJSON;
+                        const value = item.value.?.asString(allocator) orelse return error.InvalidPackageJSON;
+
+                        string_builder.count(key);
+                        string_builder.count(value);
+
+                        // If it's a folder, pessimistically assume we will need a maximum path
+                        if (Dependency.Version.Tag.infer(value) == .folder) {
+                            string_builder.cap += bun.MAX_PATH_BYTES;
+                        }
+                    }
+                    total_dependencies_count += @truncate(u32, obj.properties.len);
+                },
+                else => {},
            }
        }
    }
@@ -2668,97 +2840,83 @@ pub const Package = extern struct {
             try lockfile.scratch.duplicate_checker_map.ensureTotalCapacity(total_dependencies_count);
         }
 
+        const in_workspace = lockfile.workspace_paths.contains(@truncate(u32, package.name_hash));
         inline for (dependency_groups) |group| {
-            if (json.asProperty(group.prop)) |dependencies_q| {
-                if (dependencies_q.expr.data == .e_object) {
-                    const dependency_props: []const JSAst.G.Property = dependencies_q.expr.data.e_object.properties.slice();
-                    var i: usize = 0;
-                    outer: while (i < dependency_props.len) {
-                        const item = dependency_props[i];
-
-                        const name_ = item.key.?.asString(allocator) orelse "";
-                        const version_ = item.value.?.asString(allocator) orelse "";
-
-                        const external_name = string_builder.append(ExternalString, name_);
-
-                        const external_version = string_builder.append(String, version_);
-
-                        const sliced = external_version.sliced(
-                            lockfile.buffers.string_bytes.items,
-                        );
-
-                        var dependency_version = Dependency.parse(
-                            allocator,
-                            sliced.slice,
-                            &sliced,
-                            log,
-                        ) orelse Dependency.Version{};
-
-                        if (dependency_version.tag == .folder) {
-                            const folder_path = dependency_version.value.folder.slice(lockfile.buffers.string_bytes.items);
-                            dependency_version.value.folder = string_builder.append(
-                                String,
-                                Path.relative(
-                                    FileSystem.instance.top_level_dir,
-                                    Path.joinAbsString(
-                                        FileSystem.instance.top_level_dir,
-                                        &[_]string{
-                                            source.path.name.dir,
-                                            folder_path,
-                                        },
-                                        .posix,
-                                    ),
-                                ),
-                            );
+            if (json.asProperty(group.prop)) |dependencies_q| brk: {
+                switch (dependencies_q.expr.data) {
+                    .e_array => |arr| {
+                        if (arr.items.len == 0) break :brk;
+
+                        for (arr.slice()) |item, i| {
+                            const name = workspace_names[i];
+                            defer allocator.free(name);
+
+                            const external_name = string_builder.append(ExternalString, name);
+                            const path = item.asString(allocator).?;
+
+                            if (try parseDependency(
+                                lockfile,
+                                allocator,
+                                log,
+                                source,
+                                group,
+                                &string_builder,
+                                features,
+                                package_dependencies,
+                                dependencies,
+                                in_workspace,
+                                .workspace,
+                                null,
+                                external_name,
+                                path,
+                                item,
+                                item,
+                            )) |dep| {
+                                dependencies[0] = dep;
+                                dependencies = dependencies[1..];
+                            }
                         }
 
-                        const this_dep = Dependency{
-                            .behavior = group.behavior,
-                            .name = external_name.value,
-                            .name_hash = external_name.hash,
-                            .version = dependency_version,
-                        };
-
-                        if (comptime features.check_for_duplicate_dependencies) {
-                            var entry = lockfile.scratch.duplicate_checker_map.getOrPutAssumeCapacity(external_name.hash);
-                            if (entry.found_existing) {
-                                // duplicate dependencies are allowed in optionalDependencies
-                                if (comptime group.behavior.isOptional()) {
-                                    for (package_dependencies[0 .. package_dependencies.len - dependencies.len]) |package_dep, j| {
-                                        if (package_dep.name_hash == this_dep.name_hash) {
-                                            package_dependencies[j] = this_dep;
-                                            break;
-                                        }
-                                    }
-
-                                    i += 1;
-                                    continue :outer;
-                                } else {
-                                    var notes = try allocator.alloc(logger.Data, 1);
-
-                                    notes[0] = logger.Data{
-                                        .text = try std.fmt.allocPrint(lockfile.allocator, "\"{s}\" originally specified here", .{name_}),
-                                        .location = logger.Location.init_or_nil(&source, source.rangeOfString(entry.value_ptr.*)),
-                                    };
-
-                                    try log.addRangeErrorFmtWithNotes(
-                                        &source,
-                                        source.rangeOfString(item.key.?.loc),
-                                        lockfile.allocator,
-                                        notes,
-                                        "Duplicate dependency: \"{s}\" specified in package.json",
-                                        .{name_},
-                                    );
-                                }
+                        allocator.free(workspace_names);
+                    },
+                    .e_object => |obj| {
+                        for (obj.properties.slice()) |item| {
+                            const key = item.key.?;
+                            const value = item.value.?;
+                            const external_name = string_builder.append(ExternalString, key.asString(allocator).?);
+                            const version = value.asString(allocator).?;
+
+                            var tag: ?Dependency.Version.Tag = null;
+                            var workspace_path: ?String = null;
+
+                            if (lockfile.workspace_paths.get(@truncate(u32, external_name.hash))) |path| {
+                                tag = .workspace;
+                                workspace_path = path;
                             }
 
-                            entry.value_ptr.* = item.value.?.loc;
+                            if (try parseDependency(
+                                lockfile,
+                                allocator,
+                                log,
+                                source,
+                                group,
+                                &string_builder,
+                                features,
+                                package_dependencies,
+                                dependencies,
+                                in_workspace,
+                                tag,
+                                workspace_path,
+                                external_name,
+                                version,
+                                key,
+                                value,
+                            )) |dep| {
+                                dependencies[0] = dep;
+                                dependencies = dependencies[1..];
+                            }
                         }
-
-                        dependencies[0] = this_dep;
-                        dependencies = dependencies[1..];
-                        i += 1;
-                    }
+                    },
+                    else => {},
                 }
             }
         }
@@ -2815,14 +2973,6 @@ pub const Package = extern struct {
         }
     };
 
-    name: String = String{},
-    name_hash: PackageNameHash = 0,
-    resolution: Resolution = Resolution{},
-    dependencies: DependencySlice = DependencySlice{},
-    resolutions: PackageIDSlice = PackageIDSlice{},
-    meta: Meta = Meta{},
-    bin: Bin = Bin{},
-
     pub const Serializer = struct {
         pub const sizes = blk: {
             const fields = std.meta.fields(Lockfile.Package);
@@ -2950,6 +3100,7 @@ pub fn deinit(this: *Lockfile) void {
     this.packages.deinit(this.allocator);
     this.unique_packages.deinit(this.allocator);
     this.string_pool.deinit();
+    this.workspace_paths.deinit(this.allocator);
 }
 
 const Buffers = struct {
@@ -3215,10 +3366,6 @@ pub const Serializer = struct {
         try writer.writeIntLittle(u64, 0);
         const end = try stream.getPos();
 
-        try writer.writeIntLittle(u64, this.workspace_path.len);
-        if (this.workspace_path.len > 0)
-            try writer.writeAll(this.workspace_path);
-
         try writer.writeAll(&alignment_bytes_to_repeat_buffer);
 
         _ = try std.os.pwrite(stream.handle, std.mem.asBytes(&end), pos);
@@ -3265,15 +3412,6 @@ pub const Serializer = struct {
 
         std.debug.assert(stream.pos == total_buffer_size);
 
-        load_workspace: {
-            const workspace_path_len = reader.readIntLittle(u64) catch break :load_workspace;
-            if (workspace_path_len > 0 and workspace_path_len < bun.MAX_PATH_BYTES) {
-                var workspace_path = try allocator.alloc(u8, workspace_path_len);
-                const len = reader.readAll(workspace_path) catch break :load_workspace;
-                lockfile.workspace_path = workspace_path[0..len];
-            }
-        }
-
         lockfile.scratch = Lockfile.Scratch.init(allocator);
 
         {
@@ -3281,10 +3419,19 @@ pub const Serializer = struct {
             lockfile.unique_packages = try Bitset.initFull(allocator, lockfile.packages.len);
             lockfile.string_pool = StringPool.initContext(allocator, .{});
             try lockfile.package_index.ensureTotalCapacity(@truncate(u32, lockfile.packages.len));
-            var slice = lockfile.packages.slice();
-            var name_hashes = slice.items(.name_hash);
+            const slice = lockfile.packages.slice();
+            const name_hashes = slice.items(.name_hash);
+            const resolutions = slice.items(.resolution);
             for (name_hashes) |name_hash, id| {
                 try lockfile.getOrPutID(@truncate(PackageID, id), name_hash);
+
+                const resolution = resolutions[id];
+                switch (resolution.tag) {
+                    .workspace => {
+                        try lockfile.workspace_paths.put(allocator, @truncate(u32, name_hash), resolution.value.workspace);
+                    },
+                    else => {},
+                }
             }
         }
diff --git a/src/install/resolution.zig b/src/install/resolution.zig
index d21855d7c..12fb53449 100644
--- a/src/install/resolution.zig
+++ b/src/install/resolution.zig
@@ -191,7 +191,7 @@ pub const Resolution = extern struct {
                 .gitlab => try formatter.resolution.value.gitlab.formatAs("gitlab", formatter.buf, layout, opts, writer),
                 .workspace => try std.fmt.format(writer, "workspace://{s}", .{formatter.resolution.value.workspace.slice(formatter.buf)}),
                 .symlink => try std.fmt.format(writer, "link://{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}),
-                .single_file_module => try std.fmt.format(writer, "link://{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}),
+                .single_file_module => try std.fmt.format(writer, "module://{s}", .{formatter.resolution.value.single_file_module.slice(formatter.buf)}),
                 else => {},
             }
         }
@@ -212,8 +212,8 @@ pub const Resolution = extern struct {
             .github => try formatter.resolution.value.github.formatAs("github", formatter.buf, layout, opts, writer),
             .gitlab => try formatter.resolution.value.gitlab.formatAs("gitlab", formatter.buf, layout, opts, writer),
             .workspace => try std.fmt.format(writer, "workspace://{s}", .{formatter.resolution.value.workspace.slice(formatter.buf)}),
-            .symlink => try std.fmt.format(writer, "link:{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}),
-            .single_file_module => try std.fmt.format(writer, "link://{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}),
+            .symlink => try std.fmt.format(writer, "link://{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}),
+            .single_file_module => try std.fmt.format(writer, "module://{s}", .{formatter.resolution.value.single_file_module.slice(formatter.buf)}),
             else => {},
         }
     }
diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig
index 8140cffc4..4d0391b64 100644
--- a/src/install/resolvers/folder_resolver.zig
+++ b/src/install/resolvers/folder_resolver.zig
@@ -6,7 +6,8 @@ const Npm = @import("../npm.zig");
 const logger = @import("bun").logger;
 const FileSystem = @import("../../fs.zig").FileSystem;
 const JSAst = bun.JSAst;
-const string = @import("../../string_types.zig").string;
+const string = bun.string;
+const stringZ = bun.stringZ;
 const Features = @import("../install.zig").Features;
 const IdentityContext = @import("../../identity_context.zig").IdentityContext;
 const strings = @import("bun").strings;
@@ -51,6 +52,7 @@ pub const FolderResolution = union(Tag) {
 
     pub const Resolver = NewResolver(Resolution.Tag.folder);
     pub const SymlinkResolver = NewResolver(Resolution.Tag.symlink);
+    pub const WorkspaceResolver = NewResolver(Resolution.Tag.workspace);
     pub const CacheFolderResolver = struct {
         folder_path: []const u8 = "",
         version: Semver.Version,
@@ -70,7 +72,11 @@ pub const FolderResolution = union(Tag) {
         pub fn count(_: @This(), comptime Builder: type, _: Builder, _: JSAst.Expr) void {}
     };
 
-    pub fn normalizePackageJSONPath(global_or_relative: GlobalOrRelative, joined: *[bun.MAX_PATH_BYTES]u8, non_normalized_path: string) [2]string {
+    const Paths = struct {
+        abs: stringZ,
+        rel: string,
+    };
+    fn normalizePackageJSONPath(global_or_relative: GlobalOrRelative, joined: *[bun.MAX_PATH_BYTES]u8, non_normalized_path: string) Paths {
         var abs: string = "";
         var rel: string = "";
         // We consider it valid if there is a package.json in the folder
@@ -113,20 +119,23 @@ pub const FolderResolution = union(Tag) {
             // We store the folder name without package.json
             rel = abs[0 .. abs.len - "/package.json".len];
         }
+        joined[abs.len] = 0;
 
-        return .{ abs, rel };
+        return .{
+            .abs = joined[0..abs.len :0],
+            .rel = rel,
+        };
     }
 
-    pub fn readPackageJSONFromDisk(
+    fn readPackageJSONFromDisk(
         manager: *PackageManager,
-        joinedZ: [:0]const u8,
-        abs: []const u8,
+        abs: stringZ,
         version: Dependency.Version,
         comptime features: Features,
         comptime ResolverType: type,
         resolver: ResolverType,
     ) !Lockfile.Package {
-        var package_json: std.fs.File = try std.fs.cwd().openFileZ(joinedZ, .{ .mode = .read_only });
+        var package_json: std.fs.File = try std.fs.cwd().openFileZ(abs, .{ .mode = .read_only });
         defer package_json.close();
         var package = Lockfile.Package{};
         var body = Npm.Registry.BodyPool.get(manager.allocator);
@@ -160,43 +169,49 @@ pub const FolderResolution = union(Tag) {
 
     pub const GlobalOrRelative = union(enum) {
         global: []const u8,
-        relative: void,
+        relative: Dependency.Version.Tag,
         cache_folder: []const u8,
     };
 
     pub fn getOrPut(global_or_relative: GlobalOrRelative, version: Dependency.Version, non_normalized_path: string, manager: *PackageManager) FolderResolution {
         var joined: [bun.MAX_PATH_BYTES]u8 = undefined;
         const paths = normalizePackageJSONPath(global_or_relative, &joined, non_normalized_path);
-        const abs = paths[0];
-        const rel = paths[1];
+        const abs = paths.abs;
+        const rel = paths.rel;
 
         var entry = manager.folders.getOrPut(manager.allocator, hash(abs)) catch unreachable;
         if (entry.found_existing) return entry.value_ptr.*;
 
-        joined[abs.len] = 0;
-        var joinedZ: [:0]u8 = joined[0..abs.len :0];
-
         const package: Lockfile.Package = switch (global_or_relative) {
             .global => readPackageJSONFromDisk(
                 manager,
-                joinedZ,
                 abs,
                 version,
                 Features.link,
                 SymlinkResolver,
                 SymlinkResolver{ .folder_path = non_normalized_path },
             ),
-            .relative => readPackageJSONFromDisk(
-                manager,
-                joinedZ,
-                abs,
-                version,
-                Features.folder,
-                Resolver,
-                Resolver{ .folder_path = rel },
-            ),
+            .relative => |tag| switch (tag) {
+                .folder => readPackageJSONFromDisk(
+                    manager,
+                    abs,
+                    version,
+                    Features.folder,
+                    Resolver,
+                    Resolver{ .folder_path = rel },
+                ),
+                .workspace => readPackageJSONFromDisk(
+                    manager,
+                    abs,
+                    version,
+                    Features.workspace,
+                    WorkspaceResolver,
+                    WorkspaceResolver{ .folder_path = rel },
+                ),
+                else => unreachable,
+            },
             .cache_folder => readPackageJSONFromDisk(
                 manager,
-                joinedZ,
                 abs,
                 version,
                 Features.npm,
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 149ca04d7..8e05d6441 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -1857,10 +1857,10 @@ pub const Resolver = struct {
                 // the unknown package is the root package
                 package = Package{
                     .name = Semver.String.init("", ""),
-                };
-                package.resolution = .{
-                    .tag = .root,
-                    .value = .{ .root = {} },
+                    .resolution = .{
+                        .tag = .root,
+                        .value = .{ .root = {} },
+                    },
                 };
                 package = pm.lockfile.appendPackage(package) catch |err| {
                     return .{ .failure = err };
diff --git a/test/bun.js/install/bun-install.test.ts b/test/bun.js/install/bun-install.test.ts
index 58bd4918e..78b1c12b8 100644
--- a/test/bun.js/install/bun-install.test.ts
+++ b/test/bun.js/install/bun-install.test.ts
@@ -1,26 +1,53 @@
-import { spawn, spawnSync } from "bun";
-import { describe, expect, it, test } from "bun:test";
+import { spawn } from "bun";
+import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test";
 import { bunExe } from "bunExe";
+import { mkdir, mkdtemp, readdir, readlink, rm, writeFile } from "fs/promises";
+import { join } from "path";
+import { tmpdir } from "os";
 
-test("bun install", async () => {
-  const urls = [];
-  const server = Bun.serve({
+let handler, package_dir, requested, server;
+
+function resetHandler() {
+  handler = function() {
+    return new Response("Tea Break~", { status: 418 });
+  };
+}
+
+beforeAll(() => {
+  server = Bun.serve({
     async fetch(request) {
-      try {
-        expect(request.method).toBe("GET");
-        expect(request.headers.get("accept")).toBe("application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*");
-        expect(request.headers.get("npm-auth-type")).toBe(null);
-        expect(await request.text()).toBe("");
-        urls.push(request.url);
-        return new Response("bar", { status: 404 });
-      } finally {
-        server.stop();
-      }
+      requested++;
+      return await handler(request);
     },
     port: 54321,
   });
+});
+afterAll(() => {
+  server.stop();
+});
+beforeEach(async () => {
+  resetHandler();
+  requested = 0;
+  package_dir = await mkdtemp(join(tmpdir(), "bun-install.test"));
+});
+afterEach(async () => {
+  resetHandler();
+  await rm(package_dir, { force: true, recursive: true });
+});
+
+it("should handle missing package", async () => {
+  const urls: string[] = [];
+  handler = async(request) => {
+    expect(request.method).toBe("GET");
+    expect(request.headers.get("accept")).toBe("application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*");
+    expect(request.headers.get("npm-auth-type")).toBe(null);
+    expect(await request.text()).toBe("");
+    urls.push(request.url);
+    return new Response("bar", { status: 404 });
+  };
   const { stdout, stderr, exited } = spawn({
     cmd: [bunExe(), "install", "foo", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
     stdout: null,
     stdin: "pipe",
     stderr: "pipe",
@@ -29,42 +56,37 @@ test("bun install", async () => {
     env: {
       ...process.env,
       BUN_DEBUG_QUIET_LOGS: "1",
     },
   });
-  expect(stdout).toBeDefined();
   expect(stderr).toBeDefined();
-  expect(await new Response(stdout).text()).toBe("");
   var err = await new Response(stderr).text();
-  expect(err.split(/\n/)).toContain('error: package "foo" not found localhost/foo 404');
+  expect(err.split(/\r?\n/)).toContain('error: package "foo" not found localhost/foo 404');
+  expect(stdout).toBeDefined();
+  expect(await new Response(stdout).text()).toBe("");
   expect(urls).toContain("http://localhost:54321/foo");
   expect(await exited).toBe(1);
+  expect(requested).toBe(1);
 });
 
-test("bun install @scoped", async () => {
+it("should handle @scoped authentication", async () => {
   let seen_token = false;
   const url = "http://localhost:54321/@foo/bar";
-  const urls = [];
-  const server = Bun.serve({
-    async fetch(request) {
-      try {
-        expect(request.method).toBe("GET");
-        expect(request.headers.get("accept")).toBe("application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*");
-        if (request.url === url) {
-          expect(request.headers.get("authorization")).toBe("Bearer bar");
-          expect(request.headers.get("npm-auth-type")).toBe("legacy");
-          seen_token = true;
-        } else {
-          expect(request.headers.get("npm-auth-type")).toBe(null);
-        }
-        expect(await request.text()).toBe("");
-        urls.push(request.url);
-        return new Response("Tea?", { status: 418 });
-      } finally {
-        server.stop();
-      }
-    },
-    port: 54321,
-  });
+  const urls: string[] = [];
+  handler = async(request) => {
+    expect(request.method).toBe("GET");
+    expect(request.headers.get("accept")).toBe("application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*");
+    if (request.url === url) {
+      expect(request.headers.get("authorization")).toBe("Bearer bar");
+      expect(request.headers.get("npm-auth-type")).toBe("legacy");
+      seen_token = true;
+    } else {
+      expect(request.headers.get("npm-auth-type")).toBe(null);
+    }
+    expect(await request.text()).toBe("");
+    urls.push(request.url);
+    return new Response("Feeling lucky?", { status: 555 });
+  };
   const { stdout, stderr, exited } = spawn({
     cmd: [bunExe(), "install", "@foo/bar", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
     stdout: null,
     stdin: "pipe",
     stderr: "pipe",
@@ -73,12 +95,246 @@ test("bun install @scoped", async () => {
     env: {
       ...process.env,
       BUN_DEBUG_QUIET_LOGS: "1",
     },
   });
-  expect(stdout).toBeDefined();
   expect(stderr).toBeDefined();
-  expect(await new Response(stdout).text()).toBe("");
   var err = await new Response(stderr).text();
-  expect(err.split(/\n/)).toContain(`GET ${url} - 418`);
+  expect(err.split(/\r?\n/)).toContain(`GET ${url} - 555`);
+  expect(stdout).toBeDefined();
+  expect(await new Response(stdout).text()).toBe("");
   expect(urls).toContain(url);
   expect(seen_token).toBe(true);
   expect(await exited).toBe(1);
+  expect(requested).toBe(1);
+});
+
+it("should handle workspaces", async () => {
+  await writeFile(join(package_dir, "package.json"), JSON.stringify({
+    name: "Foo",
+    version: "0.0.1",
+    workspaces: [
+      "bar",
+    ],
+  }));
+  await mkdir(join(package_dir, "bar"));
+  await writeFile(join(package_dir, "bar", "package.json"), JSON.stringify({
+    name: "Bar",
+    version: "0.0.2",
+  }));
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env: {
+      ...process.env,
+      BUN_DEBUG_QUIET_LOGS: "1",
+    },
+  });
+  expect(stderr).toBeDefined();
+  var err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  var out = await new Response(stdout).text();
+  expect(out).toContain("+ Bar@workspace://bar");
+  expect(out).toContain("1 packages installed");
+  expect(await exited).toBe(0);
+  expect(requested).toBe(0);
+  expect(await readdir(join(package_dir, "node_modules"))).toEqual(["Bar"]);
+  expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar"));
+});
+
+it("should handle inter-dependency between workspaces", async () => {
+  await writeFile(join(package_dir, "package.json"), JSON.stringify({
+    name: "Foo",
+    version: "0.0.1",
+    workspaces: [
+      "bar",
+      "packages/baz",
+    ],
+  }));
+  await mkdir(join(package_dir, "bar"));
+  await writeFile(join(package_dir, "bar", "package.json"), JSON.stringify({
+    name: "Bar",
+    version: "0.0.2",
+    dependencies: {
+      "Baz": "0.0.3",
+    },
+  }));
+  await mkdir(join(package_dir, "packages", "baz"), { recursive: true });
+  await writeFile(join(package_dir, "packages", "baz", "package.json"), JSON.stringify({
+    name: "Baz",
+    version: "0.0.3",
+    dependencies: {
+      "Bar": "0.0.2",
+    },
+  }));
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env: {
+      ...process.env,
+      BUN_DEBUG_QUIET_LOGS: "1",
+    },
+  });
+  expect(stderr).toBeDefined();
+  var err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  var out = await new Response(stdout).text();
+  expect(out).toContain("+ Bar@workspace://bar");
+  expect(out).toContain("+ Baz@workspace://packages/baz");
+  expect(out).toContain("2 packages installed");
+  expect(await exited).toBe(0);
+  expect(requested).toBe(0);
+  expect(await readdir(join(package_dir, "node_modules"))).toEqual(["Bar", "Baz"]);
+  expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar"));
+  expect(await readlink(join(package_dir, "node_modules", "Baz"))).toBe(join("..", "packages", "baz"));
+});
+
+it("should handle inter-dependency between workspaces (devDependencies)", async () => {
+  await writeFile(join(package_dir, "package.json"), JSON.stringify({
+    name: "Foo",
+    version: "0.0.1",
+    workspaces: [
+      "bar",
+      "packages/baz",
+    ],
+  }));
+  await mkdir(join(package_dir, "bar"));
+  await writeFile(join(package_dir, "bar", "package.json"), JSON.stringify({
+    name: "Bar",
+    version: "0.0.2",
+    devDependencies: {
+      "Baz": "0.0.3",
+    },
+  }));
+  await mkdir(join(package_dir, "packages", "baz"), { recursive: true });
+  await writeFile(join(package_dir, "packages", "baz", "package.json"), JSON.stringify({
+    name: "Baz",
+    version: "0.0.3",
+    devDependencies: {
+      "Bar": "0.0.2",
+    },
+  }));
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env: {
+      ...process.env,
+      BUN_DEBUG_QUIET_LOGS: "1",
+    },
+  });
+  expect(stderr).toBeDefined();
+  var err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  var out = await new Response(stdout).text();
+  expect(out).toContain("+ Bar@workspace://bar");
+  expect(out).toContain("+ Baz@workspace://packages/baz");
+  expect(out).toContain("2 packages installed");
+  expect(await exited).toBe(0);
+  expect(requested).toBe(0);
+  expect(await readdir(join(package_dir, "node_modules"))).toEqual(["Bar", "Baz"]);
+  expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar"));
+  expect(await readlink(join(package_dir, "node_modules", "Baz"))).toBe(join("..", "packages", "baz"));
+});
+
+it("should handle inter-dependency between workspaces (optionalDependencies)", async () => {
+  await writeFile(join(package_dir, "package.json"), JSON.stringify({
+    name: "Foo",
+    version: "0.0.1",
+    workspaces: [
+      "bar",
+      "packages/baz",
+    ],
+  }));
+  await mkdir(join(package_dir, "bar"));
+  await writeFile(join(package_dir, "bar", "package.json"), JSON.stringify({
+    name: "Bar",
+    version: "0.0.2",
+    optionalDependencies: {
+      "Baz": "0.0.3",
+    },
+  }));
+  await mkdir(join(package_dir, "packages", "baz"), { recursive: true });
+  await writeFile(join(package_dir, "packages", "baz", "package.json"), JSON.stringify({
+    name: "Baz",
+    version: "0.0.3",
+    optionalDependencies: {
+      "Bar": "0.0.2",
+    },
+  }));
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env: {
+      ...process.env,
+      BUN_DEBUG_QUIET_LOGS: "1",
+    },
+  });
+  expect(stderr).toBeDefined();
+  var err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  var out = await new Response(stdout).text();
+  expect(out).toContain("+ Bar@workspace://bar");
+  expect(out).toContain("+ Baz@workspace://packages/baz");
+  expect(out).toContain("2 packages installed");
+  expect(await exited).toBe(0);
+  expect(requested).toBe(0);
+  expect(await readdir(join(package_dir, "node_modules"))).toEqual(["Bar", "Baz"]);
+  expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar"));
+  expect(await readlink(join(package_dir, "node_modules", "Baz"))).toBe(join("..", "packages", "baz"));
+});
+
+it("should ignore peerDependencies within workspaces", async () => {
+  await writeFile(join(package_dir, "package.json"), JSON.stringify({
+    name: "Foo",
+    version: "0.0.1",
+    workspaces: [
+      "packages/baz",
+    ],
+    peerDependencies: {
+      "Bar": ">=0.0.2",
+    },
+  }));
+  await mkdir(join(package_dir, "packages", "baz"), { recursive: true });
+  await writeFile(join(package_dir, "packages", "baz", "package.json"), JSON.stringify({
+    name: "Baz",
+    version: "0.0.3",
+    peerDependencies: {
+      "Moo": ">=0.0.4",
+    },
+  }));
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env: {
+      ...process.env,
+      BUN_DEBUG_QUIET_LOGS: "1",
+    },
+  });
+  expect(stderr).toBeDefined();
+  var err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  var out = await new Response(stdout).text();
+  expect(out).toContain("+ Baz@workspace://packages/baz");
+  expect(out).toContain("1 packages installed");
+  expect(await exited).toBe(0);
+  expect(requested).toBe(0);
+  expect(await readdir(join(package_dir, "node_modules"))).toEqual(["Baz"]);
+  expect(await readlink(join(package_dir, "node_modules", "Baz"))).toBe(join("..", "packages", "baz"));
+});
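For reference, the on-disk result the workspace tests assert: after a successful install, each member is symlinked into node_modules under its package name, pointing at the workspace folder via a relative path. A sketch of a shell session against the single-member fixture above (the report lines match the literal strings the tests expect):

    $ bun install
    + Bar@workspace://bar

    1 packages installed

    $ readlink node_modules/Bar
    ../bar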