aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGravatar Alex Lam S.L <alexlamsl@gmail.com> 2023-01-24 21:57:25 +0200
committerGravatar GitHub <noreply@github.com> 2023-01-24 11:57:25 -0800
commitf43b67520032817877b5865173faf4b4c789089a (patch)
treeef0d87dfabdae75e456c0d81f1431449ec22d54b
parente47fe2ca00a5d3cbf9710fedc1440aa25025317d (diff)
downloadbun-f43b67520032817877b5865173faf4b4c789089a.tar.gz
bun-f43b67520032817877b5865173faf4b4c789089a.tar.zst
bun-f43b67520032817877b5865173faf4b4c789089a.zip
support GitHub URLs as dependencies (#1875)
-rw-r--r--src/bun.js/module_loader.zig4
-rw-r--r--src/install/dependency.zig120
-rw-r--r--src/install/extract_tarball.zig112
-rw-r--r--src/install/install.zig273
-rw-r--r--src/install/repository.zig8
-rw-r--r--src/libarchive/libarchive.zig30
-rw-r--r--test/bun.js/install/bun-install.test.ts395
7 files changed, 827 insertions, 115 deletions
diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig
index abddc2f5a..12b4f0463 100644
--- a/src/bun.js/module_loader.zig
+++ b/src/bun.js/module_loader.zig
@@ -395,7 +395,7 @@ pub const ModuleLoader = struct {
this.map.items.len = i;
}
- pub fn onExtract(this: *Queue, package_id: u32, comptime _: PackageManager.Options.LogLevel) void {
+ pub fn onExtract(this: *Queue, package_id: Install.PackageID, _: Install.ExtractData, comptime _: PackageManager.Options.LogLevel) void {
if (comptime Environment.allow_assert) {
const lockfile = this.vm().packageManager().lockfile;
debug("onExtract: {s} ({d})", .{
@@ -406,7 +406,7 @@ pub const ModuleLoader = struct {
this.onPackageID(package_id);
}
- pub fn onPackageID(this: *Queue, package_id: u32) void {
+ pub fn onPackageID(this: *Queue, package_id: Install.PackageID) void {
var values = this.map.items;
for (values) |value| {
var package_ids = value.parse_result.pending_imports.items(.resolution_id);
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index 8aac4e391..a9c2fa108 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -1,17 +1,19 @@
-const ExternalStringList = @import("./install.zig").ExternalStringList;
+const bun = @import("bun");
+const logger = bun.logger;
+const Environment = @import("../env.zig");
+const Install = @import("./install.zig");
+const ExternalStringList = Install.ExternalStringList;
+const Features = Install.Features;
+const PackageNameHash = Install.PackageNameHash;
+const Repository = @import("./repository.zig").Repository;
const Semver = @import("./semver.zig");
const ExternalString = Semver.ExternalString;
+const SlicedString = Semver.SlicedString;
const String = Semver.String;
const std = @import("std");
-const SlicedString = Semver.SlicedString;
-const PackageNameHash = @import("./install.zig").PackageNameHash;
-const Features = @import("./install.zig").Features;
-const Install = @import("./install.zig");
-const logger = @import("bun").logger;
-const Dependency = @This();
const string = @import("../string_types.zig").string;
const strings = @import("../string_immutable.zig");
-const bun = @import("bun");
+const Dependency = @This();
pub const Pair = struct {
resolution_id: Install.PackageID = Install.invalid_package_id,
@@ -189,7 +191,7 @@ pub const Version = struct {
}
pub fn isLessThan(string_buf: []const u8, lhs: Dependency.Version, rhs: Dependency.Version) bool {
- std.debug.assert(lhs.tag == rhs.tag);
+ if (Environment.allow_assert) std.debug.assert(lhs.tag == rhs.tag);
return strings.cmpStringsAsc({}, lhs.literal.slice(string_buf), rhs.literal.slice(string_buf));
}
@@ -236,6 +238,7 @@ pub const Version = struct {
.npm => strings.eql(lhs.literal.slice(lhs_buf), rhs.literal.slice(rhs_buf)) or
lhs.value.npm.eql(rhs.value.npm, lhs_buf, rhs_buf),
.folder, .dist_tag => lhs.literal.eql(rhs.literal, lhs_buf, rhs_buf),
+ .github => lhs.value.github.eql(rhs.value.github, lhs_buf, rhs_buf),
.tarball => lhs.value.tarball.eql(rhs.value.tarball, lhs_buf, rhs_buf),
.symlink => lhs.value.symlink.eql(rhs.value.symlink, lhs_buf, rhs_buf),
.workspace => lhs.value.workspace.eql(rhs.value.workspace, lhs_buf, rhs_buf),
@@ -274,24 +277,36 @@ pub const Version = struct {
}
pub inline fn isGitHubRepoPath(dependency: string) bool {
+ // Shortest valid expression: u/r
if (dependency.len < 3) return false;
- if (dependency[0] == '/') return false;
+ var hash_index: usize = 0;
var slash_index: usize = 0;
for (dependency) |c, i| {
- // Must be alphanumeric
switch (c) {
'/' => {
+ if (i == 0) return false;
if (slash_index > 0) return false;
slash_index = i;
},
- '\\', 'a'...'z', 'A'...'Z', '0'...'9', '%' => {},
+ '#' => {
+ if (i == 0) return false;
+ if (hash_index > 0) return false;
+ if (slash_index == 0) return false;
+ hash_index = i;
+ },
+ // Not allowed in username
+ '.', '_' => {
+ if (slash_index == 0) return false;
+ },
+ // Must be alphanumeric
+ '-', 'a'...'z', 'A'...'Z', '0'...'9' => {},
else => return false,
}
}
- return slash_index > 0 and slash_index != dependency.len - 1;
+ return hash_index != dependency.len - 1 and slash_index > 0 and slash_index != dependency.len - 1;
}
// this won't work for query string params
@@ -376,7 +391,9 @@ pub const Version = struct {
}
},
'h' => {
- if (strings.hasPrefixComptime(url, "hub:")) return .github;
+ if (strings.hasPrefixComptime(url, "hub:")) {
+ if (isGitHubRepoPath(url["hub:".len..])) return .github;
+ }
},
else => {},
}
@@ -403,7 +420,9 @@ pub const Version = struct {
},
else => {},
}
- if (strings.hasPrefixComptime(url, "github.com/")) return .github;
+ if (strings.hasPrefixComptime(url, "github.com/")) {
+ if (isGitHubRepoPath(url["github.com/".len..])) return .github;
+ }
}
}
},
@@ -489,8 +508,7 @@ pub const Version = struct {
workspace: String,
/// Unsupported, but still parsed so an error can be thrown
git: void,
- /// Unsupported, but still parsed so an error can be thrown
- github: void,
+ github: Repository,
};
};
@@ -625,10 +643,10 @@ pub fn parseWithTag(
alias;
// name should never be empty
- std.debug.assert(!actual.isEmpty());
+ if (Environment.allow_assert) std.debug.assert(!actual.isEmpty());
// tag should never be empty
- std.debug.assert(!tag_to_use.isEmpty());
+ if (Environment.allow_assert) std.debug.assert(!tag_to_use.isEmpty());
return Version{
.literal = sliced.value(),
@@ -641,6 +659,68 @@ pub fn parseWithTag(
.tag = .dist_tag,
};
},
+ .github => {
+ var from_url = false;
+ var input = dependency;
+ if (strings.hasPrefixComptime(input, "github:")) {
+ input = input["github:".len..];
+ } else if (strings.hasPrefixComptime(input, "http")) {
+ var url = input["http".len..];
+ if (url.len > 2) {
+ switch (url[0]) {
+ ':' => {
+ if (strings.hasPrefixComptime(url, "://")) {
+ url = url["://".len..];
+ }
+ },
+ 's' => {
+ if (strings.hasPrefixComptime(url, "s://")) {
+ url = url["s://".len..];
+ }
+ },
+ else => {},
+ }
+ if (strings.hasPrefixComptime(url, "github.com/")) {
+ input = url["github.com/".len..];
+ from_url = true;
+ }
+ }
+ }
+
+ if (Environment.allow_assert) std.debug.assert(Version.Tag.isGitHubRepoPath(input));
+
+ var hash_index: usize = 0;
+ var slash_index: usize = 0;
+ for (input) |c, i| {
+ switch (c) {
+ '/' => {
+ slash_index = i;
+ },
+ '#' => {
+ hash_index = i;
+ break;
+ },
+ else => {},
+ }
+ }
+
+ var repo = if (hash_index == 0) input[slash_index + 1 ..] else input[slash_index + 1 .. hash_index];
+ if (from_url and strings.endsWithComptime(repo, ".git")) {
+ repo = repo[0 .. repo.len - ".git".len];
+ }
+
+ return Version{
+ .literal = sliced.value(),
+ .value = .{
+ .github = .{
+ .owner = sliced.sub(input[0..slash_index]).value(),
+ .repo = sliced.sub(repo).value(),
+ .committish = if (hash_index == 0) String.from("") else sliced.sub(input[hash_index + 1 ..]).value(),
+ },
+ },
+ .tag = .github,
+ };
+ },
.tarball => {
if (strings.hasPrefixComptime(dependency, "https://") or strings.hasPrefixComptime(dependency, "http://")) {
return Version{
@@ -713,7 +793,7 @@ pub fn parseWithTag(
.literal = sliced.value(),
};
},
- .git, .github => {
+ .git => {
if (log_) |log| log.addErrorFmt(null, logger.Loc.Empty, allocator, "Support for dependency type \"{s}\" is not implemented yet (\"{s}\")", .{ @tagName(tag), dependency }) catch unreachable;
return null;
},
diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig
index 1316302f8..0e813605e 100644
--- a/src/install/extract_tarball.zig
+++ b/src/install/extract_tarball.zig
@@ -1,30 +1,37 @@
-const Output = @import("bun").Output;
-const strings = @import("../string_immutable.zig");
-const string = @import("../string_types.zig").string;
-const Resolution = @import("./resolution.zig").Resolution;
+const bun = @import("bun");
+const default_allocator = bun.default_allocator;
+const Global = bun.Global;
+const json_parser = bun.JSON;
+const logger = bun.logger;
+const Output = bun.Output;
const FileSystem = @import("../fs.zig").FileSystem;
-const Semver = @import("./semver.zig");
+const Install = @import("./install.zig");
+const Features = Install.Features;
+const Lockfile = Install.Lockfile;
+const PackageID = Install.PackageID;
+const PackageManager = Install.PackageManager;
const Integrity = @import("./integrity.zig").Integrity;
-const PackageID = @import("./install.zig").PackageID;
-const PackageManager = @import("./install.zig").PackageManager;
-const std = @import("std");
const Npm = @import("./npm.zig");
+const Resolution = @import("./resolution.zig").Resolution;
+const Semver = @import("./semver.zig");
+const std = @import("std");
+const string = @import("../string_types.zig").string;
+const strings = @import("../string_immutable.zig");
const ExtractTarball = @This();
-const default_allocator = @import("bun").default_allocator;
-const Global = @import("bun").Global;
-const bun = @import("bun");
+
name: strings.StringOrTinyString,
resolution: Resolution,
registry: string,
cache_dir: std.fs.Dir,
temp_dir: std.fs.Dir,
package_id: PackageID,
+dependency_id: PackageID = Install.invalid_package_id,
skip_verify: bool = false,
integrity: Integrity = Integrity{},
url: string = "",
package_manager: *PackageManager,
-pub inline fn run(this: ExtractTarball, bytes: []const u8) !string {
+pub inline fn run(this: ExtractTarball, bytes: []const u8) !Install.ExtractData {
if (!this.skip_verify and this.integrity.tag.isSupported()) {
if (!this.integrity.verify(bytes)) {
Output.prettyErrorln("<r><red>Integrity check failed<r> for tarball: {s}", .{this.name.slice()});
@@ -141,10 +148,11 @@ pub fn buildURLWithPrinter(
}
}
-threadlocal var abs_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
-threadlocal var abs_buf2: [bun.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var final_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var folder_name_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var json_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
-fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
+fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractData {
var tmpdir = this.temp_dir;
var tmpname_buf: [256]u8 = undefined;
const name = this.name.slice();
@@ -156,6 +164,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
}
}
+ var resolved: string = "";
var tmpname = try FileSystem.instance.tmpname(basename[0..@min(basename.len, 32)], &tmpname_buf, tgz_bytes.len);
{
var extract_destination = tmpdir.makeOpenPathIterable(std.mem.span(tmpname), .{}) catch |err| {
@@ -194,6 +203,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
void,
void{},
// for npm packages, the root dir is always "package"
+ // for github tarballs, the root dir is always the commit id
1,
true,
true,
@@ -206,11 +216,20 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
void,
void{},
// for npm packages, the root dir is always "package"
+ // for github tarballs, the root dir is always the commit id
1,
true,
false,
);
+ switch (this.resolution.tag) {
+ .github => {
+ resolved = try Archive.readFirstDirname(zlib_pool.data.list.items);
+ resolved = try this.package_manager.allocator.dupe(u8, resolved);
+ },
+ else => {},
+ }
+
if (PackageManager.verbose_install) {
Output.prettyErrorln(
"[{s}] Extracted<r>",
@@ -221,7 +240,11 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
Output.flush();
}
}
- var folder_name = this.package_manager.cachedNPMPackageFolderNamePrint(&abs_buf2, name, this.resolution.value.npm.version);
+ const folder_name = switch (this.resolution.tag) {
+ .npm => this.package_manager.cachedNPMPackageFolderNamePrint(&folder_name_buf, name, this.resolution.value.npm.version),
+ .github => PackageManager.cachedGitHubFolderNamePrint(&folder_name_buf, resolved),
+ else => unreachable,
+ };
if (folder_name.len == 0 or (folder_name.len == 1 and folder_name[0] == '/')) @panic("Tried to delete root and stopped it");
var cache_dir = this.cache_dir;
cache_dir.deleteTree(folder_name) catch {};
@@ -263,7 +286,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
// and get the fd path
var final_path = bun.getFdPath(
final_dir.fd,
- &abs_buf,
+ &final_path_buf,
) catch |err| {
Output.prettyErrorln(
"<r><red>Error {s}<r> failed to verify cache dir for {s}",
@@ -281,11 +304,60 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
defer index_dir.close();
index_dir.dir.symLink(
final_path,
- // trim "name@" from the prefix
- folder_name[name.len + 1 ..],
+ switch (this.resolution.tag) {
+ .github => folder_name["@GH@".len..],
+ // trim "name@" from the prefix
+ .npm => folder_name[name.len + 1 ..],
+ else => folder_name,
+ },
.{},
) catch break :create_index;
}
- return try FileSystem.instance.dirname_store.append(@TypeOf(final_path), final_path);
+ var json_path: []u8 = "";
+ var json_buf: []u8 = "";
+ var json_len: usize = 0;
+ switch (this.resolution.tag) {
+ .github => {
+ var json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| {
+ Output.prettyErrorln("<r><red>Error {s}<r> failed to open package.json for {s}", .{
+ @errorName(err),
+ name,
+ });
+ Global.crash();
+ };
+ defer json_file.close();
+ var json_stat = try json_file.stat();
+ json_buf = try this.package_manager.allocator.alloc(u8, json_stat.size + 64);
+ json_len = try json_file.preadAll(json_buf, 0);
+
+ json_path = bun.getFdPath(
+ json_file.handle,
+ &json_path_buf,
+ ) catch |err| {
+ Output.prettyErrorln(
+ "<r><red>Error {s}<r> failed to open package.json for {s}",
+ .{
+ @errorName(err),
+ name,
+ },
+ );
+ Global.crash();
+ };
+ // TODO remove extracted files not matching any globs under "files"
+ },
+ else => {},
+ }
+
+ const ret_final_path = try FileSystem.instance.dirname_store.append(@TypeOf(final_path), final_path);
+ const ret_json_path = try FileSystem.instance.dirname_store.append(@TypeOf(json_path), json_path);
+ return .{
+ .url = this.url,
+ .resolved = resolved,
+ .final_path = ret_final_path,
+ .json_path = ret_json_path,
+ .json_buf = json_buf,
+ .json_len = json_len,
+ .dependency_id = this.dependency_id,
+ };
}
diff --git a/src/install/install.zig b/src/install/install.zig
index 7195e1ac5..e7cc1e332 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -485,7 +485,7 @@ const Task = struct {
hasher.update(package_name);
hasher.update("@");
hasher.update(std.mem.asBytes(&package_version));
- return @as(u64, @truncate(u63, hasher.final())) | @as(u64, 1 << 63);
+ return @as(u64, @truncate(u62, hasher.final())) | @as(u64, 1 << 63);
}
pub fn forBinLink(package_id: PackageID) u64 {
@@ -497,7 +497,13 @@ const Task = struct {
_: Task.Tag,
name: string,
) u64 {
- return @as(u64, @truncate(u63, std.hash.Wyhash.hash(0, name)));
+ return @as(u64, @truncate(u62, std.hash.Wyhash.hash(0, name)));
+ }
+
+ pub fn forTarball(url: string) u64 {
+ var hasher = std.hash.Wyhash.init(0);
+ hasher.update(url);
+ return @as(u64, @truncate(u62, hasher.final())) | @as(u64, 1 << 62);
}
};
@@ -564,7 +570,7 @@ const Task = struct {
this.err = err;
this.status = Status.fail;
- this.data = .{ .extract = "" };
+ this.data = .{ .extract = .{} };
this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
};
@@ -592,7 +598,7 @@ const Task = struct {
pub const Data = union {
package_manifest: Npm.PackageManifest,
- extract: string,
+ extract: ExtractData,
binlink: bool,
};
@@ -612,6 +618,16 @@ const Task = struct {
};
};
+pub const ExtractData = struct {
+ url: string = "",
+ resolved: string = "",
+ final_path: string = "",
+ json_path: string = "",
+ json_buf: []u8 = "",
+ json_len: usize = 0,
+ dependency_id: PackageID = invalid_package_id,
+};
+
const PackageInstall = struct {
cache_dir: std.fs.IterableDir,
destination_dir: std.fs.IterableDir,
@@ -1789,6 +1805,33 @@ pub const PackageManager = struct {
return this.allocator.create(NetworkTask) catch @panic("Memory allocation failure creating NetworkTask!");
}
+ fn allocGitHubURL(this: *const PackageManager, repository: Repository) !string {
+ var github_api_domain: string = "api.github.com";
+ if (this.env_loader.map.get("GITHUB_API_DOMAIN")) |api_domain| {
+ if (api_domain.len > 0) {
+ github_api_domain = api_domain;
+ }
+ }
+ return try std.fmt.allocPrint(
+ this.allocator,
+ "https://{s}/repos/{s}/{s}/tarball/{s}",
+ .{
+ github_api_domain,
+ this.lockfile.str(&repository.owner),
+ this.lockfile.str(&repository.repo),
+ this.lockfile.str(&repository.committish),
+ },
+ );
+ }
+
+ pub fn cachedGitHubFolderNamePrint(buf: []u8, resolved: string) stringZ {
+ return std.fmt.bufPrintZ(buf, "@GH@{s}", .{resolved}) catch unreachable;
+ }
+
+ pub fn cachedGitHubFolderName(this: *const PackageManager, repository: Repository) stringZ {
+ return cachedGitHubFolderNamePrint(&cached_package_folder_name_buf, this.lockfile.str(&repository.resolved));
+ }
+
// TODO: normalize to alphanumeric
pub fn cachedNPMPackageFolderNamePrint(this: *const PackageManager, buf: []u8, name: string, version: Semver.Version) stringZ {
const scope = this.scopeForPackageName(name);
@@ -1824,34 +1867,52 @@ pub const PackageManager = struct {
// TODO: normalize to alphanumeric
pub fn cachedNPMPackageFolderPrintBasename(buf: []u8, name: string, version: Semver.Version) stringZ {
- const pre_hex_int = version.tag.pre.hash;
- const build_hex_int = version.tag.build.hash;
-
- if (!version.tag.hasPre() and !version.tag.hasBuild()) {
- return std.fmt.bufPrintZ(buf, "{s}@{d}.{d}.{d}", .{ name, version.major, version.minor, version.patch }) catch unreachable;
- } else if (version.tag.hasPre() and version.tag.hasBuild()) {
- return std.fmt.bufPrintZ(
- buf,
- "{s}@{d}.{d}.{d}-{any}+{any}",
- .{ name, version.major, version.minor, version.patch, bun.fmt.hexIntLower(pre_hex_int), bun.fmt.hexIntUpper(build_hex_int) },
- ) catch unreachable;
- } else if (version.tag.hasPre()) {
+ if (version.tag.hasPre()) {
+ if (version.tag.hasBuild()) {
+ return std.fmt.bufPrintZ(
+ buf,
+ "{s}@{d}.{d}.{d}-{any}+{any}",
+ .{
+ name,
+ version.major,
+ version.minor,
+ version.patch,
+ bun.fmt.hexIntLower(version.tag.pre.hash),
+ bun.fmt.hexIntUpper(version.tag.build.hash),
+ },
+ ) catch unreachable;
+ }
return std.fmt.bufPrintZ(
buf,
"{s}@{d}.{d}.{d}-{any}",
- .{ name, version.major, version.minor, version.patch, bun.fmt.hexIntLower(pre_hex_int) },
+ .{
+ name,
+ version.major,
+ version.minor,
+ version.patch,
+ bun.fmt.hexIntLower(version.tag.pre.hash),
+ },
) catch unreachable;
- } else if (version.tag.hasBuild()) {
+ }
+ if (version.tag.hasBuild()) {
return std.fmt.bufPrintZ(
buf,
"{s}@{d}.{d}.{d}+{any}",
- .{ name, version.major, version.minor, version.patch, bun.fmt.hexIntUpper(build_hex_int) },
+ .{
+ name,
+ version.major,
+ version.minor,
+ version.patch,
+ bun.fmt.hexIntUpper(version.tag.build.hash),
+ },
) catch unreachable;
- } else {
- unreachable;
}
-
- unreachable;
+ return std.fmt.bufPrintZ(buf, "{s}@{d}.{d}.{d}", .{
+ name,
+ version.major,
+ version.minor,
+ version.patch,
+ }) catch unreachable;
}
pub fn isFolderInCache(this: *PackageManager, folder_path: stringZ) bool {
@@ -2026,7 +2087,7 @@ pub const PackageManager = struct {
manifest: *const Npm.PackageManifest,
find_result: Npm.PackageManifest.FindResult,
comptime successFn: SuccessFn,
- ) !?ResolvedPackageResult {
+ ) !ResolvedPackageResult {
// Was this package already allocated? Let's reuse the existing one.
if (this.lockfile.getPackageID(
@@ -2619,7 +2680,7 @@ pub const PackageManager = struct {
this.enqueueNetworkTask(network_task);
}
- std.debug.assert(task_id != 0);
+ if (comptime Environment.isDebug) std.debug.assert(task_id != 0);
var manifest_entry_parse = try this.task_queue.getOrPutContext(this.allocator, task_id, .{});
if (!manifest_entry_parse.found_existing) {
@@ -2634,6 +2695,32 @@ pub const PackageManager = struct {
}
return;
},
+ .github => {
+ if (dependency.behavior.isPeer()) return;
+ const dep = dependency.version.value.github;
+ const res = Resolution{
+ .tag = .github,
+ .value = .{
+ .github = dep,
+ },
+ };
+ if (this.lockfile.getPackageID(name_hash, null, res)) |pkg_id| {
+ successFn(this, id, pkg_id);
+ return;
+ }
+ const package = try this.lockfile.appendPackage(.{
+ .name = name,
+ .name_hash = name_hash,
+ .resolution = res,
+ });
+ const url = try this.allocGitHubURL(dep);
+ const task_id = Task.Id.forTarball(url);
+ if (try this.generateNetworkTaskForTarball(task_id, url, package)) |network_task| {
+ network_task.callback.extract.dependency_id = id;
+ this.setPreinstallState(package.meta.id, this.lockfile, .extracting);
+ this.enqueueNetworkTask(network_task);
+ }
+ },
.symlink, .workspace => {
const _result = this.getOrPutResolvedPackage(
alias,
@@ -2704,7 +2791,6 @@ pub const PackageManager = struct {
) catch unreachable;
}
},
-
else => {},
}
}
@@ -2862,6 +2948,67 @@ pub const PackageManager = struct {
}
}
+ fn enqueueDependenciesFromJSON(
+ manager: *PackageManager,
+ package_id: PackageID,
+ data: ExtractData,
+ comptime log_level: Options.LogLevel,
+ ) void {
+ var package = manager.lockfile.packages.get(package_id);
+ switch (package.resolution.tag) {
+ .github => {
+ defer {
+ manager.allocator.free(data.resolved);
+ manager.allocator.free(data.json_buf);
+ }
+ const package_name = package.name;
+ const package_name_hash = package.name_hash;
+ const package_json_source = logger.Source.initPathString(
+ data.json_path,
+ data.json_buf[0..data.json_len],
+ );
+
+ Lockfile.Package.parse(
+ manager.lockfile,
+ &package,
+ manager.allocator,
+ manager.log,
+ package_json_source,
+ void,
+ {},
+ Features.npm,
+ ) catch |err| {
+ if (comptime log_level != .silent) {
+ const string_buf = manager.lockfile.buffers.string_bytes.items;
+ Output.prettyErrorln("<r><red>error:<r> expected package.json in <b>{any}<r> to be a JSON file: {s}\n", .{
+ package.resolution.fmtURL(&manager.options, package_name.slice(string_buf), string_buf),
+ @errorName(err),
+ });
+ }
+ Global.crash();
+ };
+ // package.json might contain a different name than already appended
+ package.name = package_name;
+ package.name_hash = package_name_hash;
+ // store resolved ID from committish
+ var builder = manager.lockfile.stringBuilder();
+ builder.count(data.resolved);
+ builder.allocate() catch unreachable;
+ package.resolution.value.github.resolved = builder.append(String, data.resolved);
+ builder.clamp();
+ manager.lockfile.packages.set(package_id, package);
+
+ if (package.dependencies.len > 0) {
+ manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch unreachable;
+ }
+ if (data.dependency_id < manager.lockfile.buffers.resolutions.items.len) {
+ assignResolution(manager, data.dependency_id, package_id);
+ }
+ },
+ else => {},
+ }
+ }
+
const CacheDir = struct { path: string, is_node_modules: bool };
pub fn fetchCacheDirectoryPath(
env_loader: *DotEnv.Loader,
@@ -3314,7 +3461,7 @@ pub const PackageManager = struct {
manager.setPreinstallState(package_id, manager.lockfile, .done);
if (comptime @TypeOf(callbacks.onExtract) != void) {
- callbacks.onExtract(extract_ctx, package_id, comptime log_level);
+ callbacks.onExtract(extract_ctx, package_id, task.data.extract, comptime log_level);
}
if (comptime log_level.showProgress()) {
@@ -3341,7 +3488,7 @@ pub const PackageManager = struct {
manager.network_resolve_batch = .{};
if (comptime log_level.showProgress()) {
- if (comptime ExtractCompletionContext == void or (@hasField(@TypeOf(callbacks), "progress_bar") and callbacks.progress_bar == true)) {
+ if (@hasField(@TypeOf(callbacks), "progress_bar") and callbacks.progress_bar == true) {
const completed_items = manager.total_tasks - manager.pending_tasks;
if (completed_items != manager.downloads_node.?.unprotected_completed_items or has_updated_this_run) {
manager.downloads_node.?.setCompletedItems(completed_items);
@@ -5517,16 +5664,17 @@ pub const PackageManager = struct {
pub fn installEnqueuedPackages(
this: *PackageInstaller,
package_id: PackageID,
+ data: ExtractData,
comptime log_level: Options.LogLevel,
) void {
const name = this.lockfile.str(&this.names[package_id]);
const resolution = this.resolutions[package_id];
-
- if (this.manager.task_queue.fetchRemove(Task.Id.forNPMPackage(
- Task.Tag.extract,
- name,
- resolution.value.npm.version,
- ))) |removed| {
+ const task_id = switch (resolution.tag) {
+ .github => Task.Id.forTarball(data.url),
+ .npm => Task.Id.forNPMPackage(Task.Tag.extract, name, resolution.value.npm.version),
+ else => unreachable,
+ };
+ if (this.manager.task_queue.fetchRemove(task_id)) |removed| {
var callbacks = removed.value;
defer callbacks.deinit(this.manager.allocator);
@@ -5572,6 +5720,10 @@ pub const PackageManager = struct {
installer.cache_dir_subpath = this.manager.cachedNPMPackageFolderName(name, resolution.value.npm.version);
installer.cache_dir = this.manager.getCacheDirectory();
},
+ .github => {
+ installer.cache_dir_subpath = this.manager.cachedGitHubFolderName(resolution.value.github);
+ installer.cache_dir = this.manager.getCacheDirectory();
+ },
.folder => {
const folder = resolution.value.folder.slice(buf);
// Handle when a package depends on itself via file:
@@ -5735,6 +5887,15 @@ pub const PackageManager = struct {
.fail => |cause| {
if (cause.isPackageMissingFromCache()) {
switch (resolution.tag) {
+ .github => {
+ this.manager.enqueueTarballForDownload(
+ package_id,
+ resolution.value.github,
+ .{
+ .node_modules_folder = @intCast(u32, this.node_modules_folder.dir.fd),
+ },
+ );
+ },
.npm => {
std.debug.assert(resolution.value.npm.url.len() > 0);
this.manager.enqueuePackageForDownload(
@@ -5826,6 +5987,34 @@ pub const PackageManager = struct {
}
}
+ pub fn enqueueTarballForDownload(
+ this: *PackageManager,
+ package_id: PackageID,
+ repository: Repository,
+ task_context: TaskCallbackContext,
+ ) void {
+ const url = this.allocGitHubURL(repository) catch unreachable;
+ const task_id = Task.Id.forTarball(url);
+ var task_queue = this.task_queue.getOrPut(this.allocator, task_id) catch unreachable;
+ if (!task_queue.found_existing) {
+ task_queue.value_ptr.* = .{};
+ }
+
+ task_queue.value_ptr.append(
+ this.allocator,
+ task_context,
+ ) catch unreachable;
+
+ if (!task_queue.found_existing) {
+ if (this.generateNetworkTaskForTarball(task_id, url, this.lockfile.packages.get(package_id)) catch unreachable) |task| {
+ task.schedule(&this.network_tarball_batch);
+ if (this.network_tarball_batch.len > 0) {
+ _ = this.scheduleNetworkTasks();
+ }
+ }
+ }
+ }
+
pub fn installPackages(
this: *PackageManager,
lockfile_: *Lockfile,
@@ -6385,15 +6574,19 @@ pub const PackageManager = struct {
Output.flush();
}
- {
- while (manager.pending_tasks > 0) : (manager.sleep()) {
- try manager.runTasks(void, void{}, .{
- .onExtract = void{},
+ while (manager.pending_tasks > 0) : (manager.sleep()) {
+ try manager.runTasks(
+ *PackageManager,
+ manager,
+ .{
+ .onExtract = PackageManager.enqueueDependenciesFromJSON,
.onResolve = void{},
.onPackageManifestError = void{},
.onPackageDownloadError = void{},
- }, log_level);
- }
+ .progress_bar = true,
+ },
+ log_level,
+ );
}
if (comptime log_level.showProgress()) {
diff --git a/src/install/repository.zig b/src/install/repository.zig
index 109e24ce9..a3e8dcf38 100644
--- a/src/install/repository.zig
+++ b/src/install/repository.zig
@@ -1,16 +1,17 @@
+const Environment = @import("../env.zig");
const PackageManager = @import("./install.zig").PackageManager;
const Semver = @import("./semver.zig");
const ExternalString = Semver.ExternalString;
const String = Semver.String;
const std = @import("std");
-const GitSHA = String;
const string = @import("../string_types.zig").string;
-const Environment = @import("../env.zig");
+const GitSHA = String;
pub const Repository = extern struct {
owner: String = String{},
repo: String = String{},
committish: GitSHA = GitSHA{},
+ resolved: String = String{},
pub fn order(lhs: *const Repository, rhs: *const Repository, lhs_buf: []const u8, rhs_buf: []const u8) std.math.Order {
const owner_order = lhs.owner.order(&rhs.owner, lhs_buf, rhs_buf);
@@ -25,6 +26,7 @@ pub const Repository = extern struct {
builder.count(this.owner.slice(buf));
builder.count(this.repo.slice(buf));
builder.count(this.committish.slice(buf));
+ builder.count(this.resolved.slice(buf));
}
pub fn clone(this: Repository, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) Repository {
@@ -32,6 +34,7 @@ pub const Repository = extern struct {
.owner = builder.append(String, this.owner.slice(buf)),
.repo = builder.append(String, this.repo.slice(buf)),
.committish = builder.append(GitSHA, this.committish.slice(buf)),
+ .resolved = builder.append(GitSHA, this.resolved.slice(buf)),
};
}
@@ -57,6 +60,7 @@ pub const Repository = extern struct {
try writer.writeAll(":");
try writer.writeAll(formatter.repository.owner.slice(formatter.buf));
+ try writer.writeAll("/");
try writer.writeAll(formatter.repository.repo.slice(formatter.buf));
if (!formatter.repository.committish.isEmpty()) {
diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig
index df260125f..26eb5c5a3 100644
--- a/src/libarchive/libarchive.zig
+++ b/src/libarchive/libarchive.zig
@@ -468,6 +468,36 @@ pub const Archive = struct {
}
}
+ pub fn readFirstDirname(
+ file_buffer: []const u8,
+ ) !string {
+ var entry: *lib.archive_entry = undefined;
+
+ var stream: BufferReadStream = undefined;
+ stream.init(file_buffer);
+ defer stream.deinit();
+ _ = stream.openRead();
+ var archive = stream.archive;
+
+ return brk: {
+ while (true) {
+ const r = @intToEnum(Status, lib.archive_read_next_header(archive, &entry));
+
+ switch (r) {
+ Status.eof => break,
+ Status.retry => continue,
+ Status.failed, Status.fatal => return error.Fail,
+ else => {
+ var pathname: [:0]const u8 = std.mem.sliceTo(lib.archive_entry_pathname(entry).?, 0);
+ var tokenizer = std.mem.tokenize(u8, std.mem.span(pathname), std.fs.path.sep_str);
+
+ if (tokenizer.next()) |name| break :brk name;
+ },
+ }
+ }
+ };
+ }
+
pub fn extractToDir(
file_buffer: []const u8,
dir_: std.fs.IterableDir,
diff --git a/test/bun.js/install/bun-install.test.ts b/test/bun.js/install/bun-install.test.ts
index de9fb5755..cdad571fc 100644
--- a/test/bun.js/install/bun-install.test.ts
+++ b/test/bun.js/install/bun-install.test.ts
@@ -15,6 +15,37 @@ import { tmpdir } from "os";
let handler, package_dir, requested, server;
+function dummyRegistry(urls, version = "0.0.2") {
+ return async (request) => {
+ urls.push(request.url);
+ expect(request.method).toBe("GET");
+ if (request.url.endsWith(".tgz")) {
+ return new Response(file(join(import.meta.dir, "tarball.tgz")));
+ }
+ expect(request.headers.get("accept")).toBe(
+ "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
+ );
+ expect(request.headers.get("npm-auth-type")).toBe(null);
+ expect(await request.text()).toBe("");
+ const name = request.url.slice(request.url.lastIndexOf("/") + 1);
+ return new Response(JSON.stringify({
+ name,
+ versions: {
+ [version]: {
+ name,
+ version,
+ dist: {
+ tarball: `${request.url}.tgz`,
+ },
+ },
+ },
+ "dist-tags": {
+ latest: version,
+ },
+ }));
+ };
+}
+
async function readdirSorted(path: PathLike): Promise<string[]> {
const results = await readdir(path);
results.sort();
@@ -557,37 +588,6 @@ it("should handle life-cycle scripts within workspaces", async () => {
await access(join(package_dir, "bun.lockb"));
});
-function dummyRegistry(urls, version = "0.0.2") {
- return async (request) => {
- urls.push(request.url);
- expect(request.method).toBe("GET");
- if (request.url.endsWith(".tgz")) {
- return new Response(file(join(import.meta.dir, "tarball.tgz")));
- }
- expect(request.headers.get("accept")).toBe(
- "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
- );
- expect(request.headers.get("npm-auth-type")).toBe(null);
- expect(await request.text()).toBe("");
- const name = request.url.slice(request.url.lastIndexOf("/") + 1);
- return new Response(JSON.stringify({
- name,
- versions: {
- [version]: {
- name,
- version,
- dist: {
- tarball: `${request.url}.tgz`,
- },
- },
- },
- "dist-tags": {
- latest: version,
- },
- }));
- };
-}
-
it("should handle ^0 in dependencies", async () => {
const urls: string[] = [];
handler = dummyRegistry(urls);
@@ -1192,3 +1192,336 @@ it("should not reinstall aliased dependencies", async () => {
});
await access(join(package_dir, "bun.lockb"));
});
+
+it("should handle GitHub URL in dependencies (user/repo)", async () => {
+ const urls: string[] = [];
+ handler = dummyRegistry(urls);
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "Foo",
+ version: "0.0.1",
+ dependencies: {
+ "uglify": "mishoo/UglifyJS",
+ },
+ }),
+ );
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + uglify@github:mishoo/UglifyJS",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls).toEqual([]);
+ expect(requested).toBe(0);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
+ ".cache",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "uglifyjs",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
+ ".gitattributes",
+ ".github",
+ ".gitignore",
+ "CONTRIBUTING.md",
+ "LICENSE",
+ "README.md",
+ "bin",
+ "lib",
+ "package.json",
+ "test",
+ "tools",
+ ]);
+ var package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
+ expect(package_json.name).toBe("uglify-js");
+ await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle GitHub URL in dependencies (user/repo#commit-id)", async () => {
+ const urls: string[] = [];
+ handler = dummyRegistry(urls);
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "Foo",
+ version: "0.0.1",
+ dependencies: {
+ "uglify": "mishoo/UglifyJS#e219a9a",
+ },
+ }),
+ );
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + uglify@github:mishoo/UglifyJS#e219a9a",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls).toEqual([]);
+ expect(requested).toBe(0);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
+ ".cache",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "uglifyjs",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".cache"))).toEqual([
+ "@GH@mishoo-UglifyJS-e219a9a",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".cache", "uglify"))).toEqual([
+ "mishoo-UglifyJS-e219a9a",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".cache", "uglify", "mishoo-UglifyJS-e219a9a"))).toBe(
+ join(package_dir, "node_modules", ".cache", "@GH@mishoo-UglifyJS-e219a9a"),
+ );
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
+ ".gitattributes",
+ ".github",
+ ".gitignore",
+ "CONTRIBUTING.md",
+ "LICENSE",
+ "README.md",
+ "bin",
+ "lib",
+ "package.json",
+ "test",
+ "tools",
+ ]);
+ var package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
+ expect(package_json.name).toBe("uglify-js");
+ expect(package_json.version).toBe("3.14.1");
+ await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle GitHub URL in dependencies (user/repo#tag)", async () => {
+ const urls: string[] = [];
+ handler = dummyRegistry(urls);
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "Foo",
+ version: "0.0.1",
+ dependencies: {
+ "uglify": "mishoo/UglifyJS#v3.14.1",
+ },
+ }),
+ );
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + uglify@github:mishoo/UglifyJS#v3.14.1",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls).toEqual([]);
+ expect(requested).toBe(0);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
+ ".cache",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "uglifyjs",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".cache"))).toEqual([
+ "@GH@mishoo-UglifyJS-e219a9a",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".cache", "uglify"))).toEqual([
+ "mishoo-UglifyJS-e219a9a",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".cache", "uglify", "mishoo-UglifyJS-e219a9a"))).toBe(
+ join(package_dir, "node_modules", ".cache", "@GH@mishoo-UglifyJS-e219a9a"),
+ );
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
+ ".gitattributes",
+ ".github",
+ ".gitignore",
+ "CONTRIBUTING.md",
+ "LICENSE",
+ "README.md",
+ "bin",
+ "lib",
+ "package.json",
+ "test",
+ "tools",
+ ]);
+ var package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
+ expect(package_json.name).toBe("uglify-js");
+ expect(package_json.version).toBe("3.14.1");
+ await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle GitHub URL in dependencies (github:user/repo#tag)", async () => {
+ const urls: string[] = [];
+ handler = dummyRegistry(urls);
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "Foo",
+ version: "0.0.1",
+ dependencies: {
+ "uglify": "github:mishoo/UglifyJS#v3.14.1",
+ },
+ }),
+ );
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + uglify@github:mishoo/UglifyJS#v3.14.1",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls).toEqual([]);
+ expect(requested).toBe(0);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
+ ".cache",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "uglifyjs",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".cache"))).toEqual([
+ "@GH@mishoo-UglifyJS-e219a9a",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".cache", "uglify"))).toEqual([
+ "mishoo-UglifyJS-e219a9a",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".cache", "uglify", "mishoo-UglifyJS-e219a9a"))).toBe(
+ join(package_dir, "node_modules", ".cache", "@GH@mishoo-UglifyJS-e219a9a"),
+ );
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
+ ".gitattributes",
+ ".github",
+ ".gitignore",
+ "CONTRIBUTING.md",
+ "LICENSE",
+ "README.md",
+ "bin",
+ "lib",
+ "package.json",
+ "test",
+ "tools",
+ ]);
+ var package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
+ expect(package_json.name).toBe("uglify-js");
+ expect(package_json.version).toBe("3.14.1");
+ await access(join(package_dir, "bun.lockb"));
+});
+
+it("should handle GitHub URL in dependencies (https://github.com/user/repo.git)", async () => {
+ const urls: string[] = [];
+ handler = dummyRegistry(urls);
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "Foo",
+ version: "0.0.1",
+ dependencies: {
+ "uglify": "https://github.com/mishoo/UglifyJS.git",
+ },
+ }),
+ );
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ const out = await new Response(stdout).text();
+ expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + uglify@github:mishoo/UglifyJS",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls).toEqual([]);
+ expect(requested).toBe(0);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
+ ".cache",
+ "uglify",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "uglifyjs",
+ ]);
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
+ ".gitattributes",
+ ".github",
+ ".gitignore",
+ "CONTRIBUTING.md",
+ "LICENSE",
+ "README.md",
+ "bin",
+ "lib",
+ "package.json",
+ "test",
+ "tools",
+ ]);
+ var package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
+ expect(package_json.name).toBe("uglify-js");
+ await access(join(package_dir, "bun.lockb"));
+});