about | summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
-rw-r--r--src/install/dependency.zig115
-rw-r--r--src/install/install.zig92
-rw-r--r--src/install/lockfile.zig39
-rw-r--r--src/install/repository.zig23
-rw-r--r--test/bun.js/install/bun-add.test.ts102
-rw-r--r--test/bun.js/install/bun-install.test.ts61
6 files changed, 339 insertions(+), 93 deletions(-)
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index 3e5ae8a77..902195170 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -159,6 +159,68 @@ pub fn toExternal(this: Dependency) External {
return bytes;
}
+pub inline fn isSCPLikePath(dependency: string) bool {
+ // Shortest valid expression: h:p
+ if (dependency.len < 3) return false;
+
+ var at_index: ?usize = null;
+
+ for (dependency) |c, i| {
+ switch (c) {
+ '@' => {
+ if (at_index == null) at_index = i;
+ },
+ ':' => {
+ if (strings.hasPrefixComptime(dependency[i..], "://")) return false;
+ return i > if (at_index) |index| index + 1 else 0;
+ },
+ '/' => return if (at_index) |index| i > index + 1 else false,
+ else => {},
+ }
+ }
+
+ return false;
+}
+
+pub inline fn isGitHubRepoPath(dependency: string) bool {
+ // Shortest valid expression: u/r
+ if (dependency.len < 3) return false;
+
+ var hash_index: usize = 0;
+ var slash_index: usize = 0;
+
+ for (dependency) |c, i| {
+ switch (c) {
+ '/' => {
+ if (i == 0) return false;
+ if (slash_index > 0) return false;
+ slash_index = i;
+ },
+ '#' => {
+ if (i == 0) return false;
+ if (hash_index > 0) return false;
+ if (slash_index == 0) return false;
+ hash_index = i;
+ },
+ // Not allowed in username
+ '.', '_' => {
+ if (slash_index == 0) return false;
+ },
+ // Must be alphanumeric
+ '-', 'a'...'z', 'A'...'Z', '0'...'9' => {},
+ else => return false,
+ }
+ }
+
+ return hash_index != dependency.len - 1 and slash_index > 0 and slash_index != dependency.len - 1;
+}
+
+// This won't work for query string params, but I'll let someone file an issue
+// before I add that.
+pub inline fn isTarball(dependency: string) bool {
+ return strings.endsWithComptime(dependency, ".tgz") or strings.endsWithComptime(dependency, ".tar.gz");
+}
+
pub const Version = struct {
tag: Dependency.Version.Tag = .uninitialized,
literal: String = .{},
@@ -278,45 +340,6 @@ pub const Version = struct {
return @enumToInt(this) < 3;
}
- pub inline fn isGitHubRepoPath(dependency: string) bool {
- // Shortest valid expression: u/r
- if (dependency.len < 3) return false;
-
- var hash_index: usize = 0;
- var slash_index: usize = 0;
-
- for (dependency) |c, i| {
- switch (c) {
- '/' => {
- if (i == 0) return false;
- if (slash_index > 0) return false;
- slash_index = i;
- },
- '#' => {
- if (i == 0) return false;
- if (hash_index > 0) return false;
- if (slash_index == 0) return false;
- hash_index = i;
- },
- // Not allowed in username
- '.', '_' => {
- if (slash_index == 0) return false;
- },
- // Must be alphanumeric
- '-', 'a'...'z', 'A'...'Z', '0'...'9' => {},
- else => return false,
- }
- }
-
- return hash_index != dependency.len - 1 and slash_index > 0 and slash_index != dependency.len - 1;
- }
-
- // this won't work for query string params
- // i'll let someone file an issue before I add that
- pub inline fn isTarball(dependency: string) bool {
- return strings.endsWithComptime(dependency, ".tgz") or strings.endsWithComptime(dependency, ".tar.gz");
- }
-
pub fn infer(dependency: string) Tag {
// empty string means `latest`
if (dependency.len == 0) return .dist_tag;
@@ -482,9 +505,11 @@ pub const Version = struct {
// verilog
// verilog.tar.gz
// verilog/repo
+ // virt@example.com:repo.git
'v' => {
if (isTarball(dependency)) return .tarball;
if (isGitHubRepoPath(dependency)) return .github;
+ if (isSCPLikePath(dependency)) return .git;
if (dependency.len == 1) return .dist_tag;
return switch (dependency[1]) {
'0'...'9' => .npm,
@@ -514,6 +539,8 @@ pub const Version = struct {
// user/repo
// user/repo#main
if (isGitHubRepoPath(dependency)) return .github;
+ // git@example.com:path/to/repo.git
+ if (isSCPLikePath(dependency)) return .git;
// beta
return .dist_tag;
}
@@ -563,14 +590,6 @@ pub fn eql(
return a.name_hash == b.name_hash and a.name.len() == b.name.len() and a.version.eql(&b.version, lhs_buf, rhs_buf);
}
-pub fn eqlResolved(a: *const Dependency, b: *const Dependency) bool {
- if (a.isNPM() and b.tag.isNPM()) {
- return a.resolution == b.resolution;
- }
-
- return @as(Dependency.Version.Tag, a.version) == @as(Dependency.Version.Tag, b.version) and a.resolution == b.resolution;
-}
-
pub inline fn parse(
allocator: std.mem.Allocator,
alias: String,
@@ -749,7 +768,7 @@ pub fn parseWithTag(
}
}
- if (comptime Environment.allow_assert) std.debug.assert(Version.Tag.isGitHubRepoPath(input));
+ if (comptime Environment.allow_assert) std.debug.assert(isGitHubRepoPath(input));
var hash_index: usize = 0;
var slash_index: usize = 0;
diff --git a/src/install/install.zig b/src/install/install.zig
index e0e509f16..c4de35fe9 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -34,8 +34,6 @@ const HeaderBuilder = @import("bun").HTTP.HeaderBuilder;
const Fs = @import("../fs.zig");
const FileSystem = Fs.FileSystem;
const Lock = @import("../lock.zig").Lock;
-var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
-var path_buf2: [bun.MAX_PATH_BYTES]u8 = undefined;
const URL = @import("../url.zig").URL;
const AsyncHTTP = @import("bun").HTTP.AsyncHTTP;
const HTTPChannel = @import("bun").HTTP.HTTPChannel;
@@ -600,14 +598,27 @@ const Task = struct {
},
.git_clone => {
const manager = this.package_manager;
- const dir = Repository.download(
+ const name = this.request.git_clone.name.slice();
+ const url = this.request.git_clone.url.slice();
+ const dir = brk: {
+ if (Repository.tryHTTPS(url)) |https| break :brk Repository.download(
+ manager.allocator,
+ manager.env,
+ manager.log,
+ manager.getCacheDirectory().dir,
+ this.id,
+ name,
+ https,
+ ) catch null;
+ break :brk null;
+ } orelse Repository.download(
manager.allocator,
manager.env,
manager.log,
manager.getCacheDirectory().dir,
this.id,
- this.request.git_clone.name.slice(),
- this.request.git_clone.url.slice(),
+ name,
+ url,
) catch |err| {
this.err = err;
this.status = Status.fail;
@@ -1907,7 +1918,8 @@ pub const PackageManager = struct {
if (this.options.log_level != .silent) {
const elapsed = timer.read();
if (elapsed > std.time.ns_per_ms * 100) {
- var cache_dir_path = bun.getFdPath(cache_directory.dir.fd, &path_buf) catch "it's";
+ var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ const cache_dir_path = bun.getFdPath(cache_directory.dir.fd, &path_buf) catch "it";
Output.prettyErrorln(
"<r><yellow>warn<r>: Slow filesystem detected. If {s} is a network drive, consider setting $BUN_INSTALL_CACHE_DIR to a local folder.",
.{cache_dir_path},
@@ -2379,21 +2391,33 @@ pub const PackageManager = struct {
const SuccessFn = *const fn (*PackageManager, DependencyID, PackageID) void;
const FailFn = *const fn (*PackageManager, *const Dependency, PackageID, anyerror) void;
fn assignResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void {
+ const buffers = &this.lockfile.buffers;
if (comptime Environment.allow_assert) {
- std.debug.assert(dependency_id < this.lockfile.buffers.resolutions.items.len);
+ std.debug.assert(dependency_id < buffers.resolutions.items.len);
std.debug.assert(package_id < this.lockfile.packages.len);
- std.debug.assert(this.lockfile.buffers.resolutions.items[dependency_id] == invalid_package_id);
+ std.debug.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
+ }
+ buffers.resolutions.items[dependency_id] = package_id;
+ var dep = &buffers.dependencies.items[dependency_id];
+ if (dep.name.isEmpty()) {
+ dep.name = this.lockfile.packages.items(.name)[package_id];
+ dep.name_hash = this.lockfile.packages.items(.name_hash)[package_id];
}
- this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
}
fn assignRootResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void {
+ const buffers = &this.lockfile.buffers;
if (comptime Environment.allow_assert) {
- std.debug.assert(dependency_id < this.lockfile.buffers.resolutions.items.len);
+ std.debug.assert(dependency_id < buffers.resolutions.items.len);
std.debug.assert(package_id < this.lockfile.packages.len);
- std.debug.assert(this.lockfile.buffers.resolutions.items[dependency_id] == invalid_package_id);
+ std.debug.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
+ }
+ buffers.resolutions.items[dependency_id] = package_id;
+ var dep = &buffers.dependencies.items[dependency_id];
+ if (dep.name.isEmpty()) {
+ dep.name = this.lockfile.packages.items(.name)[package_id];
+ dep.name_hash = this.lockfile.packages.items(.name_hash)[package_id];
}
- this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
}
fn getOrPutResolvedPackage(
@@ -5478,6 +5502,13 @@ pub const PackageManager = struct {
pub const Array = std.BoundedArray(UpdateRequest, 64);
+ pub inline fn matches(this: PackageManager.UpdateRequest, dependency: Dependency, string_buf: []const u8) bool {
+ return this.name_hash == if (this.name.len == 0)
+ String.Builder.stringHash(dependency.version.literal.slice(string_buf))
+ else
+ dependency.name_hash;
+ }
+
pub fn parse(
allocator: std.mem.Allocator,
log: *logger.Log,
@@ -5492,7 +5523,7 @@ pub const PackageManager = struct {
var input = std.mem.trim(u8, positional, " \n\r\t");
switch (op) {
.link, .unlink => if (!strings.hasPrefixComptime(input, "link:")) {
- input = std.fmt.allocPrint(allocator, "link:{s}", .{input}) catch unreachable;
+ input = std.fmt.allocPrint(allocator, "{0s}@link:{0s}", .{input}) catch unreachable;
},
else => {},
}
@@ -5526,6 +5557,19 @@ pub const PackageManager = struct {
});
Global.crash();
};
+ if (alias != null and version.tag == .git) {
+ if (Dependency.parseWithOptionalTag(
+ allocator,
+ placeholder,
+ input,
+ null,
+ &SlicedString.init(input, input),
+ log,
+ )) |ver| {
+ alias = null;
+ version = ver;
+ }
+ }
if (switch (version.tag) {
.dist_tag => version.value.dist_tag.name.eql(placeholder, input, input),
.npm => version.value.npm.name.eql(placeholder, input, input),
@@ -5538,18 +5582,16 @@ pub const PackageManager = struct {
}
var request = UpdateRequest{
- .name = allocator.dupe(u8, alias orelse switch (version.tag) {
- .dist_tag => version.value.dist_tag.name,
- .github => version.value.github.repo,
- .npm => version.value.npm.name,
- .symlink => version.value.symlink,
- else => version.literal,
- }.slice(input)) catch unreachable,
- .is_aliased = alias != null,
.version = version,
.version_buf = input,
};
- request.name_hash = String.Builder.stringHash(request.name);
+ if (alias) |name| {
+ request.is_aliased = true;
+ request.name = allocator.dupe(u8, name) catch unreachable;
+ request.name_hash = String.Builder.stringHash(name);
+ } else {
+ request.name_hash = String.Builder.stringHash(version.literal.slice(input));
+ }
for (update_requests.constSlice()) |*prev| {
if (prev.name_hash == request.name_hash and request.name.len == prev.name.len) continue :outer;
@@ -6924,11 +6966,11 @@ pub const PackageManager = struct {
_ = manager.getTemporaryDirectory();
}
manager.enqueueDependencyList(root.dependencies, true);
+ } else {
+ // Anything that needs to be downloaded from an update needs to be scheduled here
+ manager.drainDependencyList();
}
- // Anything that needs to be downloaded from an update needs to be scheduled here
- manager.drainDependencyList();
-
if (manager.pending_tasks > 0) {
if (root.dependencies.len > 0) {
_ = manager.getCacheDirectory();
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index d48d2b13c..ed60f55cd 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -698,15 +698,15 @@ pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile
const res_list = slice.items(.resolutions)[0];
const root_deps: []const Dependency = dep_list.get(new.buffers.dependencies.items);
const resolved_ids: []const PackageID = res_list.get(new.buffers.resolutions.items);
+ const string_buf = new.buffers.string_bytes.items;
for (updates) |*update| {
if (update.resolution.tag == .uninitialized) {
- const name_hash = String.Builder.stringHash(update.name);
for (root_deps) |dep, i| {
- if (dep.name_hash == name_hash) {
+ if (update.matches(dep, string_buf)) {
const package_id = resolved_ids[i];
if (package_id > new.packages.len) continue;
- update.version_buf = new.buffers.string_bytes.items;
+ update.version_buf = string_buf;
update.version = dep.version;
update.resolution = resolutions[package_id];
update.resolved_name = names[package_id];
@@ -988,10 +988,9 @@ pub const Printer = struct {
const package_name = dependency.name.slice(string_buf);
if (this.updates.len > 0) {
- const name_hash = dependency.name_hash;
for (this.updates) |update, update_id| {
if (update.failed) return;
- if (update.name.len == package_name.len and name_hash == update.name_hash) {
+ if (update.matches(dependency, string_buf)) {
if (id_map[update_id] == invalid_package_id) {
id_map[update_id] = @truncate(DependencyID, dep_id);
}
@@ -1027,11 +1026,9 @@ pub const Printer = struct {
const package_name = dependency.name.slice(string_buf);
if (this.updates.len > 0) {
- const name_hash = dependency.name_hash;
for (this.updates) |update, update_id| {
if (update.failed) return;
-
- if (update.name.len == package_name.len and name_hash == update.name_hash) {
+ if (update.matches(dependency, string_buf)) {
if (id_map[update_id] == invalid_package_id) {
id_map[update_id] = @truncate(DependencyID, dep_id);
}
@@ -1356,14 +1353,24 @@ pub fn verifyResolutions(this: *Lockfile, local_features: Features, remote_featu
else
remote_features,
)) continue;
- if (log_level != .silent)
- Output.prettyErrorln(
- "<r><red>error<r><d>:<r> <b>{s}<r><d>@<b>{}<r><d> failed to resolve<r>\n",
- .{
- failed_dep.name.slice(string_buf),
- failed_dep.version.literal.fmt(string_buf),
- },
- );
+ if (log_level != .silent) {
+ if (failed_dep.name.isEmpty()) {
+ Output.prettyErrorln(
+ "<r><red>error<r><d>:<r> <b>{}<r><d> failed to resolve<r>\n",
+ .{
+ failed_dep.version.literal.fmt(string_buf),
+ },
+ );
+ } else {
+ Output.prettyErrorln(
+ "<r><red>error<r><d>:<r> <b>{s}<r><d>@<b>{}<r><d> failed to resolve<r>\n",
+ .{
+ failed_dep.name.slice(string_buf),
+ failed_dep.version.literal.fmt(string_buf),
+ },
+ );
+ }
+ }
// track this so we can log each failure instead of just the first
any_failed = true;
}
diff --git a/src/install/repository.zig b/src/install/repository.zig
index a1a8396c2..557953d76 100644
--- a/src/install/repository.zig
+++ b/src/install/repository.zig
@@ -1,6 +1,7 @@
const bun = @import("bun");
const Global = bun.Global;
const logger = bun.logger;
+const Dependency = @import("./dependency.zig");
const DotEnv = @import("../env_loader.zig");
const Environment = @import("../env.zig");
const FileSystem = @import("../fs.zig").FileSystem;
@@ -12,6 +13,7 @@ const ExternalString = Semver.ExternalString;
const String = Semver.String;
const std = @import("std");
const string = @import("../string_types.zig").string;
+const strings = @import("../string_immutable.zig");
const GitSHA = String;
threadlocal var final_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
@@ -80,11 +82,14 @@ pub const Repository = extern struct {
if (Environment.allow_assert) std.debug.assert(formatter.label.len > 0);
try writer.writeAll(formatter.label);
+ const repo = formatter.repository.repo.slice(formatter.buf);
if (!formatter.repository.owner.isEmpty()) {
try writer.writeAll(formatter.repository.owner.slice(formatter.buf));
try writer.writeAll("/");
+ } else if (Dependency.isSCPLikePath(repo)) {
+ try writer.writeAll("ssh://");
}
- try writer.writeAll(formatter.repository.repo.slice(formatter.buf));
+ try writer.writeAll(repo);
if (!formatter.repository.resolved.isEmpty()) {
try writer.writeAll("#");
@@ -116,6 +121,22 @@ pub const Repository = extern struct {
return error.InstallFailed;
}
+ pub fn tryHTTPS(url: string) ?string {
+ if (strings.hasPrefixComptime(url, "ssh://")) {
+ final_path_buf[0.."https".len].* = "https".*;
+ std.mem.copy(u8, final_path_buf["https".len..], url["ssh".len..]);
+ return final_path_buf[0..(url.len - "ssh".len + "https".len)];
+ }
+ if (Dependency.isSCPLikePath(url)) {
+ final_path_buf[0.."https://".len].* = "https://".*;
+ var rest = final_path_buf["https://".len..];
+ std.mem.copy(u8, rest, url);
+ if (strings.indexOfChar(rest, ':')) |colon| rest[colon] = '/';
+ return final_path_buf[0..(url.len + "https://".len)];
+ }
+ return null;
+ }
+
pub fn download(
allocator: std.mem.Allocator,
env: *DotEnv.Loader,
diff --git a/test/bun.js/install/bun-add.test.ts b/test/bun.js/install/bun-add.test.ts
index ef8025a6a..76c4f39c4 100644
--- a/test/bun.js/install/bun-add.test.ts
+++ b/test/bun.js/install/bun-add.test.ts
@@ -63,7 +63,7 @@ it("should add existing package", async () => {
const out = await new Response(stdout).text();
expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
"",
- ` installed file:${add_path}@${add_path}`,
+ ` installed foo@${add_path}`,
"",
"",
" 1 packages installed",
@@ -99,7 +99,7 @@ it("should reject missing package", async () => {
const err = await new Response(stderr).text();
expect(err.replace(/^(.*?) v[^\n]+/, "$1").split(/\r?\n/)).toEqual([
"bun add",
- `error: file:${add_path}@file:${add_path} failed to resolve`,
+ `error: file:${add_path} failed to resolve`,
"",
]);
expect(stdout).toBeDefined();
@@ -140,7 +140,7 @@ it("should reject invalid path without segfault", async () => {
const err = await new Response(stderr).text();
expect(err.replace(/^(.*?) v[^\n]+/, "$1").split(/\r?\n/)).toEqual([
"bun add",
- `error: file://${add_path}@file://${add_path} failed to resolve`,
+ `error: file://${add_path} failed to resolve`,
"",
]);
expect(stdout).toBeDefined();
@@ -763,3 +763,99 @@ it("should install version tagged with `latest` by default", async () => {
});
await access(join(package_dir, "bun.lockb"));
});
+
+it("should handle Git URL in dependencies (SCP-style)", async () => {
+ const urls: string[] = [];
+ setHandler(dummyRegistry(urls));
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "foo",
+ version: "0.0.1",
+ }),
+ );
+ const {
+ stdout: stdout1,
+ stderr: stderr1,
+ exited: exited1,
+ } = spawn({
+ cmd: [bunExe(), "add", "bun@github.com:mishoo/UglifyJS.git", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr1).toBeDefined();
+ const err1 = await new Response(stderr1).text();
+ expect(err1).toContain("Saved lockfile");
+ expect(stdout1).toBeDefined();
+ let out1 = await new Response(stdout1).text();
+ out1 = out1.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "");
+ out1 = out1.replace(/(\.git)#[a-f0-9]+/, "$1");
+ expect(out1.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ "",
+ " installed uglify-js@git+ssh://bun@github.com:mishoo/UglifyJS.git with binaries:",
+ " - uglifyjs",
+ "",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited1).toBe(0);
+ expect(urls.sort()).toEqual([]);
+ expect(requested).toBe(0);
+ expect(await file(join(package_dir, "package.json")).json()).toEqual({
+ name: "foo",
+ version: "0.0.1",
+ dependencies: {
+ "uglify-js": "bun@github.com:mishoo/UglifyJS.git",
+ },
+ });
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "uglify-js"]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["uglifyjs"]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "uglifyjs"))).toBe(
+ join("..", "uglify-js", "bin", "uglifyjs"),
+ );
+ expect((await readdirSorted(join(package_dir, "node_modules", ".cache")))[0]).toBe("9d05c118f06c3b4c.git");
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify-js"))).toEqual([
+ ".bun-tag",
+ ".gitattributes",
+ ".github",
+ ".gitignore",
+ "CONTRIBUTING.md",
+ "LICENSE",
+ "README.md",
+ "bin",
+ "lib",
+ "package.json",
+ "test",
+ "tools",
+ ]);
+ const package_json = await file(join(package_dir, "node_modules", "uglify-js", "package.json")).json();
+ expect(package_json.name).toBe("uglify-js");
+ await access(join(package_dir, "bun.lockb"));
+ const {
+ stdout: stdout2,
+ stderr: stderr2,
+ exited: exited2,
+ } = spawn({
+ cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr2).toBeDefined();
+ const err2 = await new Response(stderr2).text();
+ expect(err2).not.toContain("Saved lockfile");
+ expect(stdout2).toBeDefined();
+ const out2 = await new Response(stdout2).text();
+ expect(out2.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ "",
+ "Checked 1 installs across 2 packages (no changes)",
+ ]);
+ expect(await exited2).toBe(0);
+ expect(urls.sort()).toEqual([]);
+ expect(requested).toBe(0);
+});
diff --git a/test/bun.js/install/bun-install.test.ts b/test/bun.js/install/bun-install.test.ts
index 360ab41ae..df997bf25 100644
--- a/test/bun.js/install/bun-install.test.ts
+++ b/test/bun.js/install/bun-install.test.ts
@@ -2137,6 +2137,67 @@ it("should handle Git URL in dependencies", async () => {
await access(join(package_dir, "bun.lockb"));
});
+it("should handle Git URL in dependencies (SCP-style)", async () => {
+ const urls: string[] = [];
+ setHandler(dummyRegistry(urls));
+ await writeFile(
+ join(package_dir, "package.json"),
+ JSON.stringify({
+ name: "foo",
+ version: "0.0.1",
+ dependencies: {
+ uglify: "github.com:mishoo/UglifyJS.git",
+ },
+ }),
+ );
+ const { stdout, stderr, exited } = spawn({
+ cmd: [bunExe(), "install", "--config", import.meta.dir + "/basic.toml"],
+ cwd: package_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env,
+ });
+ expect(stderr).toBeDefined();
+ const err = await new Response(stderr).text();
+ expect(err).toContain("Saved lockfile");
+ expect(stdout).toBeDefined();
+ let out = await new Response(stdout).text();
+ out = out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "");
+ out = out.replace(/(\.git)#[a-f0-9]+/, "$1");
+ expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+ " + uglify@git+ssh://github.com:mishoo/UglifyJS.git",
+ "",
+ " 1 packages installed",
+ ]);
+ expect(await exited).toBe(0);
+ expect(urls.sort()).toEqual([]);
+ expect(requested).toBe(0);
+ expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "uglify"]);
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["uglifyjs"]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "uglifyjs"))).toBe(
+ join("..", "uglify", "bin", "uglifyjs"),
+ );
+ expect((await readdirSorted(join(package_dir, "node_modules", ".cache")))[0]).toBe("87d55589eb4217d2.git");
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
+ ".bun-tag",
+ ".gitattributes",
+ ".github",
+ ".gitignore",
+ "CONTRIBUTING.md",
+ "LICENSE",
+ "README.md",
+ "bin",
+ "lib",
+ "package.json",
+ "test",
+ "tools",
+ ]);
+ const package_json = await file(join(package_dir, "node_modules", "uglify", "package.json")).json();
+ expect(package_json.name).toBe("uglify-js");
+ await access(join(package_dir, "bun.lockb"));
+});
+
it("should handle Git URL with committish in dependencies", async () => {
const urls: string[] = [];
setHandler(dummyRegistry(urls));