aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGravatar Alex Lam S.L <alexlamsl@gmail.com> 2023-01-28 17:05:28 +0200
committerGravatar GitHub <noreply@github.com> 2023-01-28 07:05:28 -0800
commit07258bd559a25a08e406f8e3630c04677a920eba (patch)
treeacce7d37e7770ece03986b91ff1f58b0e50b950e
parent0646efbf30a5e4f84973c8a1a312eb224407913d (diff)
downloadbun-07258bd559a25a08e406f8e3630c04677a920eba.tar.gz
bun-07258bd559a25a08e406f8e3630c04677a920eba.tar.zst
bun-07258bd559a25a08e406f8e3630c04677a920eba.zip
[WIP] append GitHub package after fully parsed (#1911)
-rw-r--r--src/install/dependency.zig2
-rw-r--r--src/install/extract_tarball.zig1
-rw-r--r--src/install/install.zig178
-rw-r--r--src/install/lockfile.zig21
-rw-r--r--src/install/repository.zig6
-rw-r--r--test/bun.js/install/bar.tgzbin0 -> 192 bytes
-rw-r--r--test/bun.js/install/baz.tgzbin0 -> 283 bytes
-rw-r--r--test/bun.js/install/bun-install.test.ts198
-rw-r--r--test/bun.js/install/tarball.tgzbin190 -> 0 bytes
9 files changed, 231 insertions, 175 deletions
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index 405363001..600a90373 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -125,6 +125,7 @@ pub inline fn realname(this: *const Dependency) String {
return switch (this.version.tag) {
.npm => this.version.value.npm.name,
.dist_tag => this.version.value.dist_tag.name,
+ .github => this.version.value.github.package_name,
else => this.name,
};
}
@@ -133,6 +134,7 @@ pub inline fn isAliased(this: *const Dependency, buf: []const u8) bool {
return switch (this.version.tag) {
.npm => !this.version.value.npm.name.eql(this.name, buf, buf),
.dist_tag => !this.version.value.dist_tag.name.eql(this.name, buf, buf),
+ .github => !this.version.value.github.package_name.eql(this.name, buf, buf),
else => false,
};
}
diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig
index b1c1cbcec..81fdf557d 100644
--- a/src/install/extract_tarball.zig
+++ b/src/install/extract_tarball.zig
@@ -25,7 +25,6 @@ registry: string,
cache_dir: std.fs.Dir,
temp_dir: std.fs.Dir,
package_id: PackageID,
-dependency_id: PackageID = Install.invalid_package_id,
skip_verify: bool = false,
integrity: Integrity = Integrity{},
url: string = "",
diff --git a/src/install/install.zig b/src/install/install.zig
index 4ae9574ac..1eaecc678 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -2591,7 +2591,7 @@ pub const PackageManager = struct {
const name = dependency.realname();
const name_hash = switch (dependency.version.tag) {
- .dist_tag, .npm => Lockfile.stringHash(this.lockfile.str(&name)),
+ .dist_tag, .npm, .github => Lockfile.stringHash(this.lockfile.str(&name)),
else => dependency.name_hash,
};
const version = dependency.version;
@@ -2795,53 +2795,41 @@ pub const PackageManager = struct {
return;
},
.github => {
- const package: Lockfile.Package = brk: {
- if (dependency.behavior.isPeer()) return;
- const dep = &dependency.version.value.github;
- const res = Resolution{
- .tag = .github,
- .value = .{
- .github = dep.*,
- },
- };
-
- // First: see if we already loaded the github package in-memory
- if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| {
- // just because we've previously loaded it doesn't mean it was successfully installed
- break :brk this.lockfile.packages.get(pkg_id);
- }
+ if (dependency.behavior.isPeer()) return;
- break :brk try this.lockfile.appendPackage(.{
- .name = name,
- .name_hash = name_hash,
- .resolution = res,
- });
+ const dep = &dependency.version.value.github;
+ const res = Resolution{
+ .tag = .github,
+ .value = .{
+ .github = dep.*,
+ },
};
- switch (this.determinePreinstallState(package, this.lockfile)) {
- .extracting, .extract => {
- const url = this.allocGitHubURL(&package.resolution.value.github) catch unreachable;
- const task_id = Task.Id.forTarball(url);
- var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable;
- if (!entry.found_existing) {
- entry.value_ptr.* = TaskCallbackList{};
- }
+ // First: see if we already loaded the github package in-memory
+ if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| {
+ successFn(this, id, pkg_id);
+ return;
+ }
- const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency";
- try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id));
+ const package = Lockfile.Package{
+ .name = dependency.name,
+ .name_hash = dependency.name_hash,
+ .resolution = res,
+ };
- if (try this.generateNetworkTaskForTarball(task_id, url, package)) |network_task| {
- this.setPreinstallState(package.meta.id, this.lockfile, .extracting);
- this.enqueueNetworkTask(network_task);
- }
- },
- .done => {
- successFn(this, id, package.meta.id);
- },
- else => unreachable,
+ const url = this.allocGitHubURL(dep) catch unreachable;
+ const task_id = Task.Id.forTarball(url);
+ var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable;
+ if (!entry.found_existing) {
+ entry.value_ptr.* = TaskCallbackList{};
}
- return;
+ const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency";
+ try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id));
+
+ if (try this.generateNetworkTaskForTarball(task_id, url, package)) |network_task| {
+ this.enqueueNetworkTask(network_task);
+ }
},
.symlink, .workspace => {
const _result = this.getOrPutResolvedPackage(
@@ -3077,20 +3065,20 @@ pub const PackageManager = struct {
}
const GitHubResolver = struct {
- data_: ExtractData,
- package_name: String,
- package_name_hash: u64,
+ alias: string,
+ alias_ptr: *String,
+ resolved: string,
+ resolution: Resolution,
- pub fn count(this: *@This(), comptime StringBuilderType: type, builder: StringBuilderType, _: JSAst.Expr) void {
- builder.count(this.data_.resolved);
+ pub fn count(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) void {
+ builder.count(this.alias);
+ builder.count(this.resolved);
}
- pub fn resolveWithPackage(this: *@This(), comptime StringBuilderType: type, builder: StringBuilderType, pkg: *Package) anyerror!Resolution {
- pkg.name = this.package_name;
- pkg.name_hash = this.package_name_hash;
- var resolution = pkg.resolution;
- resolution.value.github.resolved = builder.append(String, this.data_.resolved);
-
+ pub fn resolve(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) !Resolution {
+ this.alias_ptr.* = builder.append(String, this.alias);
+ var resolution = this.resolution;
+ resolution.value.github.resolved = builder.append(String, this.resolved);
return resolution;
}
};
@@ -3098,26 +3086,20 @@ pub const PackageManager = struct {
/// Returns true if we need to drain dependencies
fn processExtractedTarballPackage(
manager: *PackageManager,
- package_id: PackageID,
+ package_id: *PackageID,
+ name: string,
+ resolution: Resolution,
data: ExtractData,
comptime log_level: Options.LogLevel,
- ) bool {
- switch (manager.lockfile.packages.items(.resolution)[package_id].tag) {
+ ) ?String {
+ switch (resolution.tag) {
.github => {
- var package = manager.lockfile.packages.get(package_id);
- const package_name = package.name;
- const package_name_hash = package.name_hash;
const package_json_source = logger.Source.initPathString(
data.json_path,
data.json_buf[0..data.json_len],
);
- package.resolution.value.github.resolved = String{};
-
- var github = GitHubResolver{
- .data_ = data,
- .package_name = package_name,
- .package_name_hash = package_name_hash,
- };
+ var package = Lockfile.Package{};
+ var alias = String{};
Lockfile.Package.parse(
manager.lockfile,
@@ -3125,32 +3107,41 @@ pub const PackageManager = struct {
manager.allocator,
manager.log,
package_json_source,
- *GitHubResolver,
- &github,
+ GitHubResolver,
+ GitHubResolver{
+ .alias = name,
+ .alias_ptr = &alias,
+ .resolved = data.resolved,
+ .resolution = resolution,
+ },
Features.npm,
) catch |err| {
if (comptime log_level != .silent) {
const string_buf = manager.lockfile.buffers.string_bytes.items;
Output.prettyErrorln("<r><red>error:<r> expected package.json in <b>{any}<r> to be a JSON file: {s}\n", .{
- package.resolution.fmtURL(&manager.options, package_name.slice(string_buf), string_buf),
+ package.resolution.fmtURL(&manager.options, alias.slice(string_buf), string_buf),
@errorName(err),
});
}
Global.crash();
};
- manager.lockfile.packages.set(package_id, package);
+ package = manager.lockfile.appendPackage(package) catch unreachable;
+ package_id.* = package.meta.id;
+ if (!strings.eql(name, manager.lockfile.str(&package.name))) {
+ manager.lockfile.alias_map.put(manager.allocator, package.meta.id, alias) catch unreachable;
+ }
if (package.dependencies.len > 0) {
manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch unreachable;
}
- return true;
+ return package.name;
},
else => {},
}
- return false;
+ return null;
}
const CacheDir = struct { path: string, is_node_modules: bool };
@@ -3599,14 +3590,14 @@ pub const PackageManager = struct {
}
continue;
}
- const package_id = task.request.extract.tarball.package_id;
manager.extracted_count += 1;
bun.Analytics.Features.extracted_packages = true;
- manager.setPreinstallState(package_id, manager.lockfile, .done);
-
+ var package_id = task.request.extract.tarball.package_id;
+ const alias = task.request.extract.tarball.name.slice();
+ const resolution = task.request.extract.tarball.resolution;
// GitHub (and eventually tarball URL) dependencies are not fully resolved until after the tarball is downloaded & extracted.
- if (manager.processExtractedTarballPackage(package_id, task.data.extract, comptime log_level)) brk: {
+ if (manager.processExtractedTarballPackage(&package_id, alias, resolution, task.data.extract, comptime log_level)) |name| brk: {
            // In the middle of an install, you could end up needing to download the github tarball for a dependency
// We need to make sure we resolve the dependencies first before calling the onExtract callback
// TODO: move this into a separate function
@@ -3618,9 +3609,9 @@ pub const PackageManager = struct {
defer {
dependency_list_entry.value_ptr.* = end_dependency_list;
+ dependency_list.deinit(manager.allocator);
if (needs_flush) {
- dependency_list.deinit(manager.allocator);
manager.flushDependencyQueue();
if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) {
@@ -3632,17 +3623,27 @@ pub const PackageManager = struct {
}
for (dependency_list.items) |dep| {
- try manager.processDependencyListItem(dep, &any_root);
-
- if (dep != .dependency and dep != .root_dependency) {
- // if it's a node_module folder to install, handle that after we process all the dependencies within the onExtract callback.
- end_dependency_list.append(manager.allocator, dep) catch unreachable;
- } else {
- needs_flush = true;
+ switch (dep) {
+ .dependency => |id| {
+ manager.lockfile.buffers.dependencies.items[id].version.value.github.package_name = name;
+ try manager.processDependencyListItem(dep, &any_root);
+ needs_flush = true;
+ },
+ .root_dependency => |id| {
+ manager.dynamicRootDependencies().items[id].dependency.version.value.github.package_name = name;
+ try manager.processDependencyListItem(dep, &any_root);
+ needs_flush = true;
+ },
+ else => {
+ // if it's a node_module folder to install, handle that after we process all the dependencies within the onExtract callback.
+ end_dependency_list.append(manager.allocator, dep) catch unreachable;
+ },
}
}
}
+ manager.setPreinstallState(package_id, manager.lockfile, .done);
+
if (comptime @TypeOf(callbacks.onExtract) != void) {
callbacks.onExtract(extract_ctx, package_id, task.data.extract, comptime log_level);
}
@@ -5877,8 +5878,8 @@ pub const PackageManager = struct {
) void {
const buf = this.lockfile.buffers.string_bytes.items;
+ const alias = if (this.lockfile.alias_map.get(package_id)) |str| str.slice(buf) else name;
const destination_dir_subpath: [:0]u8 = brk: {
- const alias = if (this.lockfile.alias_map.get(package_id)) |str| str.slice(buf) else name;
std.mem.copy(u8, &this.destination_dir_subpath_buf, alias);
this.destination_dir_subpath_buf[alias.len] = 0;
break :brk this.destination_dir_subpath_buf[0..alias.len :0];
@@ -6035,7 +6036,7 @@ pub const PackageManager = struct {
// .destination_dir_subpath = destination_dir_subpath,
.root_node_modules_folder = this.root_node_modules_folder.dir.fd,
- .package_name = strings.StringOrTinyString.init(name),
+ .package_name = strings.StringOrTinyString.init(alias),
.string_buf = buf,
.extern_string_buf = extern_string_buf,
};
@@ -6044,7 +6045,7 @@ pub const PackageManager = struct {
if (bin_linker.err) |err| {
if (comptime log_level != .silent) {
const fmt = "\n<r><red>error:<r> linking <b>{s}<r>: {s}\n";
- const args = .{ name, @errorName(err) };
+ const args = .{ alias, @errorName(err) };
if (comptime log_level.showProgress()) {
if (Output.enable_ansi_colors) {
@@ -6394,7 +6395,10 @@ pub const PackageManager = struct {
// Don't attempt to link incompatible binaries
if (meta.isDisabled()) continue;
- const name: string = lockfile.str(&installer.names[resolved_id]);
+ const name: string = brk: {
+ const alias = this.lockfile.alias_map.get(package_id) orelse installer.names[resolved_id];
+ break :brk lockfile.str(&alias);
+ };
if (!installer.has_created_bin) {
if (!this.options.global) {
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index c8033c049..eaec4f2f6 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -1039,7 +1039,10 @@ pub const Printer = struct {
if (package_id > end) continue;
const is_new = installed.isSet(package_id);
- const package_name = names[package_id].slice(string_buf);
+ const package_name = brk: {
+ const alias = this.lockfile.alias_map.get(package_id) orelse names[package_id];
+ break :brk alias.slice(string_buf);
+ };
if (this.updates.len > 0) {
const name_hash = names_hashes[package_id];
@@ -1109,7 +1112,7 @@ pub const Printer = struct {
for (this.updates) |_, update_id| {
const package_id = id_map[update_id];
if (package_id == std.math.maxInt(PackageID)) continue;
- const name = names[package_id];
+ const name = this.lockfile.alias_map.get(package_id) orelse names[package_id];
const bin = bins[package_id];
const package_name = name.slice(string_buf);
@@ -2820,15 +2823,11 @@ pub const Package = extern struct {
if (comptime !features.is_main) {
if (comptime ResolverContext != void) {
- if (comptime std.meta.trait.is(.Pointer)(ResolverContext) and @hasDecl(std.meta.Child(ResolverContext), "resolveWithPackage")) {
- package.resolution = try resolver.resolveWithPackage(*Lockfile.StringBuilder, &string_builder, package);
- } else {
- package.resolution = try resolver.resolve(
- *Lockfile.StringBuilder,
- &string_builder,
- json,
- );
- }
+ package.resolution = try resolver.resolve(
+ *Lockfile.StringBuilder,
+ &string_builder,
+ json,
+ );
}
} else {
package.resolution = .{
diff --git a/src/install/repository.zig b/src/install/repository.zig
index d1d494059..ea48210b3 100644
--- a/src/install/repository.zig
+++ b/src/install/repository.zig
@@ -12,12 +12,14 @@ pub const Repository = extern struct {
repo: String = String{},
committish: GitSHA = GitSHA{},
resolved: String = String{},
+ package_name: String = String{},
pub fn verify(this: *const Repository) void {
this.owner.assertDefined();
this.repo.assertDefined();
this.committish.assertDefined();
this.resolved.assertDefined();
+ this.package_name.assertDefined();
}
pub fn order(lhs: *const Repository, rhs: *const Repository, lhs_buf: []const u8, rhs_buf: []const u8) std.math.Order {
@@ -34,6 +36,7 @@ pub const Repository = extern struct {
builder.count(this.repo.slice(buf));
builder.count(this.committish.slice(buf));
builder.count(this.resolved.slice(buf));
+ builder.count(this.package_name.slice(buf));
}
pub fn clone(this: *const Repository, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) Repository {
@@ -41,7 +44,8 @@ pub const Repository = extern struct {
.owner = builder.append(String, this.owner.slice(buf)),
.repo = builder.append(String, this.repo.slice(buf)),
.committish = builder.append(GitSHA, this.committish.slice(buf)),
- .resolved = builder.append(GitSHA, this.resolved.slice(buf)),
+ .resolved = builder.append(String, this.resolved.slice(buf)),
+ .package_name = builder.append(String, this.package_name.slice(buf)),
};
}
diff --git a/test/bun.js/install/bar.tgz b/test/bun.js/install/bar.tgz
new file mode 100644
index 000000000..1983142d7
--- /dev/null
+++ b/test/bun.js/install/bar.tgz
Binary files differ
diff --git a/test/bun.js/install/baz.tgz b/test/bun.js/install/baz.tgz
new file mode 100644
index 000000000..375a5e31f
--- /dev/null
+++ b/test/bun.js/install/baz.tgz
Binary files differ
diff --git a/test/bun.js/install/bun-install.test.ts b/test/bun.js/install/bun-install.test.ts
index a0db3df96..f1139bdd5 100644
--- a/test/bun.js/install/bun-install.test.ts
+++ b/test/bun.js/install/bun-install.test.ts
@@ -18,18 +18,18 @@ import {
rm,
writeFile,
} from "fs/promises";
-import { join } from "path";
+import { basename, join } from "path";
import { tmpdir } from "os";
import { realpathSync } from "fs";
let handler, package_dir, requested, server;
-function dummyRegistry(urls, version = "0.0.2") {
+function dummyRegistry(urls, version = "0.0.2", props = {}) {
return async (request) => {
urls.push(request.url);
expect(request.method).toBe("GET");
if (request.url.endsWith(".tgz")) {
- return new Response(file(join(import.meta.dir, "tarball.tgz")));
+ return new Response(file(join(import.meta.dir, basename(request.url))));
}
expect(request.headers.get("accept")).toBe(
"application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*",
@@ -47,6 +47,7 @@ function dummyRegistry(urls, version = "0.0.2") {
dist: {
tarball: `${request.url}.tgz`,
},
+ ...props,
},
},
"dist-tags": {
@@ -231,7 +232,7 @@ it("should handle empty string in dependencies", async () => {
expect(
await file(join(package_dir, "node_modules", "bar", "package.json")).json(),
).toEqual({
- name: "baz",
+ name: "bar",
version: "0.0.2",
});
await access(join(package_dir, "bun.lockb"));
@@ -645,7 +646,7 @@ it("should handle ^0 in dependencies", async () => {
expect(
await file(join(package_dir, "node_modules", "bar", "package.json")).json(),
).toEqual({
- name: "baz",
+ name: "bar",
version: "0.0.2",
});
await access(join(package_dir, "bun.lockb"));
@@ -737,7 +738,7 @@ it("should handle ^0.0 in dependencies", async () => {
expect(
await file(join(package_dir, "node_modules", "bar", "package.json")).json(),
).toEqual({
- name: "baz",
+ name: "bar",
version: "0.0.2",
});
await access(join(package_dir, "bun.lockb"));
@@ -868,7 +869,7 @@ it("should handle ^0.0.2 in dependencies", async () => {
expect(
await file(join(package_dir, "node_modules", "bar", "package.json")).json(),
).toEqual({
- name: "baz",
+ name: "bar",
version: "0.0.2",
});
await access(join(package_dir, "bun.lockb"));
@@ -921,7 +922,7 @@ it("should handle ^0.0.2-rc in dependencies", async () => {
expect(
await file(join(package_dir, "node_modules", "bar", "package.json")).json(),
).toEqual({
- name: "baz",
+ name: "bar",
version: "0.0.2",
});
await access(join(package_dir, "bun.lockb"));
@@ -974,7 +975,7 @@ it("should handle ^0.0.2-alpha.3+b4d in dependencies", async () => {
expect(
await file(join(package_dir, "node_modules", "bar", "package.json")).json(),
).toEqual({
- name: "baz",
+ name: "bar",
version: "0.0.2",
});
await access(join(package_dir, "bun.lockb"));
@@ -982,7 +983,11 @@ it("should handle ^0.0.2-alpha.3+b4d in dependencies", async () => {
it("should handle dependency aliasing", async () => {
const urls = [];
- handler = dummyRegistry(urls);
+ handler = dummyRegistry(urls, "0.0.3", {
+ bin: {
+ "baz-run": "index.js",
+ },
+ });
await writeFile(
join(package_dir, "package.json"),
JSON.stringify({
@@ -1007,7 +1012,7 @@ it("should handle dependency aliasing", async () => {
expect(stdout).toBeDefined();
const out = await new Response(stdout).text();
expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
- " + baz@0.0.2",
+ " + Bar@0.0.3",
"",
" 1 packages installed",
]);
@@ -1018,31 +1023,42 @@ it("should handle dependency aliasing", async () => {
]);
expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
".cache",
"Bar",
]);
- expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual(
- ["package.json"],
- );
- expect(
- await file(join(package_dir, "node_modules", "Bar", "package.json")).json(),
- ).toEqual({
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "baz-run",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "Bar", "index.js"));
+ expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual([
+ "index.js",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "node_modules", "Bar", "package.json")).json()).toEqual({
name: "baz",
- version: "0.0.2",
+ version: "0.0.3",
+ bin: {
+ "baz-run": "index.js",
+ },
});
await access(join(package_dir, "bun.lockb"));
});
it("should handle dependency aliasing (versioned)", async () => {
const urls: string[] = [];
- handler = dummyRegistry(urls);
+ handler = dummyRegistry(urls, "0.0.3", {
+ bin: {
+ "baz-run": "index.js",
+ },
+ });
await writeFile(
join(package_dir, "package.json"),
JSON.stringify({
name: "Foo",
version: "0.0.1",
dependencies: {
- Bar: "npm:baz@0.0.2",
+ Bar: "npm:baz@0.0.3",
},
}),
);
@@ -1060,7 +1076,7 @@ it("should handle dependency aliasing (versioned)", async () => {
expect(stdout).toBeDefined();
const out = await new Response(stdout).text();
expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
- " + baz@0.0.2",
+ " + Bar@0.0.3",
"",
" 1 packages installed",
]);
@@ -1071,24 +1087,35 @@ it("should handle dependency aliasing (versioned)", async () => {
]);
expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
".cache",
"Bar",
]);
- expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual(
- ["package.json"],
- );
- expect(
- await file(join(package_dir, "node_modules", "Bar", "package.json")).json(),
- ).toEqual({
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "baz-run",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "Bar", "index.js"));
+ expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual([
+ "index.js",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "node_modules", "Bar", "package.json")).json()).toEqual({
name: "baz",
- version: "0.0.2",
+ version: "0.0.3",
+ bin: {
+ "baz-run": "index.js",
+ },
});
await access(join(package_dir, "bun.lockb"));
});
it("should handle dependency aliasing (dist-tagged)", async () => {
const urls: string[] = [];
- handler = dummyRegistry(urls);
+ handler = dummyRegistry(urls, "0.0.3", {
+ bin: {
+ "baz-run": "index.js",
+ },
+ });
await writeFile(
join(package_dir, "package.json"),
JSON.stringify({
@@ -1113,7 +1140,7 @@ it("should handle dependency aliasing (dist-tagged)", async () => {
expect(stdout).toBeDefined();
const out = await new Response(stdout).text();
expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
- " + baz@0.0.2",
+ " + Bar@0.0.3",
"",
" 1 packages installed",
]);
@@ -1124,24 +1151,35 @@ it("should handle dependency aliasing (dist-tagged)", async () => {
]);
expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
".cache",
"Bar",
]);
- expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual(
- ["package.json"],
- );
- expect(
- await file(join(package_dir, "node_modules", "Bar", "package.json")).json(),
- ).toEqual({
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "baz-run",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "Bar", "index.js"));
+ expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual([
+ "index.js",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "node_modules", "Bar", "package.json")).json()).toEqual({
name: "baz",
- version: "0.0.2",
+ version: "0.0.3",
+ bin: {
+ "baz-run": "index.js",
+ },
});
await access(join(package_dir, "bun.lockb"));
});
it("should not reinstall aliased dependencies", async () => {
const urls = [];
- handler = dummyRegistry(urls);
+ handler = dummyRegistry(urls, "0.0.3", {
+ bin: {
+ "baz-run": "index.js",
+ },
+ });
await writeFile(
join(package_dir, "package.json"),
JSON.stringify({
@@ -1170,7 +1208,7 @@ it("should not reinstall aliased dependencies", async () => {
expect(stdout1).toBeDefined();
const out1 = await new Response(stdout1).text();
expect(out1.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
- " + baz@0.0.2",
+ " + Bar@0.0.3",
"",
" 1 packages installed",
]);
@@ -1181,17 +1219,24 @@ it("should not reinstall aliased dependencies", async () => {
]);
expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
".cache",
"Bar",
]);
- expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual(
- ["package.json"],
- );
- expect(
- await file(join(package_dir, "node_modules", "Bar", "package.json")).json(),
- ).toEqual({
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "baz-run",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "Bar", "index.js"));
+ expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual([
+ "index.js",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "node_modules", "Bar", "package.json")).json()).toEqual({
name: "baz",
- version: "0.0.2",
+ version: "0.0.3",
+ bin: {
+ "baz-run": "index.js",
+ },
});
await access(join(package_dir, "bun.lockb"));
// Performs `bun install` again, expects no-op
@@ -1221,17 +1266,24 @@ it("should not reinstall aliased dependencies", async () => {
expect(urls).toEqual([]);
expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
+ ".bin",
".cache",
"Bar",
]);
- expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual(
- ["package.json"],
- );
- expect(
- await file(join(package_dir, "node_modules", "Bar", "package.json")).json(),
- ).toEqual({
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual([
+ "baz-run",
+ ]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "Bar", "index.js"));
+ expect(await readdirSorted(join(package_dir, "node_modules", "Bar"))).toEqual([
+ "index.js",
+ "package.json",
+ ]);
+ expect(await file(join(package_dir, "node_modules", "Bar", "package.json")).json()).toEqual({
name: "baz",
- version: "0.0.2",
+ version: "0.0.3",
+ bin: {
+ "baz-run": "index.js",
+ },
});
await access(join(package_dir, "bun.lockb"));
});
@@ -1515,25 +1567,25 @@ it("should handle GitHub URL in dependencies (github:user/repo#tag)", async () =
expect(
await readdirSorted(join(package_dir, "node_modules", ".bin")),
).toEqual(["uglifyjs"]);
+ expect(await readlink(join(package_dir, "node_modules", ".bin", "uglifyjs"))).toBe(join(
+ "..",
+ "uglify",
+ "bin",
+ "uglifyjs",
+ ));
expect(
await readdirSorted(join(package_dir, "node_modules", ".cache")),
).toEqual(["@GH@mishoo-UglifyJS-e219a9a", "uglify"]);
- expect(
- await readdirSorted(join(package_dir, "node_modules", ".cache", "uglify")),
- ).toEqual(["mishoo-UglifyJS-e219a9a"]);
- expect(
- await readlink(
- join(
- package_dir,
- "node_modules",
- ".cache",
- "uglify",
- "mishoo-UglifyJS-e219a9a",
- ),
- ),
- ).toBe(
- join(package_dir, "node_modules", ".cache", "@GH@mishoo-UglifyJS-e219a9a"),
- );
+ expect(await readdirSorted(join(package_dir, "node_modules", ".cache", "uglify"))).toEqual([
+ "mishoo-UglifyJS-e219a9a",
+ ]);
+ expect(await readlink(join(
+ package_dir,
+ "node_modules",
+ ".cache",
+ "uglify",
+ "mishoo-UglifyJS-e219a9a",
+ ))).toBe(join(package_dir, "node_modules", ".cache", "@GH@mishoo-UglifyJS-e219a9a"));
expect(
await readdirSorted(join(package_dir, "node_modules", "uglify")),
).toEqual([
@@ -1599,12 +1651,8 @@ it("should handle GitHub URL in dependencies (https://github.com/user/repo.git)"
".cache",
"uglify",
]);
- expect(
- await readdirSorted(join(package_dir, "node_modules", ".bin")),
- ).toEqual(["uglifyjs"]);
- expect(
- await readdirSorted(join(package_dir, "node_modules", "uglify")),
- ).toEqual([
+ expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["uglifyjs"]);
+ expect(await readdirSorted(join(package_dir, "node_modules", "uglify"))).toEqual([
".bun-tag",
".gitattributes",
".github",
diff --git a/test/bun.js/install/tarball.tgz b/test/bun.js/install/tarball.tgz
deleted file mode 100644
index 384081343..000000000
--- a/test/bun.js/install/tarball.tgz
+++ /dev/null
Binary files differ