Diffstat (limited to 'test/cli/install/bun-install.test.ts')
-rw-r--r-- | test/cli/install/bun-install.test.ts | 538 |
1 file changed, 521 insertions, 17 deletions
diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts
index 9ee77be8c..e41aa7743 100644
--- a/test/cli/install/bun-install.test.ts
+++ b/test/cli/install/bun-install.test.ts
@@ -659,6 +659,13 @@ it("should ignore peerDependencies within workspaces", async () => {
       },
     }),
   );
+  await writeFile(
+    join(package_dir, "bunfig.toml"),
+    `
+    [install]
+    peer = false
+    `,
+  );
   const { stdout, stderr, exited } = spawn({
     cmd: [bunExe(), "install"],
     cwd: package_dir,
@@ -679,11 +686,87 @@ it("should ignore peerDependencies within workspaces", async () => {
   ]);
   expect(await exited).toBe(0);
   expect(requested).toBe(0);
-  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Baz"]);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual(["Baz"]);
   expect(await readlink(join(package_dir, "node_modules", "Baz"))).toBe(join("..", "packages", "baz"));
   await access(join(package_dir, "bun.lockb"));
 });
+it("should handle installing the same peerDependency with different versions", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      peerDependencies: {
+        peer: "0.0.2",
+      },
+      dependencies: {
+        boba: "0.0.2",
+      },
+    }),
+  );
+
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(requested).toBe(0);
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    " + boba@0.0.2",
+    "",
+    " 2 packages installed",
+  ]);
+});
+
+it("should handle installing the same peerDependency with the same version", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      peerDependencies: {
+        peer: "0.0.1",
+      },
+      dependencies: {
+        boba: "0.0.2",
+      },
+    }),
+  );
+
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(requested).toBe(0);
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    " + boba@0.0.2",
+    "",
+    " 1 package installed",
+  ]);
+});
+
 it("should handle life-cycle scripts within workspaces", async () => {
   await writeFile(
     join(package_dir, "package.json"),
@@ -737,6 +820,8 @@ it("should handle life-cycle scripts within workspaces", async () => {
 });

 it("should handle life-cycle scripts during re-installation", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
   await writeFile(
     join(package_dir, "package.json"),
     JSON.stringify({
@@ -745,6 +830,10 @@ it("should handle life-cycle scripts during re-installation", async () => {
       scripts: {
         install: [bunExe(), "index.js"].join(" "),
       },
+      dependencies: {
+        qux: "^0.0",
+      },
+      trustedDependencies: ["qux"],
       workspaces: ["bar"],
     }),
   );
@@ -782,13 +871,15 @@ it("should handle life-cycle scripts during re-installation", async () => {
   expect(out1.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
     "[scripts:run] Bar",
     " + Bar@workspace:bar",
+    " + qux@0.0.2",
     "[scripts:run] Foo",
+    "[scripts:run] Qux",
     "",
-    " 1 package installed",
+    " 2 packages installed",
   ]);
   expect(await exited1).toBe(0);
-  expect(requested).toBe(0);
-  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar"]);
+  expect(requested).toBe(2);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar", "qux"]);
   expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar"));
   await access(join(package_dir, "bun.lockb"));
   // Perform `bun install` again but with lockfile from before
@@ -814,13 +905,15 @@ it("should handle life-cycle scripts during re-installation", async () => {
   expect(out2.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
     "[scripts:run] Bar",
     " + Bar@workspace:bar",
+    " + qux@0.0.2",
     "[scripts:run] Foo",
+    "[scripts:run] Qux",
     "",
-    " 1 package installed",
+    " 2 packages installed",
   ]);
   expect(await exited2).toBe(0);
-  expect(requested).toBe(0);
-  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual(["Bar"]);
+  expect(requested).toBe(3);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar", "qux"]);
   expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar"));
   await access(join(package_dir, "bun.lockb"));
   // Perform `bun install --production` with lockfile from before
@@ -846,13 +939,15 @@ it("should handle life-cycle scripts during re-installation", async () => {
   expect(out3.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
     "[scripts:run] Bar",
     " + Bar@workspace:bar",
+    " + qux@0.0.2",
     "[scripts:run] Foo",
+    "[scripts:run] Qux",
     "",
-    " 1 package installed",
+    " 2 packages installed",
   ]);
   expect(await exited3).toBe(0);
-  expect(requested).toBe(0);
-  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual(["Bar"]);
+  expect(requested).toBe(4);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar", "qux"]);
   expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar"));
   await access(join(package_dir, "bun.lockb"));
 });
@@ -1408,6 +1503,157 @@ it("should handle ^0.0.2 in dependencies", async () => {
   await access(join(package_dir, "bun.lockb"));
 });

+it("should handle matching workspaces from dependencies", async () => {
+  const urls: string[] = [];
+  setHandler(
+    dummyRegistry(urls, {
+      "0.2.0": { as: "0.2.0" },
+    }),
+  );
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      workspaces: ["packages/*"],
+    }),
+  );
+  await mkdir(join(package_dir, "packages", "pkg1"), { recursive: true });
+  await mkdir(join(package_dir, "packages", "pkg2"), { recursive: true });
+  await writeFile(
+    join(package_dir, "packages", "pkg1", "package.json"),
+    JSON.stringify({
+      name: "pkg1",
+      version: "0.2.0",
+    }),
+  );
+
+  await writeFile(
+    join(package_dir, "packages", "pkg2", "package.json"),
+    JSON.stringify({
+      name: "pkg2",
+      version: "0.2.0",
+      dependencies: {
+        // moo has a dependency on pkg1 that matches 0.2.0
+        moo: "0.2.0",
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).not.toContain("error:");
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    " + pkg1@workspace:packages/pkg1",
+    " + pkg2@workspace:packages/pkg2",
+    "",
+    " 3 packages installed",
+  ]);
+  expect(await exited).toBe(0);
+  await access(join(package_dir, "bun.lockb"));
+});
+
+it("should edit package json correctly with git dependencies", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  const package_json = JSON.stringify({
+    name: "foo",
+    version: "0.0.1",
+    dependencies: {},
+  });
+  await writeFile(join(package_dir, "package.json"), package_json);
+  var { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "i", "dylan-conway/install-test2"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  var err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(err).not.toContain("error:");
+  expect(await exited).toBe(0);
+  expect(await file(join(package_dir, "package.json")).json()).toEqual({
+    name: "foo",
+    version: "0.0.1",
+    dependencies: {
+      "install-test2": "dylan-conway/install-test2",
+    },
+  });
+  await writeFile(join(package_dir, "package.json"), package_json);
+  ({ stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "i", "dylan-conway/install-test2#HEAD"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  }));
+  err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(err).not.toContain("error:");
+  expect(await exited).toBe(0);
+  expect(await file(join(package_dir, "package.json")).json()).toEqual({
+    name: "foo",
+    version: "0.0.1",
+    dependencies: {
+      "install-test2": "dylan-conway/install-test2#HEAD",
+    },
+  });
+  await writeFile(join(package_dir, "package.json"), package_json);
+  ({ stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "i", "github:dylan-conway/install-test2"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  }));
+  err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(err).not.toContain("error:");
+  expect(await exited).toBe(0);
+  expect(await file(join(package_dir, "package.json")).json()).toEqual({
+    name: "foo",
+    version: "0.0.1",
+    dependencies: {
+      "install-test2": "github:dylan-conway/install-test2",
+    },
+  });
+  await writeFile(join(package_dir, "package.json"), package_json);
+  ({ stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "i", "github:dylan-conway/install-test2#HEAD"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  }));
+  err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(err).not.toContain("error:");
+  expect(await exited).toBe(0);
+  expect(await file(join(package_dir, "package.json")).json()).toEqual({
+    name: "foo",
+    version: "0.0.1",
+    dependencies: {
+      "install-test2": "github:dylan-conway/install-test2#HEAD",
+    },
+  });
+  await access(join(package_dir, "bun.lockb"));
+});
+
 it("should handle ^0.0.2-rc in dependencies", async () => {
   const urls: string[] = [];
   setHandler(dummyRegistry(urls, { "0.0.2-rc": { as: "0.0.2" } }));
   await writeFile(
     join(package_dir, "package.json"),
@@ -1778,6 +2024,130 @@ it("should prefer latest-tagged dependency", async () => {
   await access(join(package_dir, "bun.lockb"));
 });

+it("should install latest with prereleases", async () => {
+  const urls: string[] = [];
+  setHandler(
+    dummyRegistry(urls, {
+      "1.0.0-0": { as: "0.0.3" },
+      "1.0.0-8": { as: "0.0.5" },
+      latest: "1.0.0-0",
+    }),
+  );
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+    }),
+  );
+
+  var { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install", "baz"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  var err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  var out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\n/)).toEqual([
+    "",
+    " installed baz@1.0.0-0",
+    "",
+    "",
+    " 1 package installed",
+  ]);
+  expect(await exited).toBe(0);
+  expect(requested).toBe(2);
+  await rm(join(package_dir, "node_modules"), { recursive: true, force: true });
+  await rm(join(package_dir, "bun.lockb"), { recursive: true, force: true });
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: {
+        baz: "latest",
+      },
+    }),
+  );
+  ({ stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  }));
+  expect(stderr).toBeDefined();
+  err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\n/)).toEqual([" + baz@1.0.0-0", "", " 1 package installed"]);
+  expect(await exited).toBe(0);
+  await rm(join(package_dir, "node_modules"), { recursive: true, force: true });
+  await rm(join(package_dir, "bun.lockb"), { recursive: true, force: true });
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: {
+        baz: "^1.0.0-5",
+      },
+    }),
+  );
+  ({ stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  }));
+  expect(stderr).toBeDefined();
+  err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\n/)).toEqual([" + baz@1.0.0-8", "", " 1 package installed"]);
+  expect(await exited).toBe(0);
+
+  await rm(join(package_dir, "node_modules"), { recursive: true, force: true });
+  await rm(join(package_dir, "bun.lockb"), { recursive: true, force: true });
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      dependencies: {
+        baz: "^1.0.0-0",
+      },
+    }),
+  );
+  ({ stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  }));
+  expect(stderr).toBeDefined();
+  err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\n/)).toEqual([" + baz@1.0.0-0", "", " 1 package installed"]);
+  expect(await exited).toBe(0);
+  await access(join(package_dir, "bun.lockb"));
+});
+
 it("should handle dependency aliasing", async () => {
   const urls: string[] = [];
   setHandler(
@@ -3174,7 +3544,11 @@ it("should handle GitHub tarball URL in dependencies (https://github.com/user/re

 it("should treat non-GitHub http(s) URLs as tarballs (https://some.url/path?stuff)", async () => {
   const urls: string[] = [];
-  setHandler(dummyRegistry(urls));
+  setHandler(
+    dummyRegistry(urls, {
+      "4.3.0": { as: "4.3.0" },
+    }),
+  );
   await writeFile(
     join(package_dir, "package.json"),
     JSON.stringify({
@@ -3204,11 +3578,11 @@ it("should treat non-GitHub http(s) URLs as tarballs (https://some.url/path?stuf
   expect(out.split(/\r?\n/)).toEqual([
     " + @vercel/turbopack-node@https://gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-node/js?turbopack-230922.2",
     "",
-    " 1 package installed",
+    " 2 packages installed",
   ]);
   expect(await exited).toBe(0);
-  expect(urls.sort()).toBeEmpty();
-  expect(requested).toBe(0);
+  expect(urls.sort()).toHaveLength(2);
+  expect(requested).toBe(2);
   expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "@vercel", "loader-runner"]);
   expect(await readdirSorted(join(package_dir, "node_modules", "@vercel"))).toEqual(["turbopack-node"]);
   expect(await readdirSorted(join(package_dir, "node_modules", "@vercel", "turbopack-node"))).toEqual([
@@ -3216,8 +3590,6 @@ it("should treat non-GitHub http(s) URLs as tarballs (https://some.url/path?stuf
     "src",
     "tsconfig.json",
   ]);
-  const package_json = await file(join(package_dir, "node_modules", "when", "package.json")).json();
-  expect(package_json.name).toBe("when");
   await access(join(package_dir, "bun.lockb"));
 });

@@ -3396,7 +3768,7 @@ it("should consider peerDependencies during hoisting", async () => {
     }),
   );
   const { stdout, stderr, exited } = spawn({
-    cmd: [bunExe(), "install", "--peer"],
+    cmd: [bunExe(), "install"],
     cwd: package_dir,
     stdout: null,
     stdin: "pipe",
@@ -3447,6 +3819,105 @@ it("should consider peerDependencies during hoisting", async () => {
   await access(join(package_dir, "bun.lockb"));
 });

+it("should install peerDependencies when needed", async () => {
+  const urls: string[] = [];
+  setHandler(
+    dummyRegistry(urls, {
+      "0.0.3": {
+        bin: {
+          "baz-run": "index.js",
+        },
+      },
+      "0.0.5": {
+        bin: {
+          "baz-exec": "index.js",
+        },
+      },
+    }),
+  );
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      version: "0.0.1",
+      peerDependencies: {
+        baz: ">=0.0.3",
+      },
+      workspaces: ["bar", "moo"],
+    }),
+  );
+  await mkdir(join(package_dir, "bar"));
+  await writeFile(
+    join(package_dir, "bar", "package.json"),
+    JSON.stringify({
+      name: "bar",
+      version: "0.0.2",
+      dependencies: {
+        baz: "0.0.3",
+      },
+    }),
+  );
+  await mkdir(join(package_dir, "moo"));
+  await writeFile(
+    join(package_dir, "moo", "package.json"),
+    JSON.stringify({
+      name: "moo",
+      version: "0.0.4",
+      dependencies: {
+        baz: "0.0.5",
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([
+    " + bar@workspace:bar",
+    " + moo@workspace:moo",
+    "",
+    " 4 packages installed",
+  ]);
+  expect(await exited).toBe(0);
+  expect(urls.sort()).toEqual([`${root_url}/baz`, `${root_url}/baz-0.0.3.tgz`, `${root_url}/baz-0.0.5.tgz`]);
+  expect(requested).toBe(3);
+  expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar", "baz", "moo"]);
+  expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toEqual(["baz-exec", "baz-run"]);
+  expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-exec"))).toBe(
+    join("..", "..", "moo", "node_modules", "baz", "index.js"),
+  );
+  expect(await readlink(join(package_dir, "node_modules", ".bin", "baz-run"))).toBe(join("..", "baz", "index.js"));
+  expect(await readlink(join(package_dir, "node_modules", "bar"))).toBe(join("..", "bar"));
+  expect(await readdirSorted(join(package_dir, "bar"))).toEqual(["package.json"]);
+  expect(await readdirSorted(join(package_dir, "node_modules", "baz"))).toEqual(["index.js", "package.json"]);
+  expect(await file(join(package_dir, "node_modules", "baz", "package.json")).json()).toEqual({
+    name: "baz",
+    version: "0.0.3",
+    bin: {
+      "baz-run": "index.js",
+    },
+  });
+  expect(await readlink(join(package_dir, "node_modules", "moo"))).toBe(join("..", "moo"));
+  expect(await readdirSorted(join(package_dir, "moo"))).toEqual(["node_modules", "package.json"]);
+  expect(await file(join(package_dir, "moo", "node_modules", "baz", "package.json")).json()).toEqual({
+    name: "baz",
+    version: "0.0.5",
+    bin: {
+      "baz-exec": "index.js",
+    },
+  });
+  await access(join(package_dir, "bun.lockb"));
+});
+
 it("should not regard peerDependencies declarations as duplicates", async () => {
   const urls: string[] = [];
   setHandler(dummyRegistry(urls));
@@ -6928,6 +7399,39 @@ it("should handle `workspace:*` on both root & child", async () => {
   await access(join(package_dir, "bun.lockb"));
 });

+it("should install peer dependencies from root package", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    JSON.stringify({
+      name: "foo",
+      peerDependencies: {
+        bar: "0.0.2",
+      },
+    }),
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    env,
+    stdout: null,
+    stdin: "pipe",
+    stderr: "pipe",
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain("Saved lockfile");
+  expect(stdout).toBeDefined();
+  const out = await new Response(stdout).text();
+  expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", " 1 package installed"]);
+  expect(await exited).toBe(0);
+  expect(urls.sort()).toEqual([`${root_url}/bar`, `${root_url}/bar-0.0.2.tgz`]);
+  expect(requested).toBe(2);
+
+  await access(join(package_dir, "bun.lockb"));
+});
+
 describe("Registry URLs", () => {
   // Some of the non failing URLs are invalid, but bun's URL parser ignores
   // the validation error and returns a valid serialized URL anyway.