author     Alex Lam S.L <alexlamsl@gmail.com>   2023-07-04 12:09:58 +0300
committer  GitHub <noreply@github.com>          2023-07-04 02:09:58 -0700
commit     3d0ffc48cb2608ed696581e10cb082c68b56c6b9 (patch)
tree       72bb50ad2491b4b7f1b0e0ee5de172cd59fbba1f
parent     bc7b5165beba1388a39215ccfa8e848e4982f1ed (diff)
[install] fix run-time module loading (#3510)
- fix version buffer confusion
- improve workaround to handle cached modules

fixes #3507
-rw-r--r--  src/install/install.zig            84
-rw-r--r--  src/js/out/modules/node/http.js     2
-rw-r--r--  src/resolver/resolver.zig          46
-rw-r--r--  test/cli/install/bun-run.test.ts  108
4 files changed, 154 insertions(+), 86 deletions(-)
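
The core of the install.zig change: enqueueDependencyToRoot() now takes the version before its string buffer, and it first scans the lockfile's existing dependency list so a dependency that was already enqueued (for example, by a previous run that populated the cache) is reused instead of being appended a second time. A minimal, self-contained sketch of that find-or-append pattern follows, using simplified stand-in types rather than Bun's actual Lockfile API:

const std = @import("std");

// Simplified stand-ins for Bun's lockfile buffers (assumptions, not the real types).
const invalid_package_id: u32 = std.math.maxInt(u32);

const Dep = struct {
    name: []const u8,
    version: []const u8,
};

// Return the index of an existing matching dependency, or append a new entry
// plus an unresolved (invalid) resolution slot and return the new index.
fn findOrAppend(
    deps: *std.ArrayList(Dep),
    resolutions: *std.ArrayList(u32),
    name: []const u8,
    version: []const u8,
) !usize {
    for (deps.items, 0..) |dep, id| {
        if (!std.mem.eql(u8, dep.name, name)) continue;
        if (!std.mem.eql(u8, dep.version, version)) continue;
        return id; // already enqueued: reuse instead of duplicating
    }
    const index = deps.items.len;
    try deps.append(.{ .name = name, .version = version });
    try resolutions.append(invalid_package_id);
    std.debug.assert(deps.items.len == resolutions.items.len);
    return index;
}

test "a cached module is not enqueued twice" {
    var deps = std.ArrayList(Dep).init(std.testing.allocator);
    defer deps.deinit();
    var resolutions = std.ArrayList(u32).init(std.testing.allocator);
    defer resolutions.deinit();

    const first = try findOrAppend(&deps, &resolutions, "uglify-js", "3.17.4");
    const second = try findOrAppend(&deps, &resolutions, "uglify-js", "3.17.4");
    try std.testing.expectEqual(first, second);
    try std.testing.expectEqual(@as(usize, 1), deps.items.len);
}
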
diff --git a/src/install/install.zig b/src/install/install.zig
index 87f931291..f6133afce 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -1742,55 +1742,51 @@ pub const PackageManager = struct {
pub fn enqueueDependencyToRoot(
this: *PackageManager,
name: []const u8,
- version_buf: []const u8,
version: *const Dependency.Version,
+ version_buf: []const u8,
behavior: Dependency.Behavior,
) DependencyToEnqueue {
- const str_buf = this.lockfile.buffers.string_bytes.items;
- for (this.lockfile.buffers.dependencies.items, 0..) |dependency, dependency_id| {
- if (!strings.eqlLong(dependency.name.slice(str_buf), name, true)) continue;
- if (!dependency.version.eql(version, str_buf, version_buf)) continue;
- return switch (this.lockfile.buffers.resolutions.items[dependency_id]) {
- invalid_package_id => .{
- .pending = @truncate(DependencyID, dependency_id),
- },
- else => |resolution_id| .{
- .resolution = .{
- .resolution = this.lockfile.packages.items(.resolution)[resolution_id],
- .package_id = resolution_id,
- },
- },
+ const dep_id = @truncate(DependencyID, brk: {
+ const str_buf = this.lockfile.buffers.string_bytes.items;
+ for (this.lockfile.buffers.dependencies.items, 0..) |dep, id| {
+ if (!strings.eqlLong(dep.name.slice(str_buf), name, true)) continue;
+ if (!dep.version.eql(version, str_buf, version_buf)) continue;
+ break :brk id;
+ }
+
+ var builder = this.lockfile.stringBuilder();
+ const dummy = Dependency{
+ .name = String.init(name, name),
+ .name_hash = String.Builder.stringHash(name),
+ .version = version.*,
+ .behavior = behavior,
};
- }
+ dummy.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder);
- var builder = this.lockfile.stringBuilder();
- const dependency = Dependency{
- .name = String.init(name, name),
- .name_hash = String.Builder.stringHash(name),
- .version = version.*,
- .behavior = behavior,
- };
- dependency.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder);
-
- builder.allocate() catch |err| return .{ .failure = err };
-
- const cloned_dependency = dependency.cloneWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder) catch unreachable;
- builder.clamp();
- const index = @truncate(DependencyID, this.lockfile.buffers.dependencies.items.len);
- this.lockfile.buffers.dependencies.append(this.allocator, cloned_dependency) catch unreachable;
- this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable;
- if (comptime Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len);
- this.enqueueDependencyWithMainAndSuccessFn(
- index,
- &cloned_dependency,
- invalid_package_id,
- assignRootResolution,
- failRootResolution,
- ) catch |err| {
- return .{ .failure = err };
- };
+ builder.allocate() catch |err| return .{ .failure = err };
+
+ const dep = dummy.cloneWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder) catch unreachable;
+ builder.clamp();
+ const index = this.lockfile.buffers.dependencies.items.len;
+ this.lockfile.buffers.dependencies.append(this.allocator, dep) catch unreachable;
+ this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable;
+ if (comptime Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len);
+ break :brk index;
+ });
+
+ if (this.lockfile.buffers.resolutions.items[dep_id] == invalid_package_id) {
+ this.enqueueDependencyWithMainAndSuccessFn(
+ dep_id,
+ &this.lockfile.buffers.dependencies.items[dep_id],
+ invalid_package_id,
+ assignRootResolution,
+ failRootResolution,
+ ) catch |err| {
+ return .{ .failure = err };
+ };
+ }
- const resolution_id = switch (this.lockfile.buffers.resolutions.items[index]) {
+ const resolution_id = switch (this.lockfile.buffers.resolutions.items[dep_id]) {
invalid_package_id => brk: {
this.drainDependencyList();
@@ -1815,7 +1811,7 @@ pub const PackageManager = struct {
},
}
- break :brk this.lockfile.buffers.resolutions.items[index];
+ break :brk this.lockfile.buffers.resolutions.items[dep_id];
},
// we managed to synchronously resolve the dependency
else => |pkg_id| pkg_id,
diff --git a/src/js/out/modules/node/http.js b/src/js/out/modules/node/http.js
index 955c83642..f07dcc2e0 100644
--- a/src/js/out/modules/node/http.js
+++ b/src/js/out/modules/node/http.js
@@ -1085,6 +1085,8 @@ var tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/, METHODS = [
debug(`${NODE_HTTP_WARNING}\n`, "setMaxIdleHTTPParsers() is a no-op");
},
globalAgent,
+ ClientRequest,
+ OutgoingMessage,
[Symbol.for("CommonJS")]: 0
}, http_default = defaultObject;
export {
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index e1e83ba4f..409df85af 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -1256,14 +1256,8 @@ pub const Resolver = struct {
if (check_package) {
if (r.opts.polyfill_node_globals) {
- var import_path_without_node_prefix = import_path;
- const had_node_prefix = import_path_without_node_prefix.len > "node:".len and
- strings.eqlComptime(import_path_without_node_prefix[0.."node:".len], "node:");
-
- import_path_without_node_prefix = if (had_node_prefix)
- import_path_without_node_prefix["node:".len..]
- else
- import_path_without_node_prefix;
+ const had_node_prefix = strings.hasPrefixComptime(import_path, "node:");
+ const import_path_without_node_prefix = if (had_node_prefix) import_path["node:".len..] else import_path;
if (NodeFallbackModules.Map.get(import_path_without_node_prefix)) |*fallback_module| {
result.path_pair.primary = fallback_module.path;
@@ -1278,7 +1272,7 @@ pub const Resolver = struct {
} else if (had_node_prefix or
(strings.hasPrefixComptime(import_path_without_node_prefix, "fs") and
(import_path_without_node_prefix.len == 2 or
- import_path_without_node_prefix[3] == '/')))
+ import_path_without_node_prefix[2] == '/')))
{
result.path_pair.primary.namespace = "node";
result.path_pair.primary.text = import_path_without_node_prefix;
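
The one-character resolver fix in the hunk above corrects an off-by-one: for a subpath import such as "fs/promises", the separator sits at index 2 ("fs".len), so the old check against index 3 never matched. A quick sanity check of the corrected condition, written with plain std.mem rather than Bun's strings helpers:

const std = @import("std");

test "fs subpath check looks at the byte right after the prefix" {
    const path = "fs/promises";
    // "fs".len == 2, so any separator must be at index 2, not 3.
    const is_fs_module = std.mem.startsWith(u8, path, "fs") and
        (path.len == 2 or path[2] == '/');
    try std.testing.expect(is_fs_module);
}
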
@@ -1698,8 +1692,9 @@ pub const Resolver = struct {
// If the source directory doesn't have a node_modules directory, we can
// check the global cache directory for a package.json file.
var manager = r.getPackageManager();
- var dependency_version: Dependency.Version = .{};
+ var dependency_version = Dependency.Version{};
var dependency_behavior = @enumFromInt(Dependency.Behavior, Dependency.Behavior.normal);
+ var string_buf = esm.version;
// const initial_pending_tasks = manager.pending_tasks;
var resolved_package_id: Install.PackageID = brk: {
@@ -1707,7 +1702,6 @@ pub const Resolver = struct {
// and try to look up the dependency from there
if (dir_info.package_json_for_dependencies) |package_json| {
var dependencies_list: []const Dependency = &[_]Dependency{};
- var string_buf: []const u8 = "";
const resolve_from_lockfile = package_json.package_manager_package_id != Install.invalid_package_id;
if (resolve_from_lockfile) {
@@ -1723,24 +1717,21 @@ pub const Resolver = struct {
}
for (dependencies_list, 0..) |dependency, dependency_id| {
- const dep_name = dependency.name.slice(string_buf);
- if (dep_name.len == esm.name.len) {
- if (!strings.eqlLong(dep_name, esm.name, false)) {
- continue;
- }
+ if (!strings.eqlLong(dependency.name.slice(string_buf), esm.name, true)) {
+ continue;
+ }
- dependency_version = dependency.version;
- dependency_behavior = dependency.behavior;
+ dependency_version = dependency.version;
+ dependency_behavior = dependency.behavior;
- if (resolve_from_lockfile) {
- const resolutions = &manager.lockfile.packages.items(.resolutions)[package_json.package_manager_package_id];
+ if (resolve_from_lockfile) {
+ const resolutions = &manager.lockfile.packages.items(.resolutions)[package_json.package_manager_package_id];
- // found it!
- break :brk resolutions.get(manager.lockfile.buffers.resolutions.items)[dependency_id];
- }
-
- break;
+ // found it!
+ break :brk resolutions.get(manager.lockfile.buffers.resolutions.items)[dependency_id];
}
+
+ break;
}
}
@@ -1770,6 +1761,7 @@ pub const Resolver = struct {
if (esm_.?.version.len > 0 and dir_info.enclosing_package_json != null and global_cache.allowVersionSpecifier()) {
return .{ .failure = error.VersionSpecifierNotAllowedHere };
}
+ string_buf = esm.version;
dependency_version = Dependency.parse(
r.allocator,
Semver.String.init(esm.name, esm.name),
@@ -1795,6 +1787,7 @@ pub const Resolver = struct {
dependency_behavior,
&resolved_package_id,
dependency_version,
+ string_buf,
)) {
.resolution => |res| break :brk res,
.pending => |pending| return .{ .pending = pending },
@@ -2073,6 +2066,7 @@ pub const Resolver = struct {
behavior: Dependency.Behavior,
input_package_id_: *Install.PackageID,
version: Dependency.Version,
+ version_buf: []const u8,
) DependencyToResolve {
if (r.debug_logs) |*debug| {
debug.addNoteFmt("Enqueueing pending dependency \"{s}@{s}\"", .{ esm.name, esm.version });
@@ -2135,7 +2129,7 @@ pub const Resolver = struct {
// All packages are enqueued to the root
// because we download all the npm package dependencies
- switch (pm.enqueueDependencyToRoot(esm.name, esm.version, &version, behavior)) {
+ switch (pm.enqueueDependencyToRoot(esm.name, &version, version_buf, behavior)) {
.resolution => |result| {
input_package_id_.* = result.package_id;
return .{ .resolution = result.resolution };
diff --git a/test/cli/install/bun-run.test.ts b/test/cli/install/bun-run.test.ts
index 9ab094f08..95f33ebb8 100644
--- a/test/cli/install/bun-run.test.ts
+++ b/test/cli/install/bun-run.test.ts
@@ -24,7 +24,37 @@ const { minify } = require("uglify-js@3.17.4");
console.log(minify("print(6 * 7)").code);
`,
);
- const { stdout, stderr, exited } = spawn({
+ const {
+ stdout: stdout1,
+ stderr: stderr1,
+ exited: exited1,
+ } = spawn({
+ cmd: [bunExe(), "run", "test.js"],
+ cwd: run_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env: {
+ ...env,
+ BUN_INSTALL_CACHE_DIR: join(run_dir, ".cache"),
+ },
+ });
+ expect(stderr1).toBeDefined();
+ const err1 = await new Response(stderr1).text();
+ expect(err1).toBe("");
+ expect(await readdirSorted(run_dir)).toEqual([".cache", "test.js"]);
+ expect(await readdirSorted(join(run_dir, ".cache"))).toContain("uglify-js");
+ expect(await readdirSorted(join(run_dir, ".cache", "uglify-js"))).toEqual(["3.17.4"]);
+ expect(stdout1).toBeDefined();
+ const out1 = await new Response(stdout1).text();
+ expect(out1.split(/\r?\n/)).toEqual(["print(42);", ""]);
+ expect(await exited1).toBe(0);
+ // Perform `bun test.js` with cached dependencies
+ const {
+ stdout: stdout2,
+ stderr: stderr2,
+ exited: exited2,
+ } = spawn({
cmd: [bunExe(), "test.js"],
cwd: run_dir,
stdout: null,
@@ -35,14 +65,16 @@ console.log(minify("print(6 * 7)").code);
BUN_INSTALL_CACHE_DIR: join(run_dir, ".cache"),
},
});
- expect(stderr).toBeDefined();
- const err = await new Response(stderr).text();
- expect(err).toBe("");
- expect(stdout).toBeDefined();
- const out = await new Response(stdout).text();
- expect(out.split(/\r?\n/)).toEqual(["print(42);", ""]);
- expect(await exited).toBe(0);
+ expect(stderr2).toBeDefined();
+ const err2 = await new Response(stderr2).text();
+ expect(err2).toBe("");
expect(await readdirSorted(run_dir)).toEqual([".cache", "test.js"]);
+ expect(await readdirSorted(join(run_dir, ".cache"))).toContain("uglify-js");
+ expect(await readdirSorted(join(run_dir, ".cache", "uglify-js"))).toEqual(["3.17.4"]);
+ expect(stdout2).toBeDefined();
+ const out2 = await new Response(stdout2).text();
+ expect(out2.split(/\r?\n/)).toEqual(["print(42);", ""]);
+ expect(await exited2).toBe(0);
});
it("should download dependencies to run local file", async () => {
@@ -58,7 +90,11 @@ for (const entry of await decompress(Buffer.from(buffer))) {
}
`,
);
- const { stdout, stderr, exited } = spawn({
+ const {
+ stdout: stdout1,
+ stderr: stderr1,
+ exited: exited1,
+ } = spawn({
cmd: [bunExe(), "test.js"],
cwd: run_dir,
stdout: null,
@@ -69,9 +105,49 @@ for (const entry of await decompress(Buffer.from(buffer))) {
BUN_INSTALL_CACHE_DIR: join(run_dir, ".cache"),
},
});
- expect(stderr).toBeDefined();
- const err = await new Response(stderr).text();
- expect(err).toBe("");
+ expect(stderr1).toBeDefined();
+ const err1 = await new Response(stderr1).text();
+ expect(err1).toBe("");
+ expect(await readdirSorted(run_dir)).toEqual([".cache", "test.js"]);
+ expect(await readdirSorted(join(run_dir, ".cache"))).toContain("decompress");
+ expect(await readdirSorted(join(run_dir, ".cache", "decompress"))).toEqual(["4.2.1"]);
+ expect(await readdirSorted(join(run_dir, ".cache", "decompress", "4.2.1"))).toEqual([
+ "index.js",
+ "license",
+ "package.json",
+ "readme.md",
+ ]);
+ expect(await file(join(run_dir, ".cache", "decompress", "4.2.1", "index.js")).text()).toContain(
+ "\nmodule.exports = ",
+ );
+ expect(stdout1).toBeDefined();
+ const out1 = await new Response(stdout1).text();
+ expect(out1.split(/\r?\n/)).toEqual([
+ "directory: package/",
+ "file: package/index.js",
+ "file: package/package.json",
+ "",
+ ]);
+ expect(await exited1).toBe(0);
+ // Perform `bun run test.js` with cached dependencies
+ const {
+ stdout: stdout2,
+ stderr: stderr2,
+ exited: exited2,
+ } = spawn({
+ cmd: [bunExe(), "run", "test.js"],
+ cwd: run_dir,
+ stdout: null,
+ stdin: "pipe",
+ stderr: "pipe",
+ env: {
+ ...env,
+ BUN_INSTALL_CACHE_DIR: join(run_dir, ".cache"),
+ },
+ });
+ expect(stderr2).toBeDefined();
+ const err2 = await new Response(stderr2).text();
+ expect(err2).toBe("");
expect(await readdirSorted(run_dir)).toEqual([".cache", "test.js"]);
expect(await readdirSorted(join(run_dir, ".cache"))).toContain("decompress");
expect(await readdirSorted(join(run_dir, ".cache", "decompress"))).toEqual(["4.2.1"]);
@@ -84,13 +160,13 @@ for (const entry of await decompress(Buffer.from(buffer))) {
expect(await file(join(run_dir, ".cache", "decompress", "4.2.1", "index.js")).text()).toContain(
"\nmodule.exports = ",
);
- expect(stdout).toBeDefined();
- const out = await new Response(stdout).text();
- expect(out.split(/\r?\n/)).toEqual([
+ expect(stdout2).toBeDefined();
+ const out2 = await new Response(stdout2).text();
+ expect(out2.split(/\r?\n/)).toEqual([
"directory: package/",
"file: package/index.js",
"file: package/package.json",
"",
]);
- expect(await exited).toBe(0);
+ expect(await exited2).toBe(0);
});