author    Jarred Sumner <jarred@jarredsumner.com>  2022-11-06 21:42:05 -0800
committer GitHub <noreply@github.com>              2022-11-06 21:42:05 -0800
commit    e45f72e8e422191adeb4fd1bad896dc6a47c76b3 (patch)
tree      3a76da8b343c081dba84e0ac95f3c2cc2423106a
parent    645cf903350a7fe5f5076100b7c4a6bc8cd1b431 (diff)
download  bun-e45f72e8e422191adeb4fd1bad896dc6a47c76b3.tar.gz
          bun-e45f72e8e422191adeb4fd1bad896dc6a47c76b3.tar.zst
          bun-e45f72e8e422191adeb4fd1bad896dc6a47c76b3.zip
Automatically install npm packages when running a script in Bun's runtime (#1459)
* Update bundler.zig
* WIP
* Update README.md
* Update README.md
* wip
* Support running scripts without package.json
* Add `--no-auto-install` and `--prefer-offline` flags
* WIP
* wip
* Update headers-handwritten.h
* WIP
* Build fixes
* Fix UAF
* Update install.zig
* Must call .allocate()
* Micro-optimization: only call .timestamp() once per tick when installing packages
* Support progress bar
* Extend the timestamp for package staleness checks to 1 day
* Add `--prefer-latest`, `-i` CLI Flags
* Fix crash
* Support line text manually being set on an Error instance
* Add a few more fields for error messages
* Fix bug when counting 8 character strings in string builder
* Implement error handling for automatic package installs!
* Fix crash
* Make it say module when there's a slash
* Update module_loader.zig
* Ban dependency versions in import specifiers when a package.json is present
* Remove unused field
* Update README.md
* Update README.md
* Update README.md
* Update README.md

Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
-rw-r--r--  README.md                                  241
-rw-r--r--  src/analytics/analytics_thread.zig           2
-rw-r--r--  src/bun.js/bindings/ModuleLoader.cpp        33
-rw-r--r--  src/bun.js/bindings/bindings.cpp            16
-rw-r--r--  src/bun.js/bindings/exports.zig              4
-rw-r--r--  src/bun.js/bindings/headers-handwritten.h    4
-rw-r--r--  src/bun.js/event_loop.zig                    7
-rw-r--r--  src/bun.js/javascript.zig                 1387
-rw-r--r--  src/bun.js/module_loader.zig              2052
-rw-r--r--  src/bun_js.zig                              15
-rw-r--r--  src/bundler.zig                             85
-rw-r--r--  src/bunfig.zig                              40
-rw-r--r--  src/c.zig                                   23
-rw-r--r--  src/cache.zig                               13
-rw-r--r--  src/cli.zig                                 39
-rw-r--r--  src/cli/test_command.zig                     3
-rw-r--r--  src/css_scanner.zig                          1
-rw-r--r--  src/global.zig                              15
-rw-r--r--  src/install/dependency.zig                  52
-rw-r--r--  src/install/extract_tarball.zig              3
-rw-r--r--  src/install/install.zig                   1170
-rw-r--r--  src/install/lockfile.zig                   185
-rw-r--r--  src/install/npm.zig                          6
-rw-r--r--  src/install/resolvers/folder_resolver.zig   53
-rw-r--r--  src/install/semver.zig                     116
-rw-r--r--  src/js_ast.zig                              12
-rw-r--r--  src/jsc.zig                                  1
-rw-r--r--  src/linker.zig                             196
-rw-r--r--  src/logger.zig                              57
-rw-r--r--  src/options.zig                             22
-rw-r--r--  src/output.zig                               2
-rw-r--r--  src/resolver/dir_info.zig                   27
-rw-r--r--  src/resolver/package_json.zig              326
-rw-r--r--  src/resolver/resolver.zig                 1100
-rw-r--r--  src/string_builder.zig                       7
35 files changed, 5529 insertions, 1786 deletions
diff --git a/README.md b/README.md
index 096a45439..fcccdc4d6 100644
--- a/README.md
+++ b/README.md
@@ -175,6 +175,7 @@ bun upgrade --canary
- [`Bun.Transpiler.scan`](#buntranspilerscan)
- [`Bun.Transpiler.scanImports`](#buntranspilerscanimports)
- [`Bun.peek` - read a promise same-tick](#bunpeek---read-a-promise-without-resolving-it)
+- [Module resolution in Bun](#module-resolution-in-bun)
- [Environment variables](#environment-variables)
- [Credits](#credits)
- [License](#license)
@@ -4394,6 +4395,246 @@ export const loader = () => import('./loader');
]
```
+## Module resolution in Bun
+
+Loading & resolving modules in JavaScript & TypeScript is complicated.
+
+### Module loading
+
+Bun supports both CommonJS and ESM, and automatically transpiles CommonJS into synchronous ESM. Bun assumes a project's own code uses ESM, while dependencies may use either format. CommonJS is discouraged in new projects because it lacks top-level await, and syntax like `export * from` is less reliable in CommonJS (without bundling the entire dependency tree ahead of time).
+
+Bun supports the `"bun"` `exports` condition and will prefer that over other conditions.
+
+To load a bun-specific module, set the `"bun"` export condition in package.json:
+
+```json
+{
+ "name": "foo",
+ "exports": {
+ "bun": "./index.bun.js",
+ "default": "./index.js"
+ }
+}
+```
+
+When you import `"foo"` in a Bun project, Bun will load `./index.bun.js` instead of `./index.js`.
+
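+For example, given the package above:
+
+```js
+// In Bun, this resolves via the "bun" export condition to ./index.bun.js.
+// Other runtimes fall back to "default" and load ./index.js.
+import "foo";
+```
+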
+If the library strictly uses ESM (excluding dependencies), specify `"type": "module"` in package.json.
+
+```json
+{
+ "name": "foo",
+ "type": "module"
+}
+```
+
+### Module resolution
+
+Bun implements the Node.js module resolution algorithm, which is also the algorithm used by Webpack, Rollup, esbuild, and many other bundlers.
+
+```js
+import "./foo";
+// This will check:
+// - ./foo.ts
+// - ./foo.tsx
+// - ./foo.js
+// - ./foo.mjs
+// - ./foo.cjs
+// - ./foo/index.ts
+// - ./foo/index.js
+// - ./foo/index.json
+// - ./foo/index.mjs
+```
+
+Bun also supports `package.json`, including the `exports`, `imports`, `module`, `main`, and `browser` fields.
+
+Bun supports `tsconfig.json`'s `paths` field, allowing you to override how paths resolve.
+
+```json
+{
+ "compilerOptions": {
+ "paths": {
+ "foo": ["./bar"]
+ }
+ }
+}
+```
+
+```js
+import "foo";
+// This will check:
+// - ./bar
+// - ./bar.ts
+// - ./bar.tsx
+// - ./bar.js
+// - ./bar.mjs
+// - ./bar.cjs
+// - ./bar/index.ts
+// - ./bar/index.js
+// - ./bar/index.json
+// - ./bar/index.mjs
+```
+
+You can also use `jsconfig.json` if you don't want to use TypeScript.
+
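+A minimal `jsconfig.json` sketch mirroring the `paths` example above (it works the same way for plain JavaScript projects):
+
+```json
+{
+  "compilerOptions": {
+    "paths": {
+      "foo": ["./bar"]
+    }
+  }
+}
+```
+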
+### Bun's Module Resolution Algorithm
+
+Bun's module resolution algorithm is a lot like Node's, with one key difference: the `node_modules` folder is optional, and so is `package.json`.
+
+Highlights:
+
+- Automatic package installs: there is no need to run `npm install` or `bun install` before running a project.
+- Save disk space & time by not copying/linking dependencies into `node_modules`. Instead, Bun uses a shared global cache to store & load dependencies from a single location on disk.
+- `package.json` is optional
+- Compatible with npm packages
+- Security: dependencies can't run postinstall scripts. No malicious code can be run when installing dependencies.
+- One copy of each name@version of a dependency is used instead of multiple copies (Bun still supports different versions of the same package). Since `node_modules` relies on directory structure to resolve dependencies, projects often end up with the same name@version of a dependency installed multiple times in one `node_modules` folder. This is not a problem with Bun's module resolution algorithm because it uses a single flat global cache for all dependencies on your computer.
+
+For ecosystem compatibility, when a `node_modules` folder is present, it is used to resolve modules just like in Node.js, and Bun-specific features like automatic package installs are disabled.
+
+When the `node_modules` folder is _not_ present, that's when it gets more interesting.
+
+Bun lazily loads a lockfile for the project. It checks for `bun.lockb` in the project root.
+
+If the lockfile is present, it is used first to resolve modules. If it is not present, a lockfile is lazily generated.
+
+```js
+// unspecified version:
+import React from "react";
+
+// set version in import specifier: (only supported if no package.json is present)
+import React18_2 from "react@18.2.0";
+
+// range version: (only supported if no package.json is present)
+import React18 from "react@^18";
+```
+
+Bun will check the lockfile for the version. If the lockfile doesn't have a version, it will check the nearest `package.json` for a `react` dependency and use that version. If the `package.json` doesn't have a `react` dependency, it will use the latest version. If the latest version is not installed in the global cache, it will download it from npm's registry.
+
+Lowlights:
+
+- TypeScript type support isn't implemented yet
+- patch package support isn't implemented yet
+
+#### Resolving packages
+
+`bun install` uses a shared global cache. After a package is installed, Bun adds a symlink indexing that package's version, which makes it quick to see which versions of a package are installed on-disk.
+
+Bun's module resolver shares the same global cache as `bun install`, so it can resolve packages without installing them into a local `node_modules` folder.
+
+When you import a module like `lodash` in Bun without a node_modules folder, here's what happens:
+
+```js
+import { debounce } from "lodash";
+```
+
+1. Check if auto-install is enabled and if so, load the lockfile or lazily generate it.
+2. Check if the lockfile has an existing dependency for `lodash`.
+3. If the lockfile has an existing dependency for `lodash`, use the resolved version specified in the lockfile.
+4. If the lockfile does not have an existing resolved version for `lodash`, check if there is a `package.json` in the current or parent directories with a dependency on `lodash`. If so, use that as the range specifier for the dependency. Since no version is specified for the `"lodash"` string, it is assumed that you meant the version specified in the `package.json`'s `"dependencies"` field.
+5. If there is no `package.json` in the current or parent directories which has a dependency on `lodash`, use the latest version of `lodash` listed in the package manifest cache. If the package manifest cache is out of date, refresh it.
+6. Once a version is chosen, add the dependency to the lockfile and all the dependencies of the dependency to the lockfile.
+7. Return the file path to the resolved version of `lodash`. If the version is not installed on-disk, download & install it, along with all dependencies. (These steps are sketched in pseudocode below.)
+
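+The steps above, as a hypothetical pseudocode sketch. The helper names here (`loadOrCreateLockfile`, `installIntoGlobalCache`, etc.) are illustrative only, not Bun's actual internals:
+
+```js
+// Hypothetical sketch of steps 1-7 for `import "lodash"`.
+// None of these helpers are real Bun APIs.
+function resolvePackage(name) {
+  const lockfile = loadOrCreateLockfile(); // step 1 (only when auto-install is enabled)
+  let version = lockfile.resolvedVersion(name); // steps 2-3
+  if (!version) {
+    // step 4: nearest package.json dependency range,
+    // else step 5: latest version from the (possibly refreshed) manifest cache
+    const range = nearestPackageJsonDependency(name) ?? latestFromManifestCache(name);
+    version = resolveRange(name, range);
+    lockfile.add(name, version); // step 6, plus transitive dependencies
+  }
+  if (!isInGlobalCache(name, version)) {
+    installIntoGlobalCache(name, version); // step 7: download & install on demand
+  }
+  return globalCachePath(name, version); // file path handed to the module loader
+}
+```
+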
+More details:
+
+- `bun install` ordinarily caches package manifests from npm for up to 5 minutes. For automatic package installs in Bun's JavaScript runtime, that cache is refreshed daily instead. This is to minimize startup time cost when no lockfile is present.
+- The `BUN_INSTALL_CACHE_DIR` environment variable lets you customize the location of the cache. Currently, it defaults to `$XDG_CACHE_HOME/.bun/install/cache` or `$HOME/.bun/install/cache`.
+- The implementation details of Bun's install cache will change between versions. Don't think of it as an API. To reliably resolve packages, use Bun's builtin APIs (such as `Bun.resolveSync` or `import.meta.resolve`, sketched below) instead of relying on the filesystem directly. Bun will likely move to a binary archive format where packages may not correspond to files/folders on disk at all, so if you depend on the filesystem structure instead of the JavaScript API, your code will eventually break.
+
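+For example, a minimal sketch of resolving a package path through the supported API instead of reading the cache directory (assuming `Bun.resolveSync(specifier, parentDir)` as referenced above):
+
+```js
+// Ask Bun's resolver for the path instead of guessing the cache layout.
+const lodashPath = Bun.resolveSync("lodash", import.meta.dir);
+console.log(lodashPath); // absolute path chosen by Bun's resolver
+```
+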
+##### Prefer offline
+
+Instead of checking npm for latest versions, you can pass the `--prefer-offline` flag to prefer locally-installed versions of packages.
+
+```bash
+bun run --prefer-offline my-script.ts
+```
+
+This will check the install cache for installed versions of packages before checking the npm registry. If no matching version of a package is installed, only then will it check npm for the latest version.
+
+##### Prefer latest
+
+To always use the latest version of a package, you can pass the `--prefer-latest` flag.
+
+```bash
+bun run --prefer-latest my-script.ts
+```
+
+#### Resolving modules
+
+After ensuring the package is installed, Bun resolves the module.
+
+For the most part, this is the same as what Node.js does.
+
+The main difference is that instead of looking for `node_modules` folders to resolve packages, Bun looks in Bun's install cache.
+
+This only activates for "package paths", i.e. paths that start with a package name. For example, `lodash/debounce` is a package path; `./foo` is not.
+
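+For example (a hypothetical pair of imports):
+
+```js
+// A package path: resolved from Bun's install cache (or node_modules, if present).
+import debounce from "lodash/debounce";
+
+// A relative path: resolved from the importing file's directory, never the cache.
+import foo from "./foo";
+```
+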
+#### Frequently asked questions
+
+**How is this different than what Node.js does?**
+
+Per-project `node_modules` folders are not necessary when using Bun. This saves you disk space and time spent copying/linking dependencies into `node_modules` folders for every project.
+
+| Runtime | require/import | importing package | resolution |
+| ------- | -------------- | ----------------- | ------------------------------------------------------ |
+| Node.js | "react-dom" | "react" | `./node_modules/react-dom/index.js` |
+| Bun | "react-dom" | "react" | `$BUN_INSTALL_CACHE_DIR/react-dom@${version}/index.js` |
+
+When multiple versions of a package are installed, Node.js relies on the directory tree structure to resolve the correct version.
+
+Bun uses its lockfile to figure out what the version _should_ be, and then installs it into the global cache (if that version is not already installed).
+
+With a dependency tree like this:
+
+```
+- root 1.0.0
+  - dep1 1.0.0
+    - dep2 1.0.0
+  - dep3 1.0.0
+    - dep2 2.0.0
+  - dep2 2.0.0
+```
+
+To satisfy the Node.js Module Resolution algorithm, npm clients are forced to install the same version of `dep2` **multiple times for the same project**. This is because `dep2` is a dependency of `dep1` and `dep3`, and `dep3` depends on a different version of `dep2` than `dep1`.
+
+With Bun, the lockfile is used to resolve the correct version of `dep2` to use. This means that `dep2` only needs to be installed once and it will be reused for all projects on your computer that depend on it.
+
+![image](https://user-images.githubusercontent.com/709451/198907459-710d5299-bac0-40d8-b630-8112d42900e1.png)
+
+**How do I debug an issue with a dependency?**
+
+If you run `bun install` (or install with any other npm-compatible package manager), it will create a `node_modules` folder and install dependencies into that folder.
+
+Bun's runtime continues to fully support the `node_modules` folder.
+
+**How is this different than what pnpm does?**
+
+With pnpm, you have to run `pnpm install`, which creates a `node_modules` folder of symlinks for the runtime to resolve.
+
+With Bun, you don't have to run any commands to install dependencies. Bun doesn't create a `node_modules` folder.
+
+Just run `bun run foo.js` and it will automatically install the dependencies for you on-demand.
+
+**How is this different than what Yarn Plug'N'Play does?**
+
+Two things:
+
+1. Bun's JavaScript runtime automatically installs dependencies when running a file. With yarn, you have to run `yarn install` before you run a script.
+2. Yarn Plug'N'Play [makes loading dependencies slower](https://twitter.com/jarredsumner/status/1458207919636287490) at runtime because, under the hood, it uses zip files to store dependencies, and zip files tend not to be as performant for random-access reads as files on disk.
+
+**How is this different than what Deno does?**
+
+Deno requires an `npm:` specifier before each npm import, lacks support for reading `"paths"` from tsconfig.json, and doesn't have as much support for package.json fields.
+
+**What about security?**
+
+Bun doesn't run postinstall scripts. No malicious code can be run from installing dependencies.
+
+**Can I use bun install with Node.js projects?**
+
+Yes. `bun install` creates an ordinary `node_modules` folder. It's designed to be compatible with other package managers and Node.js.
+
## Environment variables
- `GOMAXPROCS`: For `bun bun`, this sets the maximum number of threads to use. If you’re experiencing an issue with `bun bun`, try setting `GOMAXPROCS=1` to force bun to run single-threaded
diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig
index 75cd3165b..b57a1d8a4 100644
--- a/src/analytics/analytics_thread.zig
+++ b/src/analytics/analytics_thread.zig
@@ -52,6 +52,7 @@ pub const Features = struct {
pub var external = false;
pub var fetch = false;
pub var bunfig = false;
+ pub var extracted_packages = false;
pub fn formatter() Formatter {
return Formatter{};
@@ -79,6 +80,7 @@ pub const Features = struct {
"external",
"fetch",
"bunfig",
+ "extracted_packages",
};
inline for (fields) |field| {
if (@field(Features, field)) {
diff --git a/src/bun.js/bindings/ModuleLoader.cpp b/src/bun.js/bindings/ModuleLoader.cpp
index cb5ff0864..45f3cc46b 100644
--- a/src/bun.js/bindings/ModuleLoader.cpp
+++ b/src/bun.js/bindings/ModuleLoader.cpp
@@ -325,6 +325,29 @@ static JSValue handleVirtualModuleResult(
}
}
+extern "C" void Bun__onFulfillAsyncModule(
+ EncodedJSValue promiseValue,
+ ErrorableResolvedSource* res,
+ ZigString* specifier,
+ ZigString* referrer)
+{
+ JSC::JSValue value = JSValue::decode(promiseValue);
+ JSC::JSInternalPromise* promise = jsCast<JSC::JSInternalPromise*>(value);
+ auto* globalObject = promise->globalObject();
+ auto& vm = globalObject->vm();
+ auto scope = DECLARE_THROW_SCOPE(vm);
+
+ if (!res->success) {
+ throwException(scope, res->result.err, globalObject);
+ auto* exception = scope.exception();
+ scope.clearException();
+ return promise->reject(promise->globalObject(), exception);
+ }
+
+ auto provider = Zig::SourceProvider::create(res->result.value);
+ promise->resolve(promise->globalObject(), JSC::JSSourceCode::create(vm, JSC::SourceCode(provider)));
+}
+
template<bool allowPromise>
static JSValue fetchSourceCode(
Zig::GlobalObject* globalObject,
@@ -435,7 +458,15 @@ static JSValue fetchSourceCode(
return handleVirtualModuleResult<allowPromise>(globalObject, virtualModuleResult, res, specifier, referrer);
}
- Bun__transpileFile(bunVM, globalObject, specifier, referrer, res);
+ if constexpr (allowPromise) {
+ void* pendingCtx = Bun__transpileFile(bunVM, globalObject, specifier, referrer, res, true);
+ if (pendingCtx) {
+ return reinterpret_cast<JSC::JSInternalPromise*>(pendingCtx);
+ }
+ } else {
+ Bun__transpileFile(bunVM, globalObject, specifier, referrer, res, false);
+ }
+
if (!res->success) {
throwException(scope, res->result.err, globalObject);
auto* exception = scope.exception();
diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp
index 65c2c458f..cc6c237ed 100644
--- a/src/bun.js/bindings/bindings.cpp
+++ b/src/bun.js/bindings/bindings.cpp
@@ -2419,12 +2419,22 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global,
if (JSC::JSValue sourceURL = obj->getIfPropertyExists(global, global->vm().propertyNames->sourceURL)) {
except->stack.frames_ptr[0].source_url = Zig::toZigString(sourceURL, global);
+ if (JSC::JSValue column = obj->getIfPropertyExists(global, global->vm().propertyNames->column)) {
+ except->stack.frames_ptr[0].position.column_start = column.toInt32(global);
+ }
+
if (JSC::JSValue line = obj->getIfPropertyExists(global, global->vm().propertyNames->line)) {
except->stack.frames_ptr[0].position.line = line.toInt32(global);
- }
- if (JSC::JSValue column = obj->getIfPropertyExists(global, global->vm().propertyNames->column)) {
- except->stack.frames_ptr[0].position.column_start = column.toInt32(global);
+ if (JSC::JSValue lineText = obj->getIfPropertyExists(global, JSC::Identifier::fromString(global->vm(), "lineText"_s))) {
+ if (JSC::JSString* jsStr = lineText.toStringOrNull(global)) {
+ auto str = jsStr->value(global);
+ except->stack.source_lines_ptr[0] = Zig::toZigString(str);
+ except->stack.source_lines_numbers[0] = except->stack.frames_ptr[0].position.line;
+ except->stack.source_lines_len = 1;
+ except->remapped = true;
+ }
+ }
}
except->stack.frames_len = 1;
}
diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig
index eb4f33001..f534a3677 100644
--- a/src/bun.js/bindings/exports.zig
+++ b/src/bun.js/bindings/exports.zig
@@ -2555,7 +2555,9 @@ pub const ZigConsoleClient = struct {
_: usize,
// args
_: *ScriptArguments,
- ) callconv(.C) void {}
+ ) callconv(.C) void {
+
+ }
pub fn profile(
// console
_: ZigConsoleClient.Type,
diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h
index 5b8f6ad8d..553259ec0 100644
--- a/src/bun.js/bindings/headers-handwritten.h
+++ b/src/bun.js/bindings/headers-handwritten.h
@@ -229,12 +229,12 @@ extern "C" JSC::EncodedJSValue Bun__runVirtualModule(
JSC::JSGlobalObject* global,
ZigString* specifier);
-extern "C" bool Bun__transpileFile(
+extern "C" void* Bun__transpileFile(
void* bunVM,
JSC::JSGlobalObject* global,
ZigString* specifier,
ZigString* referrer,
- ErrorableResolvedSource* result);
+ ErrorableResolvedSource* result, bool allowPromise);
extern "C" JSC::EncodedJSValue CallbackJob__onResolve(JSC::JSGlobalObject*, JSC::CallFrame*);
extern "C" JSC::EncodedJSValue CallbackJob__onReject(JSC::JSGlobalObject*, JSC::CallFrame*);
diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig
index d87a7edcb..0c99a949a 100644
--- a/src/bun.js/event_loop.zig
+++ b/src/bun.js/event_loop.zig
@@ -183,6 +183,7 @@ pub const CppTask = opaque {
const ThreadSafeFunction = JSC.napi.ThreadSafeFunction;
const MicrotaskForDefaultGlobalObject = JSC.MicrotaskForDefaultGlobalObject;
const HotReloadTask = JSC.HotReloader.HotReloadTask;
+const PollPendingModulesTask = JSC.ModuleLoader.AsyncModule.Queue;
// const PromiseTask = JSInternalPromise.Completion.PromiseTask;
pub const Task = TaggedPointerUnion(.{
FetchTasklet,
@@ -197,6 +198,7 @@ pub const Task = TaggedPointerUnion(.{
ThreadSafeFunction,
CppTask,
HotReloadTask,
+ PollPendingModulesTask,
// PromiseTask,
// TimeoutTasklet,
});
@@ -223,7 +225,7 @@ pub const EventLoop = struct {
tasks: Queue = undefined,
concurrent_tasks: ConcurrentTask.Queue = ConcurrentTask.Queue{},
global: *JSGlobalObject = undefined,
- virtual_machine: *VirtualMachine = undefined,
+ virtual_machine: *JSC.VirtualMachine = undefined,
waker: ?AsyncIO.Waker = null,
start_server_on_next_tick: bool = false,
defer_count: std.atomic.Atomic(usize) = std.atomic.Atomic(usize).init(0),
@@ -289,6 +291,9 @@ pub const EventLoop = struct {
var any: *CppTask = task.get(CppTask).?;
any.run(global);
},
+ @field(Task.Tag, typeBaseName(@typeName(PollPendingModulesTask))) => {
+ this.virtual_machine.modules.onPoll();
+ },
else => if (Environment.allow_assert) {
bun.Output.prettyln("\nUnexpected tag: {s}\n", .{@tagName(task.tag())});
} else unreachable,
diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig
index 7a78db237..4068e9cfe 100644
--- a/src/bun.js/javascript.zig
+++ b/src/bun.js/javascript.zig
@@ -16,9 +16,6 @@ const Arena = @import("../mimalloc_arena.zig").Arena;
const C = bun.C;
const NetworkThread = @import("http").NetworkThread;
const IO = @import("io");
-pub fn zigCast(comptime Destination: type, value: anytype) *Destination {
- return @ptrCast(*Destination, @alignCast(@alignOf(*Destination), value));
-}
const Allocator = std.mem.Allocator;
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const Fs = @import("../fs.zig");
@@ -26,6 +23,7 @@ const Resolver = @import("../resolver/resolver.zig");
const ast = @import("../import_record.zig");
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
const MacroEntryPoint = @import("../bundler.zig").MacroEntryPoint;
+const ParseResult = @import("../bundler.zig").ParseResult;
const logger = @import("../logger.zig");
const Api = @import("../api/schema.zig").Api;
const options = @import("../options.zig");
@@ -45,7 +43,6 @@ const Runtime = @import("../runtime.zig");
const Router = @import("./api/router.zig");
const ImportRecord = ast.ImportRecord;
const DotEnv = @import("../env_loader.zig");
-const ParseResult = @import("../bundler.zig").ParseResult;
const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
const WebCore = @import("../jsc.zig").WebCore;
@@ -86,7 +83,13 @@ const URL = @import("../url.zig").URL;
const Transpiler = @import("./api/transpiler.zig");
const Bun = JSC.API.Bun;
const EventLoop = JSC.EventLoop;
+const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
const ThreadSafeFunction = JSC.napi.ThreadSafeFunction;
+const PackageManager = @import("../install/install.zig").PackageManager;
+
+const ModuleLoader = JSC.ModuleLoader;
+const FetchFlags = JSC.FetchFlags;
+
pub const GlobalConstructors = [_]type{
JSC.Cloudflare.HTMLRewriter.Constructor,
};
@@ -126,7 +129,7 @@ pub fn OpaqueWrap(comptime Context: type, comptime Function: fn (this: *Context)
}.callback;
}
-const bun_file_import_path = "/node_modules.server.bun";
+pub const bun_file_import_path = "/node_modules.server.bun";
const SourceMap = @import("../sourcemap/sourcemap.zig");
const MappingList = SourceMap.Mapping.List;
@@ -346,6 +349,7 @@ pub const VirtualMachine = struct {
is_printing_plugin: bool = false,
plugin_runner: ?PluginRunner = null,
+ is_main_thread: bool = false,
/// Do not access this field directly
/// It exists in the VirtualMachine struct so that
@@ -404,9 +408,17 @@ pub const VirtualMachine = struct {
us_loop_reference_count: usize = 0,
is_us_loop_entered: bool = false,
pending_internal_promise: *JSC.JSInternalPromise = undefined,
-
+ auto_install_dependencies: bool = false,
load_builtins_from_path: []const u8 = "",
+ modules: ModuleLoader.AsyncModule.Queue = .{},
+
+ pub threadlocal var is_main_thread_vm: bool = false;
+
+ pub inline fn packageManager(this: *VirtualMachine) *PackageManager {
+ return this.bundler.getPackageManager();
+ }
+
pub fn reload(this: *VirtualMachine) void {
Output.debug("Reloading...", .{});
this.global.reload();
@@ -462,7 +474,7 @@ pub const VirtualMachine = struct {
this.eventLoop().enqueueTask(task);
}
- pub inline fn enqueueTaskConcurrent(this: *VirtualMachine, task: JSC.ConcurrentTask) void {
+ pub inline fn enqueueTaskConcurrent(this: *VirtualMachine, task: *JSC.ConcurrentTask) void {
this.eventLoop().enqueueTaskConcurrent(task);
}
@@ -627,6 +639,12 @@ pub const VirtualMachine = struct {
vm.bundler.macro_context = null;
+ VirtualMachine.vm.bundler.resolver.onWakePackageManager = .{
+ .context = &VirtualMachine.vm.modules,
+ .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler,
+ .onDependencyError = JSC.ModuleLoader.AsyncModule.Queue.onDependencyError,
+ };
+
VirtualMachine.vm.bundler.configureLinker();
try VirtualMachine.vm.bundler.configureFramework(false);
@@ -742,422 +760,18 @@ pub const VirtualMachine = struct {
this.resolved_count = 0;
}
- const shared_library_suffix = if (Environment.isMac) "dylib" else if (Environment.isLinux) "so" else "";
-
- pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: string, log: *logger.Log, comptime disable_transpilying: bool) !?ResolvedSource {
- if (jsc_vm.node_modules != null and strings.eqlComptime(specifier, bun_file_import_path)) {
- // We kind of need an abstraction around this.
- // Basically we should subclass JSC::SourceCode with:
- // - hash
- // - file descriptor for source input
- // - file path + file descriptor for bytecode caching
- // - separate bundles for server build vs browser build OR at least separate sections
- const code = try jsc_vm.node_modules.?.readCodeAsStringSlow(jsc_vm.allocator);
-
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(code),
- .specifier = ZigString.init(bun_file_import_path),
- .source_url = ZigString.init(bun_file_import_path[1..]),
- .hash = 0, // TODO
- };
- } else if (jsc_vm.node_modules == null and strings.eqlComptime(specifier, Runtime.Runtime.Imports.Name)) {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(Runtime.Runtime.sourceContentBun()),
- .specifier = ZigString.init(Runtime.Runtime.Imports.Name),
- .source_url = ZigString.init(Runtime.Runtime.Imports.Name),
- .hash = Runtime.Runtime.versionHash(),
- };
- } else if (HardcodedModule.Map.get(specifier)) |hardcoded| {
- switch (hardcoded) {
- // This is all complicated because the imports have to be linked and we want to run the printer on it
- // so it consistently handles bundled imports
- // we can't take the shortcut of just directly importing the file, sadly.
- .@"bun:main" => {
- if (comptime disable_transpilying) {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsc_vm.entry_point.source.contents),
- .specifier = ZigString.init(std.mem.span(main_file_name)),
- .source_url = ZigString.init(std.mem.span(main_file_name)),
- .hash = 0,
- };
- }
- defer jsc_vm.transpiled_count += 1;
-
- var bundler = &jsc_vm.bundler;
- var old = jsc_vm.bundler.log;
- jsc_vm.bundler.log = log;
- jsc_vm.bundler.linker.log = log;
- jsc_vm.bundler.resolver.log = log;
- defer {
- jsc_vm.bundler.log = old;
- jsc_vm.bundler.linker.log = old;
- jsc_vm.bundler.resolver.log = old;
- }
-
- var jsx = bundler.options.jsx;
- jsx.parse = false;
- var opts = js_parser.Parser.Options.init(jsx, .js);
- opts.enable_bundling = false;
- opts.transform_require_to_import = false;
- opts.features.dynamic_require = true;
- opts.can_import_from_bundle = bundler.options.node_modules_bundle != null;
- opts.features.hot_module_reloading = false;
- opts.features.react_fast_refresh = false;
- opts.filepath_hash_for_hmr = 0;
- opts.warn_about_unbundled_modules = false;
- opts.macro_context = &jsc_vm.bundler.macro_context.?;
- const main_ast = (bundler.resolver.caches.js.parse(jsc_vm.allocator, opts, bundler.options.define, bundler.log, &jsc_vm.entry_point.source) catch null) orelse {
- return error.ParseError;
- };
- var parse_result = ParseResult{ .source = jsc_vm.entry_point.source, .ast = main_ast, .loader = .js, .input_fd = null };
- var file_path = Fs.Path.init(bundler.fs.top_level_dir);
- file_path.name.dir = bundler.fs.top_level_dir;
- file_path.name.base = "bun:main";
- try bundler.linker.link(
- file_path,
- &parse_result,
- jsc_vm.origin,
- .absolute_path,
- false,
- true,
- );
- var printer = source_code_printer.?.*;
- var written: usize = undefined;
- printer.ctx.reset();
- {
- defer source_code_printer.?.* = printer;
- written = try jsc_vm.bundler.printWithSourceMap(
- parse_result,
- @TypeOf(&printer),
- &printer,
- .esm_ascii,
- SavedSourceMap.SourceMapHandler.init(&jsc_vm.source_mappings),
- );
- }
-
- if (comptime Environment.dump_source)
- try dumpSource(main_file_name, &printer);
-
- if (written == 0) {
- return error.PrintingErrorWriteFailed;
- }
-
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, printer.ctx.written) catch unreachable),
- .specifier = ZigString.init(std.mem.span(main_file_name)),
- .source_url = ZigString.init(std.mem.span(main_file_name)),
- .hash = 0,
- };
- },
- .@"bun:jsc" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "bun-jsc.exports.js")),
- .specifier = ZigString.init("bun:jsc"),
- .source_url = ZigString.init("bun:jsc"),
- .hash = 0,
- };
- },
- .@"node:child_process" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "child_process.exports.js")),
- .specifier = ZigString.init("node:child_process"),
- .source_url = ZigString.init("node:child_process"),
- .hash = 0,
- };
- },
- .@"node:net" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "net.exports.js")),
- .specifier = ZigString.init("node:net"),
- .source_url = ZigString.init("node:net"),
- .hash = 0,
- };
- },
- .@"node:fs" => {
- if (comptime Environment.isDebug) {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(strings.append(bun.default_allocator, jsModuleFromFile(jsc_vm.load_builtins_from_path, "fs.exports.js"), JSC.Node.fs.constants_string) catch unreachable),
- .specifier = ZigString.init("node:fs"),
- .source_url = ZigString.init("node:fs"),
- .hash = 0,
- };
- }
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(@embedFile("fs.exports.js") ++ JSC.Node.fs.constants_string),
- .specifier = ZigString.init("node:fs"),
- .source_url = ZigString.init("node:fs"),
- .hash = 0,
- };
- },
- .@"node:buffer" => return jsSyntheticModule(.@"node:buffer"),
- .@"node:string_decoder" => return jsSyntheticModule(.@"node:string_decoder"),
- .@"node:module" => return jsSyntheticModule(.@"node:module"),
- .@"node:events" => return jsSyntheticModule(.@"node:events"),
- .@"node:process" => return jsSyntheticModule(.@"node:process"),
- .@"node:tty" => return jsSyntheticModule(.@"node:tty"),
- .@"node:stream" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "streams.exports.js")),
- .specifier = ZigString.init("node:stream"),
- .source_url = ZigString.init("node:stream"),
- .hash = 0,
- };
- },
-
- .@"node:fs/promises" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(@embedFile("fs_promises.exports.js") ++ JSC.Node.fs.constants_string),
- .specifier = ZigString.init("node:fs/promises"),
- .source_url = ZigString.init("node:fs/promises"),
- .hash = 0,
- };
- },
- .@"node:path" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "path.exports.js")),
- .specifier = ZigString.init("node:path"),
- .source_url = ZigString.init("node:path"),
- .hash = 0,
- };
- },
- .@"node:path/win32" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "path-win32.exports.js")),
- .specifier = ZigString.init("node:path/win32"),
- .source_url = ZigString.init("node:path/win32"),
- .hash = 0,
- };
- },
- .@"node:path/posix" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "path-posix.exports.js")),
- .specifier = ZigString.init("node:path/posix"),
- .source_url = ZigString.init("node:path/posix"),
- .hash = 0,
- };
- },
-
- .@"node:os" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "os.exports.js")),
- .specifier = ZigString.init("node:os"),
- .source_url = ZigString.init("node:os"),
- .hash = 0,
- };
- },
- .@"bun:ffi" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- "export const FFIType = " ++
- JSC.FFI.ABIType.map_to_js_object ++
- ";\n\n" ++
- "export const suffix = '" ++ shared_library_suffix ++ "';\n\n" ++
- @embedFile("ffi.exports.js") ++
- "\n",
- ),
- .specifier = ZigString.init("bun:ffi"),
- .source_url = ZigString.init("bun:ffi"),
- .hash = 0,
- };
- },
- .@"detect-libc" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, @embedFile(if (Environment.isLinux) "detect-libc.linux.js" else "detect-libc.js")),
- ),
- .specifier = ZigString.init("detect-libc"),
- .source_url = ZigString.init("detect-libc"),
- .hash = 0,
- };
- },
- .@"node:url" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "url.exports.js")),
- ),
- .specifier = ZigString.init("node:url"),
- .source_url = ZigString.init("node:url"),
- .hash = 0,
- };
- },
- .@"node:assert" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "assert.exports.js")),
- ),
- .specifier = ZigString.init("node:assert"),
- .source_url = ZigString.init("node:assert"),
- .hash = 0,
- };
- },
- .@"bun:sqlite" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./bindings/sqlite/sqlite.exports.js")),
- ),
- .specifier = ZigString.init("bun:sqlite"),
- .source_url = ZigString.init("bun:sqlite"),
- .hash = 0,
- };
- },
- .@"node:perf_hooks" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./perf_hooks.exports.js")),
- ),
- .specifier = ZigString.init("node:perf_hooks"),
- .source_url = ZigString.init("node:perf_hooks"),
- .hash = 0,
- };
- },
- .@"ws" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./ws.exports.js")),
- ),
- .specifier = ZigString.init("ws"),
- .source_url = ZigString.init("ws"),
- .hash = 0,
- };
- },
- .@"node:timers" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_timers.exports.js")),
- ),
- .specifier = ZigString.init("node:timers"),
- .source_url = ZigString.init("node:timers"),
- .hash = 0,
- };
- },
- .@"node:timers/promises" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_timers_promises.exports.js")),
- ),
- .specifier = ZigString.init("node:timers/promises"),
- .source_url = ZigString.init("node:timers/promises"),
- .hash = 0,
- };
- },
- .@"node:stream/web" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_streams_web.exports.js")),
- ),
- .specifier = ZigString.init("node:stream/web"),
- .source_url = ZigString.init("node:stream/web"),
- .hash = 0,
- };
- },
- .@"node:stream/consumer" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_streams_consumer.exports.js")),
- ),
- .specifier = ZigString.init("node:stream/consumer"),
- .source_url = ZigString.init("node:stream/consumer"),
- .hash = 0,
- };
- },
- .@"undici" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./undici.exports.js")),
- ),
- .specifier = ZigString.init("undici"),
- .source_url = ZigString.init("undici"),
- .hash = 0,
- };
- },
- .@"node:http" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./http.exports.js")),
- ),
- .specifier = ZigString.init("node:http"),
- .source_url = ZigString.init("node:http"),
- .hash = 0,
- };
- },
- .@"node:https" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./https.exports.js")),
- ),
- .specifier = ZigString.init("node:https"),
- .source_url = ZigString.init("node:https"),
- .hash = 0,
- };
- },
- .@"depd" => {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(
- @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./depd.exports.js")),
- ),
- .specifier = ZigString.init("depd"),
- .source_url = ZigString.init("depd"),
- .hash = 0,
- };
- },
- }
- } else if (specifier.len > js_ast.Macro.namespaceWithColon.len and
- strings.eqlComptimeIgnoreLen(specifier[0..js_ast.Macro.namespaceWithColon.len], js_ast.Macro.namespaceWithColon))
- {
- if (jsc_vm.macro_entry_points.get(MacroEntryPoint.generateIDFromSpecifier(specifier))) |entry| {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(entry.source.contents),
- .specifier = ZigString.init(specifier),
- .source_url = ZigString.init(specifier),
- .hash = 0,
- };
- }
- }
-
- return null;
- }
-
pub fn fetchWithoutOnLoadPlugins(
jsc_vm: *VirtualMachine,
+ globalObject: *JSC.JSGlobalObject,
_specifier: string,
+ referrer: string,
log: *logger.Log,
ret: *ErrorableResolvedSource,
comptime flags: FetchFlags,
) !ResolvedSource {
std.debug.assert(VirtualMachine.vm_loaded);
- if (try fetchBuiltinModule(jsc_vm, _specifier, log, comptime flags.disableTranspiling())) |builtin| {
+ if (try ModuleLoader.fetchBuiltinModule(jsc_vm, _specifier, log, comptime flags.disableTranspiling())) |builtin| {
return builtin;
}
@@ -1174,12 +788,15 @@ pub const VirtualMachine = struct {
return try ModuleLoader.transpileSourceCode(
jsc_vm,
specifier,
+ referrer,
path,
loader,
log,
null,
ret,
+ null,
VirtualMachine.source_code_printer.?,
+ globalObject,
flags,
);
}
@@ -1221,7 +838,7 @@ pub const VirtualMachine = struct {
ret.result = null;
ret.path = specifier;
return;
- } else if (HardcodedModule.Map.get(specifier)) |result| {
+ } else if (JSC.HardcodedModule.Map.get(specifier)) |result| {
ret.result = null;
ret.path = @as(string, @tagName(result));
return;
@@ -1229,7 +846,7 @@ pub const VirtualMachine = struct {
const is_special_source = strings.eqlComptime(source, main_file_name) or js_ast.Macro.isMacroPath(source);
- const result = try jsc_vm.bundler.resolver.resolve(
+ const result = try switch (jsc_vm.bundler.resolver.resolveAndAutoInstall(
if (!is_special_source)
if (is_a_file_path)
Fs.PathName.init(source).dirWithTrailingSlash()
@@ -1240,7 +857,13 @@ pub const VirtualMachine = struct {
// TODO: do we need to handle things like query string params?
if (strings.hasPrefixComptime(specifier, "file://")) specifier["file://".len..] else specifier,
.stmt,
- );
+ .read_only,
+ )) {
+ .success => |r| r,
+ .failure => |e| e,
+ .not_found => error.ModuleNotFound,
+ .pending => unreachable,
+ };
if (!jsc_vm.macro_mode) {
jsc_vm.has_any_macro_remappings = jsc_vm.has_any_macro_remappings or jsc_vm.bundler.options.macro_remap.count() > 0;
@@ -1339,30 +962,50 @@ pub const VirtualMachine = struct {
}
}
- if (HardcodedModule.Aliases.getWithEql(specifier, ZigString.eqlComptime)) |hardcoded| {
+ if (JSC.HardcodedModule.Aliases.getWithEql(specifier, ZigString.eqlComptime)) |hardcoded| {
res.* = ErrorableZigString.ok(ZigString.init(hardcoded));
return;
}
+ var old_log = jsc_vm.log;
+ var log = logger.Log.init(jsc_vm.allocator);
+ defer log.deinit();
+ jsc_vm.log = &log;
+ jsc_vm.bundler.resolver.log = &log;
+ jsc_vm.bundler.linker.log = &log;
+ defer {
+ jsc_vm.log = old_log;
+ jsc_vm.bundler.linker.log = old_log;
+ jsc_vm.bundler.resolver.log = old_log;
+ }
+ _resolve(&result, global, specifier.slice(), source.slice(), is_a_file_path, realpath) catch |err_| {
+ var err = err_;
+ const msg: logger.Msg = brk: {
+ var msgs: []logger.Msg = log.msgs.items;
+
+ for (msgs) |m| {
+ if (m.metadata == .resolve) {
+ err = m.metadata.resolve.err;
+ break :brk m;
+ }
+ }
- _resolve(&result, global, specifier.slice(), source.slice(), is_a_file_path, realpath) catch |err| {
- // This should almost always just apply to dynamic imports
-
- const printed = ResolveError.fmt(
- jsc_vm.allocator,
- specifier.slice(),
- source.slice(),
- err,
- ) catch unreachable;
- const msg = logger.Msg{
- .data = logger.rangeData(
- null,
- logger.Range.None,
- printed,
- ),
- .metadata = .{
- // import_kind is wrong probably
- .resolve = .{ .specifier = logger.BabyString.in(printed, specifier.slice()), .import_kind = .stmt },
- },
+ const printed = ResolveError.fmt(
+ jsc_vm.allocator,
+ specifier.slice(),
+ source.slice(),
+ err,
+ ) catch unreachable;
+ break :brk logger.Msg{
+ .data = logger.rangeData(
+ null,
+ logger.Range.None,
+ printed,
+ ),
+ .metadata = .{
+ // import_kind is wrong probably
+ .resolve = .{ .specifier = logger.BabyString.in(printed, specifier.slice()), .import_kind = .stmt },
+ },
+ };
};
{
@@ -1385,24 +1028,27 @@ pub const VirtualMachine = struct {
// return JSValue.jsUndefined();
// }
- const main_file_name: string = "bun:main";
+ pub const main_file_name: string = "bun:main";
pub fn fetch(ret: *ErrorableResolvedSource, global: *JSGlobalObject, specifier: ZigString, source: ZigString) callconv(.C) void {
- var log = logger.Log.init(vm.bundler.allocator);
- const spec = specifier.slice();
- // threadlocal is cheaper in linux
var jsc_vm: *VirtualMachine = if (comptime Environment.isLinux)
vm
else
global.bunVM();
+ var log = logger.Log.init(vm.bundler.allocator);
+ var spec = specifier.toSlice(jsc_vm.allocator);
+ defer spec.deinit();
+ var refer = source.toSlice(jsc_vm.allocator);
+ defer refer.deinit();
+
const result = if (!jsc_vm.bundler.options.disable_transpilation)
- @call(.{ .modifier = .always_inline }, fetchWithoutOnLoadPlugins, .{ jsc_vm, spec, &log, ret, .transpile }) catch |err| {
+ @call(.{ .modifier = .always_inline }, fetchWithoutOnLoadPlugins, .{ jsc_vm, global, spec.slice(), refer.slice(), &log, ret, .transpile }) catch |err| {
processFetchLog(global, specifier, source, &log, ret, err);
return;
}
else
- fetchWithoutOnLoadPlugins(jsc_vm, spec, &log, ret, .print_source_and_clone) catch |err| {
+ fetchWithoutOnLoadPlugins(jsc_vm, global, spec.slice(), refer.slice(), &log, ret, .print_source_and_clone) catch |err| {
processFetchLog(global, specifier, source, &log, ret, err);
return;
};
@@ -1433,10 +1079,10 @@ pub const VirtualMachine = struct {
if (vm.blobs) |blobs| {
const specifier_blob = brk: {
- if (strings.hasPrefix(spec, VirtualMachine.vm.bundler.fs.top_level_dir)) {
- break :brk spec[VirtualMachine.vm.bundler.fs.top_level_dir.len..];
+ if (strings.hasPrefix(spec.slice(), VirtualMachine.vm.bundler.fs.top_level_dir)) {
+ break :brk spec.slice()[VirtualMachine.vm.bundler.fs.top_level_dir.len..];
}
- break :brk spec;
+ break :brk spec.slice();
};
if (vm.has_loaded) {
@@ -1893,7 +1539,7 @@ pub const VirtualMachine = struct {
)) |mapping| {
var log = logger.Log.init(default_allocator);
var errorable: ErrorableResolvedSource = undefined;
- var original_source = fetchWithoutOnLoadPlugins(this, top.source_url.slice(), &log, &errorable, .print_source) catch return;
+ var original_source = fetchWithoutOnLoadPlugins(this, this.global, top.source_url.slice(), "", &log, &errorable, .print_source) catch return;
const code = original_source.source_code.slice();
top.position.line = mapping.original.lines;
top.position.line_start = mapping.original.lines;
@@ -2052,6 +1698,35 @@ pub const VirtualMachine = struct {
.fd = exception.fd != -1,
};
+ const extra_fields = .{
+ "url",
+ "info",
+ "pkg",
+ };
+
+ if (error_instance.isCell()) {
+ inline for (extra_fields) |field| {
+ if (error_instance.get(this.global, field)) |value| {
+ if (!value.isEmptyOrUndefinedOrNull()) {
+ const kind = value.jsType();
+ if (kind.isStringLike()) {
+ if (value.toStringOrNull(this.global)) |str| {
+ var zig_str = str.toSlice(this.global, bun.default_allocator);
+ defer zig_str.deinit();
+ try writer.print(comptime Output.prettyFmt(" {s}<d>: <r>\"{s}\"<r>\n", allow_ansi_color), .{ field, zig_str.slice() });
+ add_extra_line = true;
+ }
+ } else if (kind.isObject() or kind.isArray()) {
+ var zig_str = ZigString.init("");
+ value.jsonStringify(this.global, 2, &zig_str);
+ try writer.print(comptime Output.prettyFmt(" {s}<d>: <r>{s}<r>\n", allow_ansi_color), .{ field, zig_str });
+ add_extra_line = true;
+ }
+ }
+ }
+ }
+ }
+
if (show.path) {
if (show.syscall) {
try writer.writeAll(" ");
@@ -2061,6 +1736,16 @@ pub const VirtualMachine = struct {
try writer.print(comptime Output.prettyFmt(" path<d>: <r><cyan>\"{s}\"<r>\n", allow_ansi_color), .{exception.path});
}
+ if (show.fd) {
+ if (show.syscall) {
+ try writer.writeAll(" ");
+ } else if (show.errno) {
+ try writer.writeAll(" ");
+ }
+
+ try writer.print(comptime Output.prettyFmt(" fd<d>: <r><cyan>\"{d}\"<r>\n", allow_ansi_color), .{exception.fd});
+ }
+
if (show.system_code) {
if (show.syscall) {
try writer.writeAll(" ");
@@ -2284,7 +1969,7 @@ pub const ResolveError = struct {
pub fn fmt(allocator: std.mem.Allocator, specifier: string, referrer: string, err: anyerror) !string {
switch (err) {
error.ModuleNotFound => {
- if (Resolver.isPackagePath(specifier)) {
+ if (Resolver.isPackagePath(specifier) and !strings.containsChar(specifier, '/')) {
return try std.fmt.allocPrint(allocator, "Cannot find package \"{s}\" from \"{s}\"", .{ specifier, referrer });
} else {
return try std.fmt.allocPrint(allocator, "Cannot find module \"{s}\" from \"{s}\"", .{ specifier, referrer });
@@ -2655,812 +2340,6 @@ pub const BuildError = struct {
pub const JSPrivateDataTag = JSPrivateDataPtr.Tag;
-pub const HardcodedModule = enum {
- @"bun:ffi",
- @"bun:jsc",
- @"bun:main",
- @"bun:sqlite",
- @"depd",
- @"detect-libc",
- @"node:assert",
- @"node:buffer",
- @"node:child_process",
- @"node:events",
- @"node:fs",
- @"node:fs/promises",
- @"node:http",
- @"node:https",
- @"node:module",
- @"node:net",
- @"node:os",
- @"node:path",
- @"node:path/posix",
- @"node:path/win32",
- @"node:perf_hooks",
- @"node:process",
- @"node:stream",
- @"node:stream/consumer",
- @"node:stream/web",
- @"node:string_decoder",
- @"node:timers",
- @"node:timers/promises",
- @"node:tty",
- @"node:url",
- @"undici",
- @"ws",
- /// Already resolved modules go in here.
- /// This does not remap the module name, it is just a hash table.
- /// Do not put modules that have aliases in here
- /// Put those in Aliases
- pub const Map = bun.ComptimeStringMap(
- HardcodedModule,
- .{
- .{ "buffer", HardcodedModule.@"node:buffer" },
- .{ "bun:ffi", HardcodedModule.@"bun:ffi" },
- .{ "bun:jsc", HardcodedModule.@"bun:jsc" },
- .{ "bun:main", HardcodedModule.@"bun:main" },
- .{ "bun:sqlite", HardcodedModule.@"bun:sqlite" },
- .{ "depd", HardcodedModule.@"depd" },
- .{ "detect-libc", HardcodedModule.@"detect-libc" },
- .{ "node:assert", HardcodedModule.@"node:assert" },
- .{ "node:buffer", HardcodedModule.@"node:buffer" },
- .{ "node:child_process", HardcodedModule.@"node:child_process" },
- .{ "node:events", HardcodedModule.@"node:events" },
- .{ "node:fs", HardcodedModule.@"node:fs" },
- .{ "node:fs/promises", HardcodedModule.@"node:fs/promises" },
- .{ "node:http", HardcodedModule.@"node:http" },
- .{ "node:https", HardcodedModule.@"node:https" },
- .{ "node:module", HardcodedModule.@"node:module" },
- .{ "node:net", HardcodedModule.@"node:net" },
- .{ "node:os", HardcodedModule.@"node:os" },
- .{ "node:path", HardcodedModule.@"node:path" },
- .{ "node:path/posix", HardcodedModule.@"node:path/posix" },
- .{ "node:path/win32", HardcodedModule.@"node:path/win32" },
- .{ "node:perf_hooks", HardcodedModule.@"node:perf_hooks" },
- .{ "node:process", HardcodedModule.@"node:process" },
- .{ "node:stream", HardcodedModule.@"node:stream" },
- .{ "node:stream/consumer", HardcodedModule.@"node:stream/consumer" },
- .{ "node:stream/web", HardcodedModule.@"node:stream/web" },
- .{ "node:string_decoder", HardcodedModule.@"node:string_decoder" },
- .{ "node:timers", HardcodedModule.@"node:timers" },
- .{ "node:timers/promises", HardcodedModule.@"node:timers/promises" },
- .{ "node:tty", HardcodedModule.@"node:tty" },
- .{ "node:url", HardcodedModule.@"node:url" },
- .{ "undici", HardcodedModule.@"undici" },
- .{ "ws", HardcodedModule.@"ws" },
- },
- );
- pub const Aliases = bun.ComptimeStringMap(
- string,
- .{
- .{ "assert", "node:assert" },
- .{ "buffer", "node:buffer" },
- .{ "bun", "bun" },
- .{ "bun:ffi", "bun:ffi" },
- .{ "bun:jsc", "bun:jsc" },
- .{ "bun:sqlite", "bun:sqlite" },
- .{ "bun:wrap", "bun:wrap" },
- .{ "child_process", "node:child_process" },
- .{ "depd", "depd" },
- .{ "detect-libc", "detect-libc" },
- .{ "detect-libc/lib/detect-libc.js", "detect-libc" },
- .{ "events", "node:events" },
- .{ "ffi", "bun:ffi" },
- .{ "fs", "node:fs" },
- .{ "fs/promises", "node:fs/promises" },
- .{ "http", "node:http" },
- .{ "https", "node:https" },
- .{ "module", "node:module" },
- .{ "net", "node:net" },
- .{ "node:assert", "node:assert" },
- .{ "node:buffer", "node:buffer" },
- .{ "node:child_process", "node:child_process" },
- .{ "node:events", "node:events" },
- .{ "node:fs", "node:fs" },
- .{ "node:fs/promises", "node:fs/promises" },
- .{ "node:http", "node:http" },
- .{ "node:https", "node:https" },
- .{ "node:module", "node:module" },
- .{ "node:net", "node:net" },
- .{ "node:os", "node:os" },
- .{ "node:path", "node:path" },
- .{ "node:path/posix", "node:path/posix" },
- .{ "node:path/win32", "node:path/win32" },
- .{ "node:perf_hooks", "node:perf_hooks" },
- .{ "node:process", "node:process" },
- .{ "node:stream", "node:stream" },
- .{ "node:stream/consumer", "node:stream/consumer" },
- .{ "node:stream/web", "node:stream/web" },
- .{ "node:string_decoder", "node:string_decoder" },
- .{ "node:timers", "node:timers" },
- .{ "node:timers/promises", "node:timers/promises" },
- .{ "node:tty", "node:tty" },
- .{ "node:url", "node:url" },
- .{ "os", "node:os" },
- .{ "path", "node:path" },
- .{ "path/posix", "node:path/posix" },
- .{ "path/win32", "node:path/win32" },
- .{ "perf_hooks", "node:perf_hooks" },
- .{ "process", "node:process" },
- .{ "stream", "node:stream" },
- .{ "stream/consumer", "node:stream/consumer" },
- .{ "stream/web", "node:stream/web" },
- .{ "string_decoder", "node:string_decoder" },
- .{ "timers", "node:timers" },
- .{ "timers/promises", "node:timers/promises" },
- .{ "tty", "node:tty" },
- .{ "undici", "undici" },
- .{ "url", "node:url" },
- .{ "ws", "ws" },
- .{ "ws/lib/websocket", "ws" },
- },
- );
-};
-
-pub const DisabledModule = bun.ComptimeStringMap(
- void,
- .{
- .{"node:tls"},
- .{"node:worker_threads"},
- .{"tls"},
- .{"worker_threads"},
- },
-);
-
-// This exists to make it so we can reload these quicker in development
-fn jsModuleFromFile(from_path: string, comptime input: string) string {
- const absolute_path = comptime std.fs.path.dirname(@src().file).? ++ "/" ++ input;
- const Holder = struct {
- pub const file = @embedFile(absolute_path);
- };
-
- if (comptime !Environment.allow_assert) {
- if (from_path.len == 0) {
- return Holder.file;
- }
- }
-
- var file: std.fs.File = undefined;
-
- if (comptime Environment.allow_assert) {
- file = std.fs.openFileAbsoluteZ(absolute_path, .{ .mode = .read_only }) catch {
- const WarnOnce = struct {
- pub var warned = false;
- };
- if (!WarnOnce.warned) {
- WarnOnce.warned = true;
- Output.prettyErrorln("Could not find file: " ++ absolute_path ++ " - using embedded version", .{});
- }
- return Holder.file;
- };
- } else {
- var parts = [_]string{ from_path, input };
- var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
- var absolute_path_to_use = Fs.FileSystem.instance.absBuf(&parts, &buf);
- buf[absolute_path_to_use.len] = 0;
- file = std.fs.openFileAbsoluteZ(std.meta.assumeSentinel(absolute_path_to_use.ptr, 0), .{ .mode = .read_only }) catch {
- const WarnOnce = struct {
- pub var warned = false;
- };
- if (!WarnOnce.warned) {
- WarnOnce.warned = true;
- Output.prettyErrorln("Could not find file: {s}, so using embedded version", .{absolute_path_to_use});
- }
- return Holder.file;
- };
- }
-
- var contents = file.readToEndAlloc(bun.default_allocator, std.math.maxInt(usize)) catch @panic("Cannot read file: " ++ absolute_path);
- if (comptime !Environment.allow_assert) {
- file.close();
- }
- return contents;
-}
-
-inline fn jsSyntheticModule(comptime name: ResolvedSource.Tag) ResolvedSource {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(""),
- .specifier = ZigString.init(@tagName(name)),
- .source_url = ZigString.init(@tagName(name)),
- .hash = 0,
- .tag = name,
- };
-}
-
-fn dumpSource(specifier: string, printer: anytype) !void {
- const BunDebugHolder = struct {
- pub var dir: ?std.fs.Dir = null;
- };
- if (BunDebugHolder.dir == null) {
- BunDebugHolder.dir = try std.fs.cwd().makeOpenPath("/tmp/bun-debug-src/", .{ .iterate = true });
- }
-
- if (std.fs.path.dirname(specifier)) |dir_path| {
- var parent = try BunDebugHolder.dir.?.makeOpenPath(dir_path[1..], .{ .iterate = true });
- defer parent.close();
- try parent.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten());
- } else {
- try BunDebugHolder.dir.?.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten());
- }
-}
-
-pub const ModuleLoader = struct {
- pub export fn Bun__getDefaultLoader(global: *JSC.JSGlobalObject, str: *ZigString) Api.Loader {
- var jsc_vm = global.bunVM();
- const filename = str.toSlice(jsc_vm.allocator);
- defer filename.deinit();
- const loader = jsc_vm.bundler.options.loader(Fs.PathName.init(filename.slice()).ext).toAPI();
- if (loader == .file) {
- return Api.Loader.js;
- }
-
- return loader;
- }
- pub fn transpileSourceCode(
- jsc_vm: *VirtualMachine,
- specifier: string,
- path: Fs.Path,
- loader: options.Loader,
- log: *logger.Log,
- virtual_source: ?*const logger.Source,
- ret: *ErrorableResolvedSource,
- source_code_printer: *js_printer.BufferPrinter,
- comptime flags: FetchFlags,
- ) !ResolvedSource {
- const disable_transpilying = comptime flags.disableTranspiling();
-
- switch (loader) {
- .js, .jsx, .ts, .tsx, .json, .toml => {
- jsc_vm.transpiled_count += 1;
- jsc_vm.bundler.resetStore();
- const hash = http.Watcher.getHash(path.text);
-
- var allocator = if (jsc_vm.has_loaded) jsc_vm.arena.allocator() else jsc_vm.allocator;
-
- var fd: ?StoredFileDescriptorType = null;
- var package_json: ?*PackageJSON = null;
-
- if (jsc_vm.bun_dev_watcher) |watcher| {
- if (watcher.indexOf(hash)) |index| {
- const _fd = watcher.watchlist.items(.fd)[index];
- fd = if (_fd > 0) _fd else null;
- package_json = watcher.watchlist.items(.package_json)[index];
- }
- } else if (jsc_vm.bun_watcher) |watcher| {
- if (watcher.indexOf(hash)) |index| {
- const _fd = watcher.watchlist.items(.fd)[index];
- fd = if (_fd > 0) _fd else null;
- package_json = watcher.watchlist.items(.package_json)[index];
- }
- }
-
- var old = jsc_vm.bundler.log;
- jsc_vm.bundler.log = log;
- jsc_vm.bundler.linker.log = log;
- jsc_vm.bundler.resolver.log = log;
-
- defer {
- jsc_vm.bundler.log = old;
- jsc_vm.bundler.linker.log = old;
- jsc_vm.bundler.resolver.log = old;
- }
-
- // this should be a cheap lookup because 24 bytes == 8 * 3, so it's read as 3 machine words
- const is_node_override = specifier.len > "/bun-vfs/node_modules/".len and strings.eqlComptimeIgnoreLen(specifier[0.."/bun-vfs/node_modules/".len], "/bun-vfs/node_modules/");
-
- const macro_remappings = if (jsc_vm.macro_mode or !jsc_vm.has_any_macro_remappings or is_node_override)
- MacroRemap{}
- else
- jsc_vm.bundler.options.macro_remap;
-
- var fallback_source: logger.Source = undefined;
-
- var parse_options = Bundler.ParseOptions{
- .allocator = allocator,
- .path = path,
- .loader = loader,
- .dirname_fd = 0,
- .file_descriptor = fd,
- .file_hash = hash,
- .macro_remappings = macro_remappings,
- .jsx = jsc_vm.bundler.options.jsx,
- .virtual_source = virtual_source,
- .hoist_bun_plugin = true,
- };
-
- if (is_node_override) {
- if (NodeFallbackModules.contentsFromPath(specifier)) |code| {
- const fallback_path = Fs.Path.initWithNamespace(specifier, "node");
- fallback_source = logger.Source{ .path = fallback_path, .contents = code, .key_path = fallback_path };
- parse_options.virtual_source = &fallback_source;
- }
- }
-
- var parse_result = jsc_vm.bundler.parseMaybeReturnFileOnly(
- parse_options,
- null,
- disable_transpilying,
- ) orelse {
- return error.ParseError;
- };
-
- if (jsc_vm.bundler.log.errors > 0) {
- return error.ParseError;
- }
-
- if (comptime disable_transpilying) {
- return ResolvedSource{
- .allocator = null,
- .source_code = switch (comptime flags) {
- .print_source_and_clone => ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable),
- .print_source => ZigString.init(parse_result.source.contents),
- else => unreachable,
- },
- .specifier = ZigString.init(specifier),
- .source_url = ZigString.init(path.text),
- .hash = 0,
- };
- }
-
- const has_bun_plugin = parse_result.ast.bun_plugin.hoisted_stmts.items.len > 0;
-
- if (has_bun_plugin) {
- try ModuleLoader.runBunPlugin(jsc_vm, source_code_printer, &parse_result, ret);
- }
-
- var printer = source_code_printer.*;
- printer.ctx.reset();
-
- const start_count = jsc_vm.bundler.linker.import_counter;
- // We _must_ link because:
- // - the node_modules bundle won't be resolved properly otherwise
- try jsc_vm.bundler.linker.link(
- path,
- &parse_result,
- jsc_vm.origin,
- .absolute_path,
- false,
- true,
- );
-
- if (!jsc_vm.macro_mode)
- jsc_vm.resolved_count += jsc_vm.bundler.linker.import_counter - start_count;
- jsc_vm.bundler.linker.import_counter = 0;
-
- const written = brk: {
- defer source_code_printer.* = printer;
- break :brk try jsc_vm.bundler.printWithSourceMap(
- parse_result,
- @TypeOf(&printer),
- &printer,
- .esm_ascii,
- SavedSourceMap.SourceMapHandler.init(&jsc_vm.source_mappings),
- );
- };
-
- if (written == 0) {
- // if it's an empty file but there were plugins
- // we don't want it to break if you try to import from it
- if (has_bun_plugin) {
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init("// auto-generated plugin stub\nexport default undefined\n"),
- .specifier = ZigString.init(specifier),
- .source_url = ZigString.init(path.text),
- // // TODO: change hash to a bitfield
- // .hash = 1,
-
- // having JSC own the memory causes crashes
- .hash = 0,
- };
- }
- return error.PrintingErrorWriteFailed;
- }
-
- if (comptime Environment.dump_source) {
- try dumpSource(specifier, &printer);
- }
-
- if (jsc_vm.isWatcherEnabled()) {
- const resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, specifier, path.text, null);
-
- if (parse_result.input_fd) |fd_| {
- if (jsc_vm.bun_watcher != null and !is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) {
- jsc_vm.bun_watcher.?.addFile(
- fd_,
- path.text,
- hash,
- loader,
- 0,
- package_json,
- true,
- ) catch {};
- }
- }
-
- return resolved_source;
- }
-
- return ResolvedSource{
- .allocator = null,
- .source_code = ZigString.init(try default_allocator.dupe(u8, printer.ctx.getWritten())),
- .specifier = ZigString.init(specifier),
- .source_url = ZigString.init(path.text),
- // // TODO: change hash to a bitfield
- // .hash = 1,
-
- // having JSC own the memory causes crashes
- .hash = 0,
- };
- },
- // for .napi modules, provideFetch() should have been called instead
- .napi => unreachable,
- // .wasm => {
- // jsc_vm.transpiled_count += 1;
- // var fd: ?StoredFileDescriptorType = null;
-
- // var allocator = if (jsc_vm.has_loaded) jsc_vm.arena.allocator() else jsc_vm.allocator;
-
- // const hash = http.Watcher.getHash(path.text);
- // if (jsc_vm.watcher) |watcher| {
- // if (watcher.indexOf(hash)) |index| {
- // const _fd = watcher.watchlist.items(.fd)[index];
- // fd = if (_fd > 0) _fd else null;
- // }
- // }
-
- // var parse_options = Bundler.ParseOptions{
- // .allocator = allocator,
- // .path = path,
- // .loader = loader,
- // .dirname_fd = 0,
- // .file_descriptor = fd,
- // .file_hash = hash,
- // .macro_remappings = MacroRemap{},
- // .jsx = jsc_vm.bundler.options.jsx,
- // };
-
- // var parse_result = jsc_vm.bundler.parse(
- // parse_options,
- // null,
- // ) orelse {
- // return error.ParseError;
- // };
-
- // return ResolvedSource{
- // .allocator = if (jsc_vm.has_loaded) &jsc_vm.allocator else null,
- // .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable),
- // .specifier = ZigString.init(specifier),
- // .source_url = ZigString.init(path.text),
- // .hash = 0,
- // .tag = ResolvedSource.Tag.wasm,
- // };
- // },
- else => {
- return ResolvedSource{
- .allocator = &jsc_vm.allocator,
- .source_code = ZigString.init(try strings.quotedAlloc(jsc_vm.allocator, path.pretty)),
- .specifier = ZigString.init(path.text),
- .source_url = ZigString.init(path.text),
- .hash = 0,
- };
- },
- }
- }
-
- pub fn runBunPlugin(
- jsc_vm: *VirtualMachine,
- source_code_printer: *js_printer.BufferPrinter,
- parse_result: *ParseResult,
- ret: *ErrorableResolvedSource,
- ) !void {
- var printer = source_code_printer.*;
- printer.ctx.reset();
-
- defer printer.ctx.reset();
- // If we start transpiling in the middle of an existing transpilation session
- // we will hit undefined memory bugs
- // unless we disable resetting the store until we are done transpiling
- const prev_disable_reset = js_ast.Stmt.Data.Store.disable_reset;
- js_ast.Stmt.Data.Store.disable_reset = true;
- js_ast.Expr.Data.Store.disable_reset = true;
-
- // flip the source cache we use,
- // unless we're already transpiling a plugin;
- // that case can happen when a plugin load triggers another transpile
- const was_printing_plugin = jsc_vm.is_printing_plugin;
- const prev = jsc_vm.bundler.resolver.caches.fs.use_alternate_source_cache;
- jsc_vm.is_printing_plugin = true;
- defer {
- js_ast.Stmt.Data.Store.disable_reset = prev_disable_reset;
- js_ast.Expr.Data.Store.disable_reset = prev_disable_reset;
- if (!was_printing_plugin) jsc_vm.bundler.resolver.caches.fs.use_alternate_source_cache = prev;
- jsc_vm.is_printing_plugin = was_printing_plugin;
- }
- // we flip use_alternate_source_cache
- if (!was_printing_plugin) jsc_vm.bundler.resolver.caches.fs.use_alternate_source_cache = !prev;
-
- // this is a bad idea, but it should work for now.
- const original_name = parse_result.ast.symbols[parse_result.ast.bun_plugin.ref.innerIndex()].original_name;
- parse_result.ast.symbols[parse_result.ast.bun_plugin.ref.innerIndex()].original_name = "globalThis.Bun.plugin";
- defer {
- parse_result.ast.symbols[parse_result.ast.bun_plugin.ref.innerIndex()].original_name = original_name;
- }
- const hoisted_stmts = parse_result.ast.bun_plugin.hoisted_stmts.items;
-
- var parts = [1]js_ast.Part{
- js_ast.Part{
- .stmts = hoisted_stmts,
- },
- };
- var ast_copy = parse_result.ast;
- ast_copy.import_records = try jsc_vm.allocator.dupe(ImportRecord, ast_copy.import_records);
- defer jsc_vm.allocator.free(ast_copy.import_records);
- ast_copy.parts = &parts;
- ast_copy.prepend_part = null;
- var temporary_source = parse_result.source;
- var source_name = try std.fmt.allocPrint(jsc_vm.allocator, "{s}.plugin.{s}", .{ temporary_source.path.text, temporary_source.path.name.ext[1..] });
- temporary_source.path = Fs.Path.init(source_name);
-
- var temp_parse_result = parse_result.*;
- temp_parse_result.ast = ast_copy;
-
- try jsc_vm.bundler.linker.link(
- temporary_source.path,
- &temp_parse_result,
- jsc_vm.origin,
- .absolute_path,
- false,
- true,
- );
-
- _ = brk: {
- defer source_code_printer.* = printer;
- break :brk try jsc_vm.bundler.printWithSourceMapMaybe(
- temp_parse_result.ast,
- &temporary_source,
- @TypeOf(&printer),
- &printer,
- .esm_ascii,
- true,
- SavedSourceMap.SourceMapHandler.init(&jsc_vm.source_mappings),
- );
- };
- const wrote = printer.ctx.getWritten();
-
- if (wrote.len > 0) {
- if (comptime Environment.dump_source)
- try dumpSource(temporary_source.path.text, &printer);
-
- var exception = [1]JSC.JSValue{JSC.JSValue.zero};
- const promise = JSC.JSModuleLoader.evaluate(
- jsc_vm.global,
- wrote.ptr,
- wrote.len,
- temporary_source.path.text.ptr,
- temporary_source.path.text.len,
- parse_result.source.path.text.ptr,
- parse_result.source.path.text.len,
- JSC.JSValue.jsUndefined(),
- &exception,
- );
- if (!exception[0].isEmpty()) {
- ret.* = JSC.ErrorableResolvedSource.err(
- error.JSErrorObject,
- exception[0].asVoid(),
- );
- return error.PluginError;
- }
-
- if (!promise.isEmptyOrUndefinedOrNull()) {
- if (promise.asInternalPromise()) |promise_value| {
- jsc_vm.waitForPromise(promise_value);
-
- if (promise_value.status(jsc_vm.global.vm()) == .Rejected) {
- ret.* = JSC.ErrorableResolvedSource.err(
- error.JSErrorObject,
- promise_value.result(jsc_vm.global.vm()).asVoid(),
- );
- return error.PluginError;
- }
- }
- }
- }
- }
- pub fn normalizeSpecifier(jsc_vm: *VirtualMachine, slice_: string) string {
- var slice = slice_;
- if (slice.len == 0) return slice;
- var was_http = false;
- if (strings.hasPrefixComptime(slice, "https://")) {
- slice = slice["https://".len..];
- was_http = true;
- } else if (strings.hasPrefixComptime(slice, "http://")) {
- slice = slice["http://".len..];
- was_http = true;
- }
-
- if (strings.hasPrefix(slice, jsc_vm.origin.host)) {
- slice = slice[jsc_vm.origin.host.len..];
- } else if (was_http) {
- if (strings.indexOfChar(slice, '/')) |i| {
- slice = slice[i..];
- }
- }
-
- if (jsc_vm.origin.path.len > 1) {
- if (strings.hasPrefix(slice, jsc_vm.origin.path)) {
- slice = slice[jsc_vm.origin.path.len..];
- }
- }
-
- if (jsc_vm.bundler.options.routes.asset_prefix_path.len > 0) {
- if (strings.hasPrefix(slice, jsc_vm.bundler.options.routes.asset_prefix_path)) {
- slice = slice[jsc_vm.bundler.options.routes.asset_prefix_path.len..];
- }
- }
-
- return slice;
- }
-
- pub export fn Bun__fetchBuiltinModule(
- jsc_vm: *VirtualMachine,
- globalObject: *JSC.JSGlobalObject,
- specifier: *ZigString,
- referrer: *ZigString,
- ret: *ErrorableResolvedSource,
- ) bool {
- JSC.markBinding(@src());
- var log = logger.Log.init(jsc_vm.bundler.allocator);
- defer log.deinit();
- if (jsc_vm.fetchBuiltinModule(specifier.slice(), &log, false) catch |err| {
- VirtualMachine.processFetchLog(globalObject, specifier.*, referrer.*, &log, ret, err);
- return true;
- }) |builtin| {
- ret.* = ErrorableResolvedSource.ok(builtin);
- return true;
- } else {
- return false;
- }
- }
-
- pub export fn Bun__transpileFile(
- jsc_vm: *VirtualMachine,
- globalObject: *JSC.JSGlobalObject,
- specifier_ptr: *ZigString,
- referrer: *ZigString,
- ret: *ErrorableResolvedSource,
- ) bool {
- JSC.markBinding(@src());
- var log = logger.Log.init(jsc_vm.bundler.allocator);
- defer log.deinit();
- var _specifier = specifier_ptr.toSlice(jsc_vm.allocator);
- defer _specifier.deinit();
- var specifier = normalizeSpecifier(jsc_vm, _specifier.slice());
- const path = Fs.Path.init(specifier);
- const loader = jsc_vm.bundler.options.loaders.get(path.name.ext) orelse brk: {
- if (strings.eqlLong(specifier, jsc_vm.main, true)) {
- break :brk options.Loader.js;
- }
-
- break :brk options.Loader.file;
- };
- ret.* = ErrorableResolvedSource.ok(
- ModuleLoader.transpileSourceCode(
- jsc_vm,
- specifier,
- path,
- loader,
- &log,
- null,
- ret,
- VirtualMachine.source_code_printer.?,
- FetchFlags.transpile,
- ) catch |err| {
- if (err == error.PluginError) {
- return true;
- }
- VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer.*, &log, ret, err);
- return true;
- },
- );
- return true;
- }
-
- export fn Bun__runVirtualModule(globalObject: *JSC.JSGlobalObject, specifier_ptr: *ZigString) JSValue {
- JSC.markBinding(@src());
- if (globalObject.bunVM().plugin_runner == null) return JSValue.zero;
-
- const specifier = specifier_ptr.slice();
-
- if (!PluginRunner.couldBePlugin(specifier)) {
- return JSValue.zero;
- }
-
- const namespace = PluginRunner.extractNamespace(specifier);
- const after_namespace = if (namespace.len == 0)
- specifier
- else
- specifier[@minimum(namespace.len + 1, specifier.len)..];
-
- return globalObject.runOnLoadPlugins(ZigString.init(namespace), ZigString.init(after_namespace), .bun) orelse return JSValue.zero;
- }
-
- export fn Bun__transpileVirtualModule(
- globalObject: *JSC.JSGlobalObject,
- specifier_ptr: *ZigString,
- referrer_ptr: *ZigString,
- source_code: *ZigString,
- loader_: Api.Loader,
- ret: *ErrorableResolvedSource,
- ) bool {
- JSC.markBinding(@src());
- const jsc_vm = globalObject.bunVM();
- std.debug.assert(jsc_vm.plugin_runner != null);
-
- var specifier_slice = specifier_ptr.toSlice(jsc_vm.allocator);
- const specifier = specifier_slice.slice();
- defer specifier_slice.deinit();
- var source_code_slice = source_code.toSlice(jsc_vm.allocator);
- defer source_code_slice.deinit();
-
- var virtual_source = logger.Source.initPathString(specifier, source_code_slice.slice());
- var log = logger.Log.init(jsc_vm.allocator);
- const path = Fs.Path.init(specifier);
-
- const loader = if (loader_ != ._none)
- options.Loader.fromString(@tagName(loader_)).?
- else
- jsc_vm.bundler.options.loaders.get(path.name.ext) orelse brk: {
- if (strings.eqlLong(specifier, jsc_vm.main, true)) {
- break :brk options.Loader.js;
- }
-
- break :brk options.Loader.file;
- };
-
- defer log.deinit();
- ret.* = ErrorableResolvedSource.ok(
- ModuleLoader.transpileSourceCode(
- jsc_vm,
- specifier,
- path,
- options.Loader.fromString(@tagName(loader)).?,
- &log,
- &virtual_source,
- ret,
- VirtualMachine.source_code_printer.?,
- FetchFlags.transpile,
- ) catch |err| {
- if (err == error.PluginError) {
- return true;
- }
- VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer_ptr.*, &log, ret, err);
- return true;
- },
- );
- return true;
- }
-
- comptime {
- _ = Bun__transpileVirtualModule;
- _ = Bun__runVirtualModule;
- _ = Bun__transpileFile;
- _ = Bun__fetchBuiltinModule;
- _ = Bun__getDefaultLoader;
- }
-};
-
-const FetchFlags = enum {
- transpile,
- print_source,
- print_source_and_clone,
-
- pub fn disableTranspiling(this: FetchFlags) bool {
- return this != .transpile;
- }
-};
-
pub const Watcher = @import("../watcher.zig").NewWatcher(*HotReloader);
pub const HotReloader = struct {
diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig
new file mode 100644
index 000000000..bee5fa1a1
--- /dev/null
+++ b/src/bun.js/module_loader.zig
@@ -0,0 +1,2052 @@
+const std = @import("std");
+const is_bindgen: bool = std.meta.globalOption("bindgen", bool) orelse false;
+const StaticExport = @import("./bindings/static_export.zig");
+const c_char = StaticExport.c_char;
+const bun = @import("../global.zig");
+const string = bun.string;
+const Output = bun.Output;
+const Global = bun.Global;
+const Environment = bun.Environment;
+const strings = bun.strings;
+const MutableString = bun.MutableString;
+const stringZ = bun.stringZ;
+const default_allocator = bun.default_allocator;
+const StoredFileDescriptorType = bun.StoredFileDescriptorType;
+const Arena = @import("../mimalloc_arena.zig").Arena;
+const C = bun.C;
+const NetworkThread = @import("http").NetworkThread;
+const IO = @import("io");
+const Allocator = std.mem.Allocator;
+const IdentityContext = @import("../identity_context.zig").IdentityContext;
+const Fs = @import("../fs.zig");
+const Resolver = @import("../resolver/resolver.zig");
+const ast = @import("../import_record.zig");
+const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
+const MacroEntryPoint = @import("../bundler.zig").MacroEntryPoint;
+const ParseResult = @import("../bundler.zig").ParseResult;
+const logger = @import("../logger.zig");
+const Api = @import("../api/schema.zig").Api;
+const options = @import("../options.zig");
+const Bundler = @import("../bundler.zig").Bundler;
+const PluginRunner = @import("../bundler.zig").PluginRunner;
+const ServerEntryPoint = @import("../bundler.zig").ServerEntryPoint;
+const js_printer = @import("../js_printer.zig");
+const js_parser = @import("../js_parser.zig");
+const js_ast = @import("../js_ast.zig");
+const hash_map = @import("../hash_map.zig");
+const http = @import("../http.zig");
+const NodeFallbackModules = @import("../node_fallbacks.zig");
+const ImportKind = ast.ImportKind;
+const Analytics = @import("../analytics/analytics_thread.zig");
+const ZigString = @import("../jsc.zig").ZigString;
+const Runtime = @import("../runtime.zig");
+const Router = @import("./api/router.zig");
+const ImportRecord = ast.ImportRecord;
+const DotEnv = @import("../env_loader.zig");
+const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
+const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
+const WebCore = @import("../jsc.zig").WebCore;
+const Request = WebCore.Request;
+const Response = WebCore.Response;
+const Headers = WebCore.Headers;
+const Fetch = WebCore.Fetch;
+const FetchEvent = WebCore.FetchEvent;
+const js = @import("../jsc.zig").C;
+const JSC = @import("../jsc.zig");
+const JSError = @import("./base.zig").JSError;
+const d = @import("./base.zig").d;
+const MarkedArrayBuffer = @import("./base.zig").MarkedArrayBuffer;
+const getAllocator = @import("./base.zig").getAllocator;
+const JSValue = @import("../jsc.zig").JSValue;
+const NewClass = @import("./base.zig").NewClass;
+const Microtask = @import("../jsc.zig").Microtask;
+const JSGlobalObject = @import("../jsc.zig").JSGlobalObject;
+const ExceptionValueRef = @import("../jsc.zig").ExceptionValueRef;
+const JSPrivateDataPtr = @import("../jsc.zig").JSPrivateDataPtr;
+const ZigConsoleClient = @import("../jsc.zig").ZigConsoleClient;
+const Node = @import("../jsc.zig").Node;
+const ZigException = @import("../jsc.zig").ZigException;
+const ZigStackTrace = @import("../jsc.zig").ZigStackTrace;
+const ErrorableResolvedSource = @import("../jsc.zig").ErrorableResolvedSource;
+const ResolvedSource = @import("../jsc.zig").ResolvedSource;
+const JSPromise = @import("../jsc.zig").JSPromise;
+const JSInternalPromise = @import("../jsc.zig").JSInternalPromise;
+const JSModuleLoader = @import("../jsc.zig").JSModuleLoader;
+const JSPromiseRejectionOperation = @import("../jsc.zig").JSPromiseRejectionOperation;
+const Exception = @import("../jsc.zig").Exception;
+const ErrorableZigString = @import("../jsc.zig").ErrorableZigString;
+const ZigGlobalObject = @import("../jsc.zig").ZigGlobalObject;
+const VM = @import("../jsc.zig").VM;
+const JSFunction = @import("../jsc.zig").JSFunction;
+const Config = @import("./config.zig");
+const URL = @import("../url.zig").URL;
+const Transpiler = @import("./api/transpiler.zig");
+const Bun = JSC.API.Bun;
+const EventLoop = JSC.EventLoop;
+const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
+const ThreadSafeFunction = JSC.napi.ThreadSafeFunction;
+const PackageManager = @import("../install/install.zig").PackageManager;
+const Install = @import("../install/install.zig");
+const VirtualMachine = JSC.VirtualMachine;
+const Dependency = @import("../install/dependency.zig");
+
+// This exists so we can reload these files more quickly during development
+fn jsModuleFromFile(from_path: string, comptime input: string) string {
+ const absolute_path = comptime std.fs.path.dirname(@src().file).? ++ "/" ++ input;
+ const Holder = struct {
+ pub const file = @embedFile(absolute_path);
+ };
+
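+ // In release builds the source tree is presumably not on disk, so use
+ // the embedded copy unless a from_path override is provided.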
+ if (comptime !Environment.allow_assert) {
+ if (from_path.len == 0) {
+ return Holder.file;
+ }
+ }
+
+ var file: std.fs.File = undefined;
+
+ if (comptime Environment.allow_assert) {
+ file = std.fs.openFileAbsoluteZ(absolute_path, .{ .mode = .read_only }) catch {
+ const WarnOnce = struct {
+ pub var warned = false;
+ };
+ if (!WarnOnce.warned) {
+ WarnOnce.warned = true;
+ Output.prettyErrorln("Could not find file: " ++ absolute_path ++ " - using embedded version", .{});
+ }
+ return Holder.file;
+ };
+ } else {
+ var parts = [_]string{ from_path, input };
+ var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ var absolute_path_to_use = Fs.FileSystem.instance.absBuf(&parts, &buf);
+ buf[absolute_path_to_use.len] = 0;
+ file = std.fs.openFileAbsoluteZ(std.meta.assumeSentinel(absolute_path_to_use.ptr, 0), .{ .mode = .read_only }) catch {
+ const WarnOnce = struct {
+ pub var warned = false;
+ };
+ if (!WarnOnce.warned) {
+ WarnOnce.warned = true;
+ Output.prettyErrorln("Could not find file: {s}, so using embedded version", .{absolute_path_to_use});
+ }
+ return Holder.file;
+ };
+ }
+
+ var contents = file.readToEndAlloc(bun.default_allocator, std.math.maxInt(usize)) catch @panic("Cannot read file: " ++ absolute_path);
+ if (comptime !Environment.allow_assert) {
+ file.close();
+ }
+ return contents;
+}
+
+inline fn jsSyntheticModule(comptime name: ResolvedSource.Tag) ResolvedSource {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(""),
+ .specifier = ZigString.init(@tagName(name)),
+ .source_url = ZigString.init(@tagName(name)),
+ .hash = 0,
+ .tag = name,
+ };
+}
+
+fn dumpSource(specifier: string, printer: anytype) !void {
+ const BunDebugHolder = struct {
+ pub var dir: ?std.fs.Dir = null;
+ };
+ if (BunDebugHolder.dir == null) {
+ BunDebugHolder.dir = try std.fs.cwd().makeOpenPath("/tmp/bun-debug-src/", .{ .iterate = true });
+ }
+
+ if (std.fs.path.dirname(specifier)) |dir_path| {
+ var parent = try BunDebugHolder.dir.?.makeOpenPath(dir_path[1..], .{ .iterate = true });
+ defer parent.close();
+ try parent.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten());
+ } else {
+ try BunDebugHolder.dir.?.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten());
+ }
+}
+
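+// ModuleLoader drives transpilation of modules for the JS runtime. The
+// AsyncModule machinery below is what makes automatic npm installs work:
+// a module whose imports reference packages that are not installed yet is
+// parked on a queue, the package manager fetches them, and loading resumes
+// once every pending import is on disk.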
+pub const ModuleLoader = struct {
+ pub const AsyncModule = struct {
+
+ // This is all the state used by the printer to print the module
+ parse_result: ParseResult,
+ stmt_blocks: []*js_ast.Stmt.Data.Store.All.Block = &[_]*js_ast.Stmt.Data.Store.All.Block{},
+ expr_blocks: []*js_ast.Expr.Data.Store.All.Block = &[_]*js_ast.Expr.Data.Store.All.Block{},
+ promise: JSC.Strong = .{},
+ path: Fs.Path,
+ specifier: string = "",
+ referrer: string = "",
+ string_buf: []u8 = &[_]u8{},
+ fd: ?StoredFileDescriptorType = null,
+ package_json: ?*PackageJSON = null,
+ loader: Api.Loader,
+ hash: u32 = std.math.maxInt(u32),
+ globalThis: *JSC.JSGlobalObject = undefined,
+
+ // This is the specific state for making it async
+ poll_ref: JSC.PollRef = .{},
+
+ pub const Id = u32;
+ const debug = Output.scoped(.ModuleLoader, false);
+
+ const PackageDownloadError = struct {
+ name: []const u8,
+ resolution: Install.Resolution,
+ err: anyerror,
+ url: []const u8,
+ };
+
+ const PackageResolveError = struct {
+ name: []const u8,
+ err: anyerror,
+ url: []const u8,
+ version: Dependency.Version,
+ };
+
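+ // There is one Queue per VirtualMachine; vm() recovers the owning
+ // VirtualMachine from its `modules` field via @fieldParentPtr.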
+ pub const Queue = struct {
+ map: Map = .{},
+ concurrent_task_count: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0),
+
+ const DeferredDependencyError = struct {
+ dependency: Dependency,
+ root_dependency_id: Install.PackageID,
+ err: anyerror,
+ };
+
+ pub const Map = std.ArrayListUnmanaged(AsyncModule);
+
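+ // Park a module whose imports still need packages. poll_ref keeps the
+ // event loop alive while we wait, and the package manager is kicked
+ // immediately so network work starts right away.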
+ pub fn enqueue(this: *Queue, globalObject: *JSC.JSGlobalObject, opts: anytype) void {
+ debug("enqueue: {s}", .{opts.specifier});
+ var module = AsyncModule.init(opts, globalObject) catch unreachable;
+ module.poll_ref.ref(this.vm());
+
+ this.map.append(this.vm().allocator, module) catch unreachable;
+ this.vm().packageManager().flushDependencyQueue();
+ _ = this.vm().packageManager().scheduleNetworkTasks();
+ }
+
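+ // Reject every pending module that depends on the failed root dependency.
+ // The loop compacts the list in place: unaffected modules are kept, the
+ // rest are rejected and dropped.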
+ pub fn onDependencyError(ctx: *anyopaque, dependency: Dependency, root_dependency_id: Install.PackageID, err: anyerror) void {
+ var this = bun.cast(*Queue, ctx);
+ debug("onDependencyError: {s}", .{this.vm().packageManager().lockfile.str(dependency.name)});
+
+ var modules: []AsyncModule = this.map.items;
+ var i: usize = 0;
+ outer: for (modules) |module_| {
+ var module = module_;
+ var root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id);
+ for (root_dependency_ids) |dep, dep_i| {
+ if (dep != root_dependency_id) continue;
+ module.resolveError(
+ this.vm(),
+ module.parse_result.pending_imports.items(.import_record_id)[dep_i],
+ .{
+ .name = this.vm().packageManager().lockfile.str(dependency.name),
+ .err = err,
+ .url = "",
+ .version = dependency.version,
+ },
+ ) catch unreachable;
+ continue :outer;
+ }
+
+ modules[i] = module;
+ i += 1;
+ }
+ this.map.items.len = i;
+ }
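+ // The package manager signals progress from another thread, so bounce
+ // back to the JS thread with a concurrent task before touching modules.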
+ pub fn onWakeHandler(ctx: *anyopaque, _: *PackageManager) void {
+ debug("onWake", .{});
+ var this = bun.cast(*Queue, ctx);
+ var concurrent_task = bun.default_allocator.create(JSC.ConcurrentTask) catch @panic("OOM");
+ concurrent_task.* = .{
+ .task = JSC.Task.init(this),
+ .auto_delete = true,
+ };
+ this.vm().enqueueTaskConcurrent(concurrent_task);
+ }
+
+ pub fn onPoll(this: *Queue) void {
+ debug("onPoll", .{});
+ var pm = this.vm().packageManager();
+
+ this.runTasks();
+ this.pollModules();
+ _ = pm.flushDependencyQueue();
+ _ = pm.scheduleNetworkTasks();
+ }
+
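+ // Pump the package manager's task queue; the progress-bar variant is
+ // only used when stderr supports ANSI colors.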
+ pub fn runTasks(this: *Queue) void {
+ var pm = this.vm().packageManager();
+
+ if (Output.enable_ansi_colors_stderr) {
+ pm.startProgressBarIfNone();
+ pm.runTasks(
+ *Queue,
+ this,
+ .{
+ .onExtract = onExtract,
+ .onResolve = onResolve,
+ .onPackageManifestError = onPackageManifestError,
+ .onPackageDownloadError = onPackageDownloadError,
+ .progress_bar = true,
+ },
+ PackageManager.Options.LogLevel.default,
+ ) catch unreachable;
+ } else {
+ pm.runTasks(
+ *Queue,
+ this,
+ .{
+ .onExtract = onExtract,
+ .onResolve = onResolve,
+ .onPackageManifestError = onPackageManifestError,
+ .onPackageDownloadError = onPackageDownloadError,
+ },
+ PackageManager.Options.LogLevel.default_no_progress,
+ ) catch unreachable;
+ }
+ }
+
+ pub fn onResolve(_: *Queue) void {
+ debug("onResolve", .{});
+ }
+
+ pub fn onPackageManifestError(
+ this: *Queue,
+ name: []const u8,
+ err: anyerror,
+ url: []const u8,
+ ) void {
+ debug("onPackageManifestError: {s}", .{name});
+
+ var modules: []AsyncModule = this.map.items;
+ var i: usize = 0;
+ outer: for (modules) |module_| {
+ var module = module_;
+ var tags = module.parse_result.pending_imports.items(.tag);
+ for (tags) |tag, tag_i| {
+ if (tag == .resolve) {
+ var esms = module.parse_result.pending_imports.items(.esm);
+ const esm = esms[tag_i];
+ var string_bufs = module.parse_result.pending_imports.items(.string_buf);
+
+ if (!strings.eql(esm.name.slice(string_bufs[tag_i]), name)) continue;
+
+ var versions = module.parse_result.pending_imports.items(.dependency);
+
+ module.resolveError(
+ this.vm(),
+ module.parse_result.pending_imports.items(.import_record_id)[tag_i],
+ .{
+ .name = name,
+ .err = err,
+ .url = url,
+ .version = versions[tag_i],
+ },
+ ) catch unreachable;
+ continue :outer;
+ }
+ }
+
+ modules[i] = module;
+ i += 1;
+ }
+ this.map.items.len = i;
+ }
+
+ pub fn onPackageDownloadError(
+ this: *Queue,
+ package_id: Install.PackageID,
+ name: []const u8,
+ resolution: Install.Resolution,
+ err: anyerror,
+ url: []const u8,
+ ) void {
+ debug("onPackageDownloadError: {s}", .{name});
+
+ var modules: []AsyncModule = this.map.items;
+ var i: usize = 0;
+ outer: for (modules) |module_| {
+ var module = module_;
+ var root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id);
+ for (root_dependency_ids) |dep, dep_i| {
+ if (this.vm().packageManager().dynamicRootDependencies().items[dep].resolution_id != package_id) continue;
+ module.downloadError(
+ this.vm(),
+ module.parse_result.pending_imports.items(.import_record_id)[dep_i],
+ .{
+ .name = name,
+ .resolution = resolution,
+ .err = err,
+ .url = url,
+ },
+ ) catch unreachable;
+ continue :outer;
+ }
+
+ modules[i] = module;
+ i += 1;
+ }
+ this.map.items.len = i;
+ }
+
+ pub fn onExtract(this: *Queue, package_id: u32, comptime _: PackageManager.Options.LogLevel) void {
+ if (comptime Environment.allow_assert)
+ debug("onExtract: {s} ({d})", .{
+ this.vm().packageManager().lockfile.str(this.vm().packageManager().lockfile.packages.get(package_id).name),
+ package_id,
+ });
+ this.onPackageID(package_id);
+ }
+
+ pub fn onPackageID(this: *Queue, package_id: u32) void {
+ var values = this.map.items;
+ for (values) |value| {
+ var package_ids = value.parse_result.pending_imports.items(.resolution_id);
+
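+ // Currently a no-op: pollModules() performs the actual state
+ // advancement on the next poll.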
+ _ = package_id;
+ _ = package_ids;
+ }
+ }
+
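+ // Walk each pending module's imports, advancing resolve -> download ->
+ // done as the package manager makes progress. A module whose imports
+ // are all done resumes loading; the rest stay queued.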
+ pub fn pollModules(this: *Queue) void {
+ var pm = this.vm().packageManager();
+ var modules = this.map.items;
+ var i: usize = 0;
+ for (modules) |mod| {
+ var module = mod;
+ var tags = module.parse_result.pending_imports.items(.tag);
+ var root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id);
+ // var esms = module.parse_result.pending_imports.items(.esm);
+ // var versions = module.parse_result.pending_imports.items(.dependency);
+ var done_count: usize = 0;
+ for (tags) |tag, tag_i| {
+ const root_id = root_dependency_ids[tag_i];
+ if (root_id == Install.invalid_package_id) continue;
+ const root_items = pm.dynamicRootDependencies().items;
+ if (root_items.len <= root_id) continue;
+ const package_id = root_items[root_id].resolution_id;
+
+ switch (tag) {
+ .resolve => {
+ if (package_id == Install.invalid_package_id) {
+ continue;
+ }
+
+ // if we get here, the package has already been resolved.
+ tags[tag_i] = .download;
+ },
+ .download => {
+ if (package_id == Install.invalid_package_id) {
+ unreachable;
+ }
+ },
+ .done => {
+ done_count += 1;
+ continue;
+ },
+ }
+
+ if (package_id == Install.invalid_package_id) {
+ continue;
+ }
+
+ const package = pm.lockfile.packages.get(package_id);
+ std.debug.assert(package.resolution.tag != .root);
+
+ switch (pm.determinePreinstallState(package, pm.lockfile)) {
+ .done => {
+ done_count += 1;
+ tags[tag_i] = .done;
+ },
+ .extracting => {
+ // we are extracting the package
+ // we need to wait for the next poll
+ continue;
+ },
+ .extract => {},
+ else => {},
+ }
+ }
+
+ if (done_count == tags.len) {
+ if (i + 1 >= modules.len) {
+ this.vm().packageManager().endProgressBar();
+ }
+ module.done(this.vm());
+ } else {
+ modules[i] = module;
+ i += 1;
+ }
+ }
+ this.map.items.len = i;
+ if (i == 0) {
+ // ensure we always end the progress bar
+ this.vm().packageManager().endProgressBar();
+ }
+ }
+
+ pub fn vm(this: *Queue) *VirtualMachine {
+ return @fieldParentPtr(VirtualMachine, "modules", this);
+ }
+ };
+
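+ // Copy specifier, referrer, and path into one owned buffer (the inputs
+ // are presumably transient) and create the internal promise the pending
+ // import will eventually settle.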
+ pub fn init(opts: anytype, globalObject: *JSC.JSGlobalObject) !AsyncModule {
+ var promise = JSC.Strong{};
+ var stmt_blocks = js_ast.Stmt.Data.Store.toOwnedSlice();
+ var expr_blocks = js_ast.Expr.Data.Store.toOwnedSlice();
+ const this_promise = JSValue.createInternalPromise(globalObject);
+ promise.set(globalObject, this_promise);
+
+ var buf = bun.StringBuilder{};
+ buf.count(opts.referrer);
+ buf.count(opts.specifier);
+ buf.count(opts.path.text);
+
+ try buf.allocate(bun.default_allocator);
+ opts.promise_ptr.?.* = this_promise.asInternalPromise().?;
+ const referrer = buf.append(opts.referrer);
+ const specifier = buf.append(opts.specifier);
+ const path = Fs.Path.init(buf.append(opts.path.text));
+
+ return AsyncModule{
+ .parse_result = opts.parse_result,
+ .promise = promise,
+ .path = path,
+ .specifier = specifier,
+ .referrer = referrer,
+ .fd = opts.fd,
+ .package_json = opts.package_json,
+ .loader = opts.loader.toAPI(),
+ .string_buf = buf.allocatedSlice(),
+ .stmt_blocks = stmt_blocks,
+ .globalThis = globalObject,
+ .expr_blocks = expr_blocks,
+ };
+ }
+
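+ // Called once every pending import is installed: finish loading via
+ // resumeLoadingModule() and fulfill or reject the module's promise.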
+ pub fn done(this: *AsyncModule, jsc_vm: *JSC.VirtualMachine) void {
+ var log = logger.Log.init(jsc_vm.allocator);
+ defer log.deinit();
+ var errorable: ErrorableResolvedSource = undefined;
+ this.poll_ref.unref(jsc_vm);
+ outer: {
+ errorable = ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| {
+ JSC.VirtualMachine.processFetchLog(
+ this.globalThis,
+ ZigString.init(this.specifier),
+ ZigString.init(this.referrer),
+ &log,
+ &errorable,
+ err,
+ );
+ break :outer;
+ });
+ }
+
+ var spec = ZigString.init(this.specifier).withEncoding();
+ var ref = ZigString.init(this.referrer).withEncoding();
+ Bun__onFulfillAsyncModule(
+ this.promise.get().?,
+ &errorable,
+ &spec,
+ &ref,
+ );
+ this.deinit();
+ }
+
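+ // Turn a registry/manifest failure into a JS error instance carrying
+ // url, pkg, and source position, then reject the import's promise.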
+ pub fn resolveError(this: *AsyncModule, vm: *JSC.VirtualMachine, import_record_id: u32, result: PackageResolveError) !void {
+ var globalThis = this.globalThis;
+
+ var msg: []u8 = try switch (result.err) {
+ error.PackageManifestHTTP400 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 400 while resolving package '{s}' at '{s}'",
+ .{ result.name, result.url },
+ ),
+ error.PackageManifestHTTP401 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 401 while resolving package '{s}' at '{s}'",
+ .{ result.name, result.url },
+ ),
+ error.PackageManifestHTTP402 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 402 while resolving package '{s}' at '{s}'",
+ .{ result.name, result.url },
+ ),
+ error.PackageManifestHTTP403 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 403 while resolving package '{s}' at '{s}'",
+ .{ result.name, result.url },
+ ),
+ error.PackageManifestHTTP404 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "Package '{s}' was not found",
+ .{result.name},
+ ),
+ error.PackageManifestHTTP4xx => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 4xx while resolving package '{s}' at '{s}'",
+ .{ result.name, result.url },
+ ),
+ error.PackageManifestHTTP5xx => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 5xx while resolving package '{s}' at '{s}'",
+ .{ result.name, result.url },
+ ),
+ error.DistTagNotFound, error.NoMatchingVersion => brk: {
+ const prefix: []const u8 = if (result.err == error.NoMatchingVersion and result.version.tag == .npm and result.version.value.npm.isExact())
+ "Version not found"
+ else if (result.version.tag == .npm and !result.version.value.npm.isExact())
+ "No matching version found"
+ else
+ "No match found";
+
+ break :brk std.fmt.allocPrint(
+ bun.default_allocator,
+ "{s} '{s}' for package '{s}' (but package exists)",
+ .{ prefix, vm.packageManager().lockfile.str(result.version.literal), result.name },
+ );
+ },
+ else => |err| std.fmt.allocPrint(
+ bun.default_allocator,
+ "{s} resolving package '{s}' at '{s}'",
+ .{ std.mem.span(@errorName(err)), result.name, result.url },
+ ),
+ };
+
+ const name: []const u8 = switch (result.err) {
+ error.NoMatchingVersion => "PackageVersionNotFound",
+ error.DistTagNotFound => "PackageTagNotFound",
+ error.PackageManifestHTTP403 => "PackageForbidden",
+ error.PackageManifestHTTP404 => "PackageNotFound",
+ else => "PackageResolveError",
+ };
+
+ var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis);
+ if (result.url.len > 0)
+ error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(this.specifier).withEncoding().toValueGC(globalThis));
+ const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records[import_record_id].range, "").location.?;
+ error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line));
+ if (location.line_text) |line_text| {
+ error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toValueGC(globalThis));
+ }
+ error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column));
+ if (this.referrer.len > 0 and !strings.eqlComptime(this.referrer, "undefined")) {
+ error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.referrer).withEncoding().toValueGC(globalThis));
+ }
+
+ const promise_value = this.promise.swap();
+ var promise = promise_value.asInternalPromise().?;
+ promise_value.ensureStillAlive();
+ this.poll_ref.unref(vm);
+ this.deinit();
+ promise.rejectAsHandled(globalThis, error_instance);
+ }
+ pub fn downloadError(this: *AsyncModule, vm: *JSC.VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void {
+ var globalThis = this.globalThis;
+
+ const msg_args = .{
+ result.name,
+ result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items),
+ };
+
+ var msg: []u8 = try switch (result.err) {
+ error.TarballHTTP400 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 400 downloading package '{s}@{any}'",
+ msg_args,
+ ),
+ error.TarballHTTP401 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 401 downloading package '{s}@{any}'",
+ msg_args,
+ ),
+ error.TarballHTTP402 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 402 downloading package '{s}@{any}'",
+ msg_args,
+ ),
+ error.TarballHTTP403 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 403 downloading package '{s}@{any}'",
+ msg_args,
+ ),
+ error.TarballHTTP404 => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 404 downloading package '{s}@{any}'",
+ msg_args,
+ ),
+ error.TarballHTTP4xx => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 4xx downloading package '{s}@{any}'",
+ msg_args,
+ ),
+ error.TarballHTTP5xx => std.fmt.allocPrint(
+ bun.default_allocator,
+ "HTTP 5xx downloading package '{s}@{any}'",
+ msg_args,
+ ),
+ error.TarballFailedToExtract => std.fmt.allocPrint(
+ bun.default_allocator,
+ "Failed to extract tarball for package '{s}@{any}'",
+ msg_args,
+ ),
+ else => |err| std.fmt.allocPrint(
+ bun.default_allocator,
+ "{s} downloading package '{s}@{any}'",
+ .{
+ std.mem.span(@errorName(err)), result.name,
+ result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items),
+ },
+ ),
+ };
+
+ const name: []const u8 = switch (result.err) {
+ error.TarballFailedToExtract => "PackageExtractionError",
+ error.TarballHTTP403 => "TarballForbiddenError",
+ error.TarballHTTP404 => "TarballNotFoundError",
+ else => "TarballDownloadError",
+ };
+
+ var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis);
+ if (result.url.len > 0)
+ error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toValueGC(globalThis));
+ if (this.specifier.len > 0 and !strings.eqlComptime(this.specifier, "undefined")) {
+ error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.specifier).withEncoding().toValueGC(globalThis));
+ }
+
+ const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records[import_record_id].range, "").location.?;
+ error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(
+ this.parse_result.ast.import_records[import_record_id].path.text,
+ ).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toValueGC(globalThis));
+ error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line));
+ if (location.line_text) |line_text| {
+ error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toValueGC(globalThis));
+ }
+ error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column));
+
+ const promise_value = this.promise.swap();
+ var promise = promise_value.asInternalPromise().?;
+ promise_value.ensureStillAlive();
+ this.poll_ref.unref(vm);
+ this.deinit();
+ promise.rejectAsHandled(globalThis, error_instance);
+ }
+
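+ // The packages this module was waiting on are installed now, so the
+ // linker can succeed: re-link, print, and hand back the final source.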
+ pub fn resumeLoadingModule(this: *AsyncModule, log: *logger.Log) !ResolvedSource {
+ debug("resumeLoadingModule: {s}", .{this.specifier});
+ var parse_result = this.parse_result;
+ var path = this.path;
+ var jsc_vm = JSC.VirtualMachine.vm;
+ var specifier = this.specifier;
+ var old_log = jsc_vm.log;
+
+ jsc_vm.bundler.linker.log = log;
+ jsc_vm.bundler.log = log;
+ jsc_vm.bundler.resolver.log = log;
+ jsc_vm.packageManager().log = log;
+ defer {
+ jsc_vm.bundler.linker.log = old_log;
+ jsc_vm.bundler.log = old_log;
+ jsc_vm.bundler.resolver.log = old_log;
+ jsc_vm.packageManager().log = old_log;
+ }
+
+ // We _must_ link because:
+ // - the node_modules bundle won't be resolved properly otherwise
+ try jsc_vm.bundler.linker.link(
+ path,
+ &parse_result,
+ jsc_vm.origin,
+ .absolute_path,
+ false,
+ true,
+ );
+ this.parse_result = parse_result;
+
+ var printer = VirtualMachine.source_code_printer.?.*;
+ printer.ctx.reset();
+
+ const written = brk: {
+ defer VirtualMachine.source_code_printer.?.* = printer;
+ break :brk try jsc_vm.bundler.printWithSourceMap(
+ parse_result,
+ @TypeOf(&printer),
+ &printer,
+ .esm_ascii,
+ SavedSourceMap.SourceMapHandler.init(&jsc_vm.source_mappings),
+ );
+ };
+
+ if (written == 0) {
+ return error.PrintingErrorWriteFailed;
+ }
+
+ if (comptime Environment.dump_source) {
+ try dumpSource(specifier, &printer);
+ }
+
+ if (jsc_vm.isWatcherEnabled()) {
+ const resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, specifier, path.text, null);
+
+ if (parse_result.input_fd) |fd_| {
+ if (jsc_vm.bun_watcher != null and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) {
+ jsc_vm.bun_watcher.?.addFile(
+ fd_,
+ path.text,
+ this.hash,
+ options.Loader.fromAPI(this.loader),
+ 0,
+ this.package_json,
+ true,
+ ) catch {};
+ }
+ }
+
+ return resolved_source;
+ }
+
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(try default_allocator.dupe(u8, printer.ctx.getWritten())),
+ .specifier = ZigString.init(specifier),
+ .source_url = ZigString.init(path.text),
+ // // TODO: change hash to a bitfield
+ // .hash = 1,
+
+ // having JSC own the memory causes crashes
+ .hash = 0,
+ };
+ }
+
+ pub fn deinit(this: *AsyncModule) void {
+ this.parse_result.deinit();
+ bun.default_allocator.free(this.stmt_blocks);
+ bun.default_allocator.free(this.expr_blocks);
+ this.promise.deinit();
+ bun.default_allocator.free(this.string_buf);
+ }
+
+ extern "C" fn Bun__onFulfillAsyncModule(
+ promiseValue: JSC.JSValue,
+ res: *JSC.ErrorableResolvedSource,
+ specifier: *ZigString,
+ referrer: *ZigString,
+ ) void;
+ };
+
+ pub export fn Bun__getDefaultLoader(global: *JSC.JSGlobalObject, str: *ZigString) Api.Loader {
+ var jsc_vm = global.bunVM();
+ const filename = str.toSlice(jsc_vm.allocator);
+ defer filename.deinit();
+ const loader = jsc_vm.bundler.options.loader(Fs.PathName.init(filename.slice()).ext).toAPI();
+ if (loader == .file) {
+ return Api.Loader.js;
+ }
+
+ return loader;
+ }
+
+ pub fn transpileSourceCode(
+ jsc_vm: *VirtualMachine,
+ specifier: string,
+ referrer: string,
+ path: Fs.Path,
+ loader: options.Loader,
+ log: *logger.Log,
+ virtual_source: ?*const logger.Source,
+ ret: *ErrorableResolvedSource,
+ promise_ptr: ?*?*JSC.JSInternalPromise,
+ source_code_printer: *js_printer.BufferPrinter,
+ globalObject: ?*JSC.JSGlobalObject,
+ comptime flags: FetchFlags,
+ ) !ResolvedSource {
+ const disable_transpilying = comptime flags.disableTranspiling();
+
+ switch (loader) {
+ .js, .jsx, .ts, .tsx, .json, .toml => {
+ jsc_vm.transpiled_count += 1;
+ jsc_vm.bundler.resetStore();
+ const hash = http.Watcher.getHash(path.text);
+
+ var allocator = if (jsc_vm.has_loaded) jsc_vm.arena.allocator() else jsc_vm.allocator;
+
+ var fd: ?StoredFileDescriptorType = null;
+ var package_json: ?*PackageJSON = null;
+
+ if (jsc_vm.bun_dev_watcher) |watcher| {
+ if (watcher.indexOf(hash)) |index| {
+ const _fd = watcher.watchlist.items(.fd)[index];
+ fd = if (_fd > 0) _fd else null;
+ package_json = watcher.watchlist.items(.package_json)[index];
+ }
+ } else if (jsc_vm.bun_watcher) |watcher| {
+ if (watcher.indexOf(hash)) |index| {
+ const _fd = watcher.watchlist.items(.fd)[index];
+ fd = if (_fd > 0) _fd else null;
+ package_json = watcher.watchlist.items(.package_json)[index];
+ }
+ }
+
+ var old = jsc_vm.bundler.log;
+ jsc_vm.bundler.log = log;
+ jsc_vm.bundler.linker.log = log;
+ jsc_vm.bundler.resolver.log = log;
+ if (jsc_vm.bundler.resolver.package_manager) |pm| {
+ pm.log = log;
+ }
+
+ defer {
+ jsc_vm.bundler.log = old;
+ jsc_vm.bundler.linker.log = old;
+ jsc_vm.bundler.resolver.log = old;
+ if (jsc_vm.bundler.resolver.package_manager) |pm| {
+ pm.log = old;
+ }
+ }
+
+ // this should be a cheap lookup because 24 bytes == 8 * 3, so it's read as 3 machine words
+ const is_node_override = specifier.len > "/bun-vfs/node_modules/".len and strings.eqlComptimeIgnoreLen(specifier[0.."/bun-vfs/node_modules/".len], "/bun-vfs/node_modules/");
+
+ const macro_remappings = if (jsc_vm.macro_mode or !jsc_vm.has_any_macro_remappings or is_node_override)
+ MacroRemap{}
+ else
+ jsc_vm.bundler.options.macro_remap;
+
+ var fallback_source: logger.Source = undefined;
+
+ var parse_options = Bundler.ParseOptions{
+ .allocator = allocator,
+ .path = path,
+ .loader = loader,
+ .dirname_fd = 0,
+ .file_descriptor = fd,
+ .file_hash = hash,
+ .macro_remappings = macro_remappings,
+ .jsx = jsc_vm.bundler.options.jsx,
+ .virtual_source = virtual_source,
+ .hoist_bun_plugin = true,
+ };
+
+ if (is_node_override) {
+ if (NodeFallbackModules.contentsFromPath(specifier)) |code| {
+ const fallback_path = Fs.Path.initWithNamespace(specifier, "node");
+ fallback_source = logger.Source{ .path = fallback_path, .contents = code, .key_path = fallback_path };
+ parse_options.virtual_source = &fallback_source;
+ }
+ }
+
+ var parse_result = jsc_vm.bundler.parseMaybeReturnFileOnly(
+ parse_options,
+ null,
+ disable_transpilying,
+ ) orelse {
+ return error.ParseError;
+ };
+
+ if (jsc_vm.bundler.log.errors > 0) {
+ return error.ParseError;
+ }
+
+ if (comptime disable_transpilying) {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = switch (comptime flags) {
+ .print_source_and_clone => ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable),
+ .print_source => ZigString.init(parse_result.source.contents),
+ else => unreachable,
+ },
+ .specifier = ZigString.init(specifier),
+ .source_url = ZigString.init(path.text),
+ .hash = 0,
+ };
+ }
+
+ const has_bun_plugin = parse_result.ast.bun_plugin.hoisted_stmts.items.len > 0;
+
+ if (has_bun_plugin) {
+ try ModuleLoader.runBunPlugin(jsc_vm, JSC.VirtualMachine.source_code_printer.?, &parse_result, ret);
+ }
+
+ const start_count = jsc_vm.bundler.linker.import_counter;
+
+ // We _must_ link because:
+ // - the node_modules bundle won't be resolved properly otherwise
+ try jsc_vm.bundler.linker.link(
+ path,
+ &parse_result,
+ jsc_vm.origin,
+ .absolute_path,
+ false,
+ true,
+ );
+
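+ // The auto-install path: the linker recorded imports whose packages are
+ // not installed yet, so park this module on the async queue and signal
+ // the caller with error.AsyncModule.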
+ if (parse_result.pending_imports.len > 0) {
+ if (promise_ptr == null) {
+ return error.UnexpectedPendingResolution;
+ }
+
+ if (jsc_vm.isWatcherEnabled()) {
+ if (parse_result.input_fd) |fd_| {
+ if (jsc_vm.bun_watcher != null and !is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) {
+ jsc_vm.bun_watcher.?.addFile(
+ fd_,
+ path.text,
+ hash,
+ loader,
+ 0,
+ package_json,
+ true,
+ ) catch {};
+ }
+ }
+ }
+
+ if (parse_result.source.contents_is_recycled) {
+ // this shared buffer is about to become owned by the AsyncModule struct
+ jsc_vm.bundler.resolver.caches.fs.resetSharedBuffer(
+ jsc_vm.bundler.resolver.caches.fs.sharedBuffer(),
+ );
+ }
+
+ jsc_vm.modules.enqueue(
+ globalObject.?,
+ .{
+ .parse_result = parse_result,
+ .path = path,
+ .loader = loader,
+ .fd = fd,
+ .package_json = package_json,
+ .hash = hash,
+ .promise_ptr = promise_ptr,
+ .specifier = specifier,
+ .referrer = referrer,
+ },
+ );
+ return error.AsyncModule;
+ }
+
+ if (!jsc_vm.macro_mode)
+ jsc_vm.resolved_count += jsc_vm.bundler.linker.import_counter - start_count;
+ jsc_vm.bundler.linker.import_counter = 0;
+
+ var printer = source_code_printer.*;
+ printer.ctx.reset();
+
+ const written = brk: {
+ defer source_code_printer.* = printer;
+ break :brk try jsc_vm.bundler.printWithSourceMap(
+ parse_result,
+ @TypeOf(&printer),
+ &printer,
+ .esm_ascii,
+ SavedSourceMap.SourceMapHandler.init(&jsc_vm.source_mappings),
+ );
+ };
+
+ if (written == 0) {
+ // if it's an empty file but there were plugins
+ // we don't want it to break if you try to import from it
+ if (has_bun_plugin) {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init("// auto-generated plugin stub\nexport default undefined\n"),
+ .specifier = ZigString.init(specifier),
+ .source_url = ZigString.init(path.text),
+ // // TODO: change hash to a bitfield
+ // .hash = 1,
+
+ // having JSC own the memory causes crashes
+ .hash = 0,
+ };
+ }
+ return error.PrintingErrorWriteFailed;
+ }
+
+ if (comptime Environment.dump_source) {
+ try dumpSource(specifier, &printer);
+ }
+
+ if (jsc_vm.isWatcherEnabled()) {
+ const resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, specifier, path.text, null);
+
+ if (parse_result.input_fd) |fd_| {
+ if (jsc_vm.bun_watcher != null and !is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) {
+ jsc_vm.bun_watcher.?.addFile(
+ fd_,
+ path.text,
+ hash,
+ loader,
+ 0,
+ package_json,
+ true,
+ ) catch {};
+ }
+ }
+
+ return resolved_source;
+ }
+
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(try default_allocator.dupe(u8, printer.ctx.getWritten())),
+ .specifier = ZigString.init(specifier),
+ .source_url = ZigString.init(path.text),
+ // // TODO: change hash to a bitfield
+ // .hash = 1,
+
+ // having JSC own the memory causes crashes
+ .hash = 0,
+ };
+ },
+ // for .napi modules, provideFetch() should have been called instead
+ .napi => unreachable,
+ // .wasm => {
+ // jsc_vm.transpiled_count += 1;
+ // var fd: ?StoredFileDescriptorType = null;
+
+ // var allocator = if (jsc_vm.has_loaded) jsc_vm.arena.allocator() else jsc_vm.allocator;
+
+ // const hash = http.Watcher.getHash(path.text);
+ // if (jsc_vm.watcher) |watcher| {
+ // if (watcher.indexOf(hash)) |index| {
+ // const _fd = watcher.watchlist.items(.fd)[index];
+ // fd = if (_fd > 0) _fd else null;
+ // }
+ // }
+
+ // var parse_options = Bundler.ParseOptions{
+ // .allocator = allocator,
+ // .path = path,
+ // .loader = loader,
+ // .dirname_fd = 0,
+ // .file_descriptor = fd,
+ // .file_hash = hash,
+ // .macro_remappings = MacroRemap{},
+ // .jsx = jsc_vm.bundler.options.jsx,
+ // };
+
+ // var parse_result = jsc_vm.bundler.parse(
+ // parse_options,
+ // null,
+ // ) orelse {
+ // return error.ParseError;
+ // };
+
+ // return ResolvedSource{
+ // .allocator = if (jsc_vm.has_loaded) &jsc_vm.allocator else null,
+ // .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable),
+ // .specifier = ZigString.init(specifier),
+ // .source_url = ZigString.init(path.text),
+ // .hash = 0,
+ // .tag = ResolvedSource.Tag.wasm,
+ // };
+ // },
+ else => {
+ return ResolvedSource{
+ .allocator = &jsc_vm.allocator,
+ .source_code = ZigString.init(try strings.quotedAlloc(jsc_vm.allocator, path.pretty)),
+ .specifier = ZigString.init(path.text),
+ .source_url = ZigString.init(path.text),
+ .hash = 0,
+ };
+ },
+ }
+ }
+
+ pub fn runBunPlugin(
+ jsc_vm: *VirtualMachine,
+ source_code_printer: *js_printer.BufferPrinter,
+ parse_result: *ParseResult,
+ ret: *ErrorableResolvedSource,
+ ) !void {
+ var printer = source_code_printer.*;
+ printer.ctx.reset();
+
+ defer printer.ctx.reset();
+ // If we start transpiling in the middle of an existing transpilation session
+ // we will hit undefined memory bugs
+ // unless we disable resetting the store until we are done transpiling
+ const prev_disable_reset = js_ast.Stmt.Data.Store.disable_reset;
+ js_ast.Stmt.Data.Store.disable_reset = true;
+ js_ast.Expr.Data.Store.disable_reset = true;
+
+ // flip the source cache we use,
+ // unless we're already transpiling a plugin;
+ // that case can happen when a plugin load triggers another transpile
+ const was_printing_plugin = jsc_vm.is_printing_plugin;
+ const prev = jsc_vm.bundler.resolver.caches.fs.use_alternate_source_cache;
+ jsc_vm.is_printing_plugin = true;
+ defer {
+ js_ast.Stmt.Data.Store.disable_reset = prev_disable_reset;
+ js_ast.Expr.Data.Store.disable_reset = prev_disable_reset;
+ if (!was_printing_plugin) jsc_vm.bundler.resolver.caches.fs.use_alternate_source_cache = prev;
+ jsc_vm.is_printing_plugin = was_printing_plugin;
+ }
+ // we flip use_alternate_source_cache
+ if (!was_printing_plugin) jsc_vm.bundler.resolver.caches.fs.use_alternate_source_cache = !prev;
+
+ // this is a bad idea, but it should work for now.
+ const original_name = parse_result.ast.symbols[parse_result.ast.bun_plugin.ref.innerIndex()].original_name;
+ parse_result.ast.symbols[parse_result.ast.bun_plugin.ref.innerIndex()].original_name = "globalThis.Bun.plugin";
+ defer {
+ parse_result.ast.symbols[parse_result.ast.bun_plugin.ref.innerIndex()].original_name = original_name;
+ }
+ const hoisted_stmts = parse_result.ast.bun_plugin.hoisted_stmts.items;
+
+ var parts = [1]js_ast.Part{
+ js_ast.Part{
+ .stmts = hoisted_stmts,
+ },
+ };
+ var ast_copy = parse_result.ast;
+ ast_copy.import_records = try jsc_vm.allocator.dupe(ImportRecord, ast_copy.import_records);
+ defer jsc_vm.allocator.free(ast_copy.import_records);
+ ast_copy.parts = &parts;
+ ast_copy.prepend_part = null;
+ var temporary_source = parse_result.source;
+ var source_name = try std.fmt.allocPrint(jsc_vm.allocator, "{s}.plugin.{s}", .{ temporary_source.path.text, temporary_source.path.name.ext[1..] });
+ temporary_source.path = Fs.Path.init(source_name);
+
+ var temp_parse_result = parse_result.*;
+ temp_parse_result.ast = ast_copy;
+
+ try jsc_vm.bundler.linker.link(
+ temporary_source.path,
+ &temp_parse_result,
+ jsc_vm.origin,
+ .absolute_path,
+ false,
+ true,
+ );
+
+ _ = brk: {
+ defer source_code_printer.* = printer;
+ break :brk try jsc_vm.bundler.printWithSourceMapMaybe(
+ temp_parse_result.ast,
+ &temporary_source,
+ @TypeOf(&printer),
+ &printer,
+ .esm_ascii,
+ true,
+ SavedSourceMap.SourceMapHandler.init(&jsc_vm.source_mappings),
+ );
+ };
+ const wrote = printer.ctx.getWritten();
+
+ if (wrote.len > 0) {
+ if (comptime Environment.dump_source)
+ try dumpSource(temporary_source.path.text, &printer);
+
+ var exception = [1]JSC.JSValue{JSC.JSValue.zero};
+ const promise = JSC.JSModuleLoader.evaluate(
+ jsc_vm.global,
+ wrote.ptr,
+ wrote.len,
+ temporary_source.path.text.ptr,
+ temporary_source.path.text.len,
+ parse_result.source.path.text.ptr,
+ parse_result.source.path.text.len,
+ JSC.JSValue.jsUndefined(),
+ &exception,
+ );
+ if (!exception[0].isEmpty()) {
+ ret.* = JSC.ErrorableResolvedSource.err(
+ error.JSErrorObject,
+ exception[0].asVoid(),
+ );
+ return error.PluginError;
+ }
+
+ if (!promise.isEmptyOrUndefinedOrNull()) {
+ if (promise.asInternalPromise()) |promise_value| {
+ jsc_vm.waitForPromise(promise_value);
+
+ if (promise_value.status(jsc_vm.global.vm()) == .Rejected) {
+ ret.* = JSC.ErrorableResolvedSource.err(
+ error.JSErrorObject,
+ promise_value.result(jsc_vm.global.vm()).asVoid(),
+ );
+ return error.PluginError;
+ }
+ }
+ }
+ }
+ }
+ pub fn normalizeSpecifier(jsc_vm: *VirtualMachine, slice_: string) string {
+ var slice = slice_;
+ if (slice.len == 0) return slice;
+ var was_http = false;
+ if (strings.hasPrefixComptime(slice, "https://")) {
+ slice = slice["https://".len..];
+ was_http = true;
+ } else if (strings.hasPrefixComptime(slice, "http://")) {
+ slice = slice["http://".len..];
+ was_http = true;
+ }
+
+ if (strings.hasPrefix(slice, jsc_vm.origin.host)) {
+ slice = slice[jsc_vm.origin.host.len..];
+ } else if (was_http) {
+ if (strings.indexOfChar(slice, '/')) |i| {
+ slice = slice[i..];
+ }
+ }
+
+ if (jsc_vm.origin.path.len > 1) {
+ if (strings.hasPrefix(slice, jsc_vm.origin.path)) {
+ slice = slice[jsc_vm.origin.path.len..];
+ }
+ }
+
+ if (jsc_vm.bundler.options.routes.asset_prefix_path.len > 0) {
+ if (strings.hasPrefix(slice, jsc_vm.bundler.options.routes.asset_prefix_path)) {
+ slice = slice[jsc_vm.bundler.options.routes.asset_prefix_path.len..];
+ }
+ }
+
+ return slice;
+ }
+
+ pub export fn Bun__fetchBuiltinModule(
+ jsc_vm: *VirtualMachine,
+ globalObject: *JSC.JSGlobalObject,
+ specifier: *ZigString,
+ referrer: *ZigString,
+ ret: *ErrorableResolvedSource,
+ ) bool {
+ JSC.markBinding(@src());
+ var log = logger.Log.init(jsc_vm.bundler.allocator);
+ defer log.deinit();
+ if (ModuleLoader.fetchBuiltinModule(jsc_vm, specifier.slice(), &log, false) catch |err| {
+ if (err == error.AsyncModule) {
+ unreachable;
+ }
+ VirtualMachine.processFetchLog(globalObject, specifier.*, referrer.*, &log, ret, err);
+ return true;
+ }) |builtin| {
+ ret.* = ErrorableResolvedSource.ok(builtin);
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ pub export fn Bun__transpileFile(
+ jsc_vm: *VirtualMachine,
+ globalObject: *JSC.JSGlobalObject,
+ specifier_ptr: *ZigString,
+ referrer: *ZigString,
+ ret: *ErrorableResolvedSource,
+ allow_promise: bool,
+ ) ?*anyopaque {
+ JSC.markBinding(@src());
+ var log = logger.Log.init(jsc_vm.bundler.allocator);
+ defer log.deinit();
+ var _specifier = specifier_ptr.toSlice(jsc_vm.allocator);
+ var referrer_slice = referrer.toSlice(jsc_vm.allocator);
+ defer _specifier.deinit();
+ defer referrer_slice.deinit();
+ var specifier = normalizeSpecifier(jsc_vm, _specifier.slice());
+ const path = Fs.Path.init(specifier);
+ const loader = jsc_vm.bundler.options.loaders.get(path.name.ext) orelse options.Loader.js;
+ var promise: ?*JSC.JSInternalPromise = null;
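+ // when a package has to be installed first, transpileSourceCode returns
+ // error.AsyncModule and fills in `promise`; we hand that promise back so the
+ // module loader can resume once the install finishes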
+ ret.* = ErrorableResolvedSource.ok(
+ ModuleLoader.transpileSourceCode(
+ jsc_vm,
+ specifier,
+ referrer_slice.slice(),
+ path,
+ loader,
+ &log,
+ null,
+ ret,
+ if (allow_promise) &promise else null,
+ VirtualMachine.source_code_printer.?,
+ globalObject,
+ FetchFlags.transpile,
+ ) catch |err| {
+ if (err == error.AsyncModule) {
+ std.debug.assert(promise != null);
+ return promise;
+ }
+
+ if (err == error.PluginError) {
+ return null;
+ }
+ VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer.*, &log, ret, err);
+ return null;
+ },
+ );
+ return promise;
+ }
+
+ export fn Bun__runVirtualModule(globalObject: *JSC.JSGlobalObject, specifier_ptr: *ZigString) JSValue {
+ JSC.markBinding(@src());
+ if (globalObject.bunVM().plugin_runner == null) return JSValue.zero;
+
+ const specifier = specifier_ptr.slice();
+
+ if (!PluginRunner.couldBePlugin(specifier)) {
+ return JSValue.zero;
+ }
+
+ const namespace = PluginRunner.extractNamespace(specifier);
+ const after_namespace = if (namespace.len == 0)
+ specifier
+ else
+ specifier[@minimum(namespace.len + 1, specifier.len)..];
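+ // e.g. for the specifier "my-plugin:file.txt", namespace is "my-plugin" and
+ // after_namespace is "file.txt" (illustrative values)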
+
+ return globalObject.runOnLoadPlugins(ZigString.init(namespace), ZigString.init(after_namespace), .bun) orelse return JSValue.zero;
+ }
+
+ const shared_library_suffix = if (Environment.isMac) "dylib" else if (Environment.isLinux) "so" else "";
+
+ pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: string, log: *logger.Log, comptime disable_transpilying: bool) !?ResolvedSource {
+ if (jsc_vm.node_modules != null and strings.eqlComptime(specifier, JSC.bun_file_import_path)) {
+ // We kind of need an abstraction around this.
+ // Basically we should subclass JSC::SourceCode with:
+ // - hash
+ // - file descriptor for source input
+ // - file path + file descriptor for bytecode caching
+ // - separate bundles for server build vs browser build OR at least separate sections
+ const code = try jsc_vm.node_modules.?.readCodeAsStringSlow(jsc_vm.allocator);
+
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(code),
+ .specifier = ZigString.init(JSC.bun_file_import_path),
+ .source_url = ZigString.init(JSC.bun_file_import_path[1..]),
+ .hash = 0, // TODO
+ };
+ } else if (jsc_vm.node_modules == null and strings.eqlComptime(specifier, Runtime.Runtime.Imports.Name)) {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(Runtime.Runtime.sourceContentBun()),
+ .specifier = ZigString.init(Runtime.Runtime.Imports.Name),
+ .source_url = ZigString.init(Runtime.Runtime.Imports.Name),
+ .hash = Runtime.Runtime.versionHash(),
+ };
+ } else if (HardcodedModule.Map.get(specifier)) |hardcoded| {
+ switch (hardcoded) {
+ // This is all complicated because the imports have to be linked and we want to run the printer on it
+ // so it consistently handles bundled imports.
+ // We can't take the shortcut of just directly importing the file, sadly.
+ .@"bun:main" => {
+ if (comptime disable_transpilying) {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsc_vm.entry_point.source.contents),
+ .specifier = ZigString.init(std.mem.span(JSC.VirtualMachine.main_file_name)),
+ .source_url = ZigString.init(std.mem.span(JSC.VirtualMachine.main_file_name)),
+ .hash = 0,
+ };
+ }
+ defer jsc_vm.transpiled_count += 1;
+
+ var bundler = &jsc_vm.bundler;
+ var old = jsc_vm.bundler.log;
+ jsc_vm.bundler.log = log;
+ jsc_vm.bundler.linker.log = log;
+ jsc_vm.bundler.resolver.log = log;
+ defer {
+ jsc_vm.bundler.log = old;
+ jsc_vm.bundler.linker.log = old;
+ jsc_vm.bundler.resolver.log = old;
+ }
+
+ var jsx = bundler.options.jsx;
+ jsx.parse = false;
+ var opts = js_parser.Parser.Options.init(jsx, .js);
+ opts.enable_bundling = false;
+ opts.transform_require_to_import = false;
+ opts.features.dynamic_require = true;
+ opts.can_import_from_bundle = bundler.options.node_modules_bundle != null;
+ opts.features.hot_module_reloading = false;
+ opts.features.react_fast_refresh = false;
+ opts.filepath_hash_for_hmr = 0;
+ opts.warn_about_unbundled_modules = false;
+ opts.macro_context = &jsc_vm.bundler.macro_context.?;
+ const main_ast = (bundler.resolver.caches.js.parse(jsc_vm.allocator, opts, bundler.options.define, bundler.log, &jsc_vm.entry_point.source) catch null) orelse {
+ return error.ParseError;
+ };
+ var parse_result = ParseResult{ .source = jsc_vm.entry_point.source, .ast = main_ast, .loader = .js, .input_fd = null };
+ var file_path = Fs.Path.init(bundler.fs.top_level_dir);
+ file_path.name.dir = bundler.fs.top_level_dir;
+ file_path.name.base = "bun:main";
+ try bundler.linker.link(
+ file_path,
+ &parse_result,
+ jsc_vm.origin,
+ .absolute_path,
+ false,
+ true,
+ );
+ var printer = JSC.VirtualMachine.source_code_printer.?.*;
+ var written: usize = undefined;
+ printer.ctx.reset();
+ {
+ defer JSC.VirtualMachine.source_code_printer.?.* = printer;
+ written = try jsc_vm.bundler.printWithSourceMap(
+ parse_result,
+ @TypeOf(&printer),
+ &printer,
+ .esm_ascii,
+ SavedSourceMap.SourceMapHandler.init(&jsc_vm.source_mappings),
+ );
+ }
+
+ if (comptime Environment.dump_source)
+ try dumpSource(JSC.VirtualMachine.main_file_name, &printer);
+
+ if (written == 0) {
+ return error.PrintingErrorWriteFailed;
+ }
+
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, printer.ctx.written) catch unreachable),
+ .specifier = ZigString.init(std.mem.span(JSC.VirtualMachine.main_file_name)),
+ .source_url = ZigString.init(std.mem.span(JSC.VirtualMachine.main_file_name)),
+ .hash = 0,
+ };
+ },
+ .@"bun:jsc" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "bun-jsc.exports.js")),
+ .specifier = ZigString.init("bun:jsc"),
+ .source_url = ZigString.init("bun:jsc"),
+ .hash = 0,
+ };
+ },
+ .@"node:child_process" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "child_process.exports.js")),
+ .specifier = ZigString.init("node:child_process"),
+ .source_url = ZigString.init("node:child_process"),
+ .hash = 0,
+ };
+ },
+ .@"node:net" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "net.exports.js")),
+ .specifier = ZigString.init("node:net"),
+ .source_url = ZigString.init("node:net"),
+ .hash = 0,
+ };
+ },
+ .@"node:fs" => {
+ if (comptime Environment.isDebug) {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(strings.append(bun.default_allocator, jsModuleFromFile(jsc_vm.load_builtins_from_path, "fs.exports.js"), JSC.Node.fs.constants_string) catch unreachable),
+ .specifier = ZigString.init("node:fs"),
+ .source_url = ZigString.init("node:fs"),
+ .hash = 0,
+ };
+ }
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(@embedFile("fs.exports.js") ++ JSC.Node.fs.constants_string),
+ .specifier = ZigString.init("node:fs"),
+ .source_url = ZigString.init("node:fs"),
+ .hash = 0,
+ };
+ },
+ .@"node:buffer" => return jsSyntheticModule(.@"node:buffer"),
+ .@"node:string_decoder" => return jsSyntheticModule(.@"node:string_decoder"),
+ .@"node:module" => return jsSyntheticModule(.@"node:module"),
+ .@"node:events" => return jsSyntheticModule(.@"node:events"),
+ .@"node:process" => return jsSyntheticModule(.@"node:process"),
+ .@"node:tty" => return jsSyntheticModule(.@"node:tty"),
+ .@"node:stream" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "streams.exports.js")),
+ .specifier = ZigString.init("node:stream"),
+ .source_url = ZigString.init("node:stream"),
+ .hash = 0,
+ };
+ },
+
+ .@"node:fs/promises" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(@embedFile("fs_promises.exports.js") ++ JSC.Node.fs.constants_string),
+ .specifier = ZigString.init("node:fs/promises"),
+ .source_url = ZigString.init("node:fs/promises"),
+ .hash = 0,
+ };
+ },
+ .@"node:path" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "path.exports.js")),
+ .specifier = ZigString.init("node:path"),
+ .source_url = ZigString.init("node:path"),
+ .hash = 0,
+ };
+ },
+ .@"node:path/win32" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "path-win32.exports.js")),
+ .specifier = ZigString.init("node:path/win32"),
+ .source_url = ZigString.init("node:path/win32"),
+ .hash = 0,
+ };
+ },
+ .@"node:path/posix" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "path-posix.exports.js")),
+ .specifier = ZigString.init("node:path/posix"),
+ .source_url = ZigString.init("node:path/posix"),
+ .hash = 0,
+ };
+ },
+
+ .@"node:os" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(jsModuleFromFile(jsc_vm.load_builtins_from_path, "os.exports.js")),
+ .specifier = ZigString.init("node:os"),
+ .source_url = ZigString.init("node:os"),
+ .hash = 0,
+ };
+ },
+ .@"bun:ffi" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ "export const FFIType = " ++
+ JSC.FFI.ABIType.map_to_js_object ++
+ ";\n\n" ++
+ "export const suffix = '" ++ shared_library_suffix ++ "';\n\n" ++
+ @embedFile("ffi.exports.js") ++
+ "\n",
+ ),
+ .specifier = ZigString.init("bun:ffi"),
+ .source_url = ZigString.init("bun:ffi"),
+ .hash = 0,
+ };
+ },
+ .@"detect-libc" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, @embedFile(if (Environment.isLinux) "detect-libc.linux.js" else "detect-libc.js")),
+ ),
+ .specifier = ZigString.init("detect-libc"),
+ .source_url = ZigString.init("detect-libc"),
+ .hash = 0,
+ };
+ },
+ .@"node:url" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "url.exports.js")),
+ ),
+ .specifier = ZigString.init("node:url"),
+ .source_url = ZigString.init("node:url"),
+ .hash = 0,
+ };
+ },
+ .@"node:assert" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "assert.exports.js")),
+ ),
+ .specifier = ZigString.init("node:assert"),
+ .source_url = ZigString.init("node:assert"),
+ .hash = 0,
+ };
+ },
+ .@"bun:sqlite" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./bindings/sqlite/sqlite.exports.js")),
+ ),
+ .specifier = ZigString.init("bun:sqlite"),
+ .source_url = ZigString.init("bun:sqlite"),
+ .hash = 0,
+ };
+ },
+ .@"node:perf_hooks" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./perf_hooks.exports.js")),
+ ),
+ .specifier = ZigString.init("node:perf_hooks"),
+ .source_url = ZigString.init("node:perf_hooks"),
+ .hash = 0,
+ };
+ },
+ .@"ws" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./ws.exports.js")),
+ ),
+ .specifier = ZigString.init("ws"),
+ .source_url = ZigString.init("ws"),
+ .hash = 0,
+ };
+ },
+ .@"node:timers" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_timers.exports.js")),
+ ),
+ .specifier = ZigString.init("node:timers"),
+ .source_url = ZigString.init("node:timers"),
+ .hash = 0,
+ };
+ },
+ .@"node:timers/promises" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_timers_promises.exports.js")),
+ ),
+ .specifier = ZigString.init("node:timers/promises"),
+ .source_url = ZigString.init("node:timers/promises"),
+ .hash = 0,
+ };
+ },
+ .@"node:stream/web" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_streams_web.exports.js")),
+ ),
+ .specifier = ZigString.init("node:stream/web"),
+ .source_url = ZigString.init("node:stream/web"),
+ .hash = 0,
+ };
+ },
+ .@"node:stream/consumer" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./node_streams_consumer.exports.js")),
+ ),
+ .specifier = ZigString.init("node:stream/consumer"),
+ .source_url = ZigString.init("node:stream/consumer"),
+ .hash = 0,
+ };
+ },
+ .@"undici" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./undici.exports.js")),
+ ),
+ .specifier = ZigString.init("undici"),
+ .source_url = ZigString.init("undici"),
+ .hash = 0,
+ };
+ },
+ .@"node:http" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./http.exports.js")),
+ ),
+ .specifier = ZigString.init("node:http"),
+ .source_url = ZigString.init("node:http"),
+ .hash = 0,
+ };
+ },
+ .@"node:https" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./https.exports.js")),
+ ),
+ .specifier = ZigString.init("node:https"),
+ .source_url = ZigString.init("node:https"),
+ .hash = 0,
+ };
+ },
+ .@"depd" => {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(
+ @as(string, jsModuleFromFile(jsc_vm.load_builtins_from_path, "./depd.exports.js")),
+ ),
+ .specifier = ZigString.init("depd"),
+ .source_url = ZigString.init("depd"),
+ .hash = 0,
+ };
+ },
+ }
+ } else if (specifier.len > js_ast.Macro.namespaceWithColon.len and
+ strings.eqlComptimeIgnoreLen(specifier[0..js_ast.Macro.namespaceWithColon.len], js_ast.Macro.namespaceWithColon))
+ {
+ if (jsc_vm.macro_entry_points.get(MacroEntryPoint.generateIDFromSpecifier(specifier))) |entry| {
+ return ResolvedSource{
+ .allocator = null,
+ .source_code = ZigString.init(entry.source.contents),
+ .specifier = ZigString.init(specifier),
+ .source_url = ZigString.init(specifier),
+ .hash = 0,
+ };
+ }
+ }
+
+ return null;
+ }
+
+ export fn Bun__transpileVirtualModule(
+ globalObject: *JSC.JSGlobalObject,
+ specifier_ptr: *ZigString,
+ referrer_ptr: *ZigString,
+ source_code: *ZigString,
+ loader_: Api.Loader,
+ ret: *ErrorableResolvedSource,
+ ) bool {
+ JSC.markBinding(@src());
+ const jsc_vm = globalObject.bunVM();
+ std.debug.assert(jsc_vm.plugin_runner != null);
+
+ var specifier_slice = specifier_ptr.toSlice(jsc_vm.allocator);
+ const specifier = specifier_slice.slice();
+ defer specifier_slice.deinit();
+ var source_code_slice = source_code.toSlice(jsc_vm.allocator);
+ defer source_code_slice.deinit();
+ var referrer_slice = referrer_ptr.toSlice(jsc_vm.allocator);
+ defer referrer_slice.deinit();
+
+ var virtual_source = logger.Source.initPathString(specifier, source_code_slice.slice());
+ var log = logger.Log.init(jsc_vm.allocator);
+ const path = Fs.Path.init(specifier);
+
+ const loader = if (loader_ != ._none)
+ options.Loader.fromString(@tagName(loader_)).?
+ else
+ jsc_vm.bundler.options.loaders.get(path.name.ext) orelse brk: {
+ if (strings.eqlLong(specifier, jsc_vm.main, true)) {
+ break :brk options.Loader.js;
+ }
+
+ break :brk options.Loader.file;
+ };
+
+ defer log.deinit();
+ ret.* = ErrorableResolvedSource.ok(
+ ModuleLoader.transpileSourceCode(
+ jsc_vm,
+ specifier,
+ referrer_slice.slice(),
+ path,
+ options.Loader.fromString(@tagName(loader)).?,
+ &log,
+ &virtual_source,
+ ret,
+ null,
+ VirtualMachine.source_code_printer.?,
+ globalObject,
+ FetchFlags.transpile,
+ ) catch |err| {
+ if (err == error.PluginError) {
+ return true;
+ }
+ VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer_ptr.*, &log, ret, err);
+ return true;
+ },
+ );
+ return true;
+ }
+
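+ // referencing the exported functions in a comptime block forces Zig to emit
+ // their symbols even though nothing on the Zig side calls them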
+ comptime {
+ _ = Bun__transpileVirtualModule;
+ _ = Bun__runVirtualModule;
+ _ = Bun__transpileFile;
+ _ = Bun__fetchBuiltinModule;
+ _ = Bun__getDefaultLoader;
+ }
+};
+
+pub const FetchFlags = enum {
+ transpile,
+ print_source,
+ print_source_and_clone,
+
+ pub fn disableTranspiling(this: FetchFlags) bool {
+ return this != .transpile;
+ }
+};
+
+const SavedSourceMap = JSC.SavedSourceMap;
+
+pub const HardcodedModule = enum {
+ @"bun:ffi",
+ @"bun:jsc",
+ @"bun:main",
+ @"bun:sqlite",
+ @"depd",
+ @"detect-libc",
+ @"node:assert",
+ @"node:buffer",
+ @"node:child_process",
+ @"node:events",
+ @"node:fs",
+ @"node:fs/promises",
+ @"node:http",
+ @"node:https",
+ @"node:module",
+ @"node:net",
+ @"node:os",
+ @"node:path",
+ @"node:path/posix",
+ @"node:path/win32",
+ @"node:perf_hooks",
+ @"node:process",
+ @"node:stream",
+ @"node:stream/consumer",
+ @"node:stream/web",
+ @"node:string_decoder",
+ @"node:timers",
+ @"node:timers/promises",
+ @"node:tty",
+ @"node:url",
+ @"undici",
+ @"ws",
+ /// Already resolved modules go in here.
+ /// This does not remap the module name, it is just a hash table.
+ /// Do not put modules that have aliases in here;
+ /// put those in Aliases.
+ pub const Map = bun.ComptimeStringMap(
+ HardcodedModule,
+ .{
+ .{ "buffer", HardcodedModule.@"node:buffer" },
+ .{ "bun:ffi", HardcodedModule.@"bun:ffi" },
+ .{ "bun:jsc", HardcodedModule.@"bun:jsc" },
+ .{ "bun:main", HardcodedModule.@"bun:main" },
+ .{ "bun:sqlite", HardcodedModule.@"bun:sqlite" },
+ .{ "depd", HardcodedModule.@"depd" },
+ .{ "detect-libc", HardcodedModule.@"detect-libc" },
+ .{ "node:assert", HardcodedModule.@"node:assert" },
+ .{ "node:buffer", HardcodedModule.@"node:buffer" },
+ .{ "node:child_process", HardcodedModule.@"node:child_process" },
+ .{ "node:events", HardcodedModule.@"node:events" },
+ .{ "node:fs", HardcodedModule.@"node:fs" },
+ .{ "node:fs/promises", HardcodedModule.@"node:fs/promises" },
+ .{ "node:http", HardcodedModule.@"node:http" },
+ .{ "node:https", HardcodedModule.@"node:https" },
+ .{ "node:module", HardcodedModule.@"node:module" },
+ .{ "node:net", HardcodedModule.@"node:net" },
+ .{ "node:os", HardcodedModule.@"node:os" },
+ .{ "node:path", HardcodedModule.@"node:path" },
+ .{ "node:path/posix", HardcodedModule.@"node:path/posix" },
+ .{ "node:path/win32", HardcodedModule.@"node:path/win32" },
+ .{ "node:perf_hooks", HardcodedModule.@"node:perf_hooks" },
+ .{ "node:process", HardcodedModule.@"node:process" },
+ .{ "node:stream", HardcodedModule.@"node:stream" },
+ .{ "node:stream/consumer", HardcodedModule.@"node:stream/consumer" },
+ .{ "node:stream/web", HardcodedModule.@"node:stream/web" },
+ .{ "node:string_decoder", HardcodedModule.@"node:string_decoder" },
+ .{ "node:timers", HardcodedModule.@"node:timers" },
+ .{ "node:timers/promises", HardcodedModule.@"node:timers/promises" },
+ .{ "node:tty", HardcodedModule.@"node:tty" },
+ .{ "node:url", HardcodedModule.@"node:url" },
+ .{ "undici", HardcodedModule.@"undici" },
+ .{ "ws", HardcodedModule.@"ws" },
+ },
+ );
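+ // Illustrative lookup flow: a bare import like "fs" is first remapped through
+ // Aliases ("fs" -> "node:fs"), and the canonical name is then resolved via Map.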
+ pub const Aliases = bun.ComptimeStringMap(
+ string,
+ .{
+ .{ "assert", "node:assert" },
+ .{ "buffer", "node:buffer" },
+ .{ "bun", "bun" },
+ .{ "bun:ffi", "bun:ffi" },
+ .{ "bun:jsc", "bun:jsc" },
+ .{ "bun:sqlite", "bun:sqlite" },
+ .{ "bun:wrap", "bun:wrap" },
+ .{ "child_process", "node:child_process" },
+ .{ "depd", "depd" },
+ .{ "detect-libc", "detect-libc" },
+ .{ "detect-libc/lib/detect-libc.js", "detect-libc" },
+ .{ "events", "node:events" },
+ .{ "ffi", "bun:ffi" },
+ .{ "fs", "node:fs" },
+ .{ "fs/promises", "node:fs/promises" },
+ .{ "http", "node:http" },
+ .{ "https", "node:https" },
+ .{ "module", "node:module" },
+ .{ "net", "node:net" },
+ .{ "node:assert", "node:assert" },
+ .{ "node:buffer", "node:buffer" },
+ .{ "node:child_process", "node:child_process" },
+ .{ "node:events", "node:events" },
+ .{ "node:fs", "node:fs" },
+ .{ "node:fs/promises", "node:fs/promises" },
+ .{ "node:http", "node:http" },
+ .{ "node:https", "node:https" },
+ .{ "node:module", "node:module" },
+ .{ "node:net", "node:net" },
+ .{ "node:os", "node:os" },
+ .{ "node:path", "node:path" },
+ .{ "node:path/posix", "node:path/posix" },
+ .{ "node:path/win32", "node:path/win32" },
+ .{ "node:perf_hooks", "node:perf_hooks" },
+ .{ "node:process", "node:process" },
+ .{ "node:stream", "node:stream" },
+ .{ "node:stream/consumer", "node:stream/consumer" },
+ .{ "node:stream/web", "node:stream/web" },
+ .{ "node:string_decoder", "node:string_decoder" },
+ .{ "node:timers", "node:timers" },
+ .{ "node:timers/promises", "node:timers/promises" },
+ .{ "node:tty", "node:tty" },
+ .{ "node:url", "node:url" },
+ .{ "os", "node:os" },
+ .{ "path", "node:path" },
+ .{ "path/posix", "node:path/posix" },
+ .{ "path/win32", "node:path/win32" },
+ .{ "perf_hooks", "node:perf_hooks" },
+ .{ "process", "node:process" },
+ .{ "stream", "node:stream" },
+ .{ "stream/consumer", "node:stream/consumer" },
+ .{ "stream/web", "node:stream/web" },
+ .{ "string_decoder", "node:string_decoder" },
+ .{ "timers", "node:timers" },
+ .{ "timers/promises", "node:timers/promises" },
+ .{ "tty", "node:tty" },
+ .{ "undici", "undici" },
+ .{ "url", "node:url" },
+ .{ "ws", "ws" },
+ .{ "ws/lib/websocket", "ws" },
+ },
+ );
+};
+
+pub const DisabledModule = bun.ComptimeStringMap(
+ void,
+ .{
+ .{"node:tls"},
+ .{"node:worker_threads"},
+ .{"tls"},
+ .{"worker_threads"},
+ },
+);
diff --git a/src/bun_js.zig b/src/bun_js.zig
index f4073ee15..d406033a1 100644
--- a/src/bun_js.zig
+++ b/src/bun_js.zig
@@ -28,12 +28,12 @@ const bundler = @import("bundler.zig");
const NodeModuleBundle = @import("node_module_bundle.zig").NodeModuleBundle;
const DotEnv = @import("env_loader.zig");
const which = @import("which.zig").which;
-const VirtualMachine = @import("javascript_core").VirtualMachine;
const JSC = @import("javascript_core");
const AsyncHTTP = @import("http").AsyncHTTP;
const Arena = @import("./mimalloc_arena.zig").Arena;
const OpaqueWrap = JSC.OpaqueWrap;
+const VirtualMachine = JSC.VirtualMachine;
pub const Run = struct {
file: std.fs.File,
@@ -60,6 +60,16 @@ pub const Run = struct {
run.vm.argv = ctx.passthrough;
run.vm.arena = &run.arena;
+ run.vm.bundler.options.install = ctx.install;
+ run.vm.bundler.resolver.opts.install = ctx.install;
+ run.vm.bundler.resolver.opts.global_cache = ctx.debug.global_cache;
+ run.vm.bundler.resolver.opts.prefer_offline_install = (ctx.debug.offline_mode_setting orelse .online) == .offline;
+ run.vm.bundler.resolver.opts.prefer_latest_install = (ctx.debug.offline_mode_setting orelse .online) == .latest;
+ run.vm.bundler.options.global_cache = run.vm.bundler.resolver.opts.global_cache;
+ run.vm.bundler.options.prefer_offline_install = run.vm.bundler.resolver.opts.prefer_offline_install;
+ run.vm.bundler.options.prefer_latest_install = run.vm.bundler.resolver.opts.prefer_latest_install;
+ run.vm.bundler.resolver.env_loader = run.vm.bundler.env;
+
if (ctx.debug.macros) |macros| {
run.vm.bundler.options.macro_remap = macros;
}
@@ -116,6 +126,9 @@ pub const Run = struct {
}
}
+ run.vm.is_main_thread = true;
+ JSC.VirtualMachine.is_main_thread_vm = true;
+
var callback = OpaqueWrap(Run, Run.start);
run.vm.global.vm().holdAPILock(&run, callback);
}
diff --git a/src/bundler.zig b/src/bundler.zig
index 93073123f..8c9cdb769 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -54,6 +54,7 @@ const Linker = linker.Linker;
const Resolver = _resolver.Resolver;
const TOML = @import("./toml/toml_parser.zig").TOML;
const JSC = @import("javascript_core");
+const PackageManager = @import("./install/install.zig").PackageManager;
pub fn MacroJSValueType_() type {
if (comptime JSC.is_bindgen) {
@@ -123,6 +124,23 @@ pub const ParseResult = struct {
ast: js_ast.Ast,
input_fd: ?StoredFileDescriptorType = null,
empty: bool = false,
+ pending_imports: _resolver.PendingResolution.List = .{},
+
+ pub fn isPendingImport(this: *const ParseResult, id: u32) bool {
+ const import_record_ids = this.pending_imports.items(.import_record_id);
+
+ return std.mem.indexOfScalar(u32, import_record_ids, id) != null;
+ }
+
+ /// **DO NOT CALL THIS UNDER NORMAL CIRCUMSTANCES**
+ /// Normally, we allocate each AST in an arena and free it all at once,
+ /// so this function should only be used when an AST is allocated globally.
+ pub fn deinit(this: *ParseResult) void {
+ _resolver.PendingResolution.deinitListItems(this.pending_imports, bun.default_allocator);
+ this.pending_imports.deinit(bun.default_allocator);
+ this.ast.deinit();
+ bun.default_allocator.free(bun.constStrToU8(this.source.contents));
+ }
};
const cache_files = false;
@@ -337,15 +355,12 @@ pub const PluginRunner = struct {
};
pub const Bundler = struct {
- const ThisBundler = @This();
-
options: options.BundleOptions,
log: *logger.Log,
allocator: std.mem.Allocator,
result: options.TransformResult = undefined,
resolver: Resolver,
fs: *Fs.FileSystem,
- // thread_pool: *ThreadPool,
output_files: std.ArrayList(options.OutputFile),
resolve_results: *ResolveResults,
resolve_queue: ResolveQueue,
@@ -361,7 +376,7 @@ pub const Bundler = struct {
pub const isCacheEnabled = cache_files;
- pub fn clone(this: *ThisBundler, allocator: std.mem.Allocator, to: *ThisBundler) !void {
+ pub fn clone(this: *Bundler, allocator: std.mem.Allocator, to: *Bundler) !void {
to.* = this.*;
to.setAllocator(allocator);
to.log = try allocator.create(logger.Log);
@@ -370,19 +385,23 @@ pub const Bundler = struct {
to.macro_context = null;
}
- pub fn setLog(this: *ThisBundler, log: *logger.Log) void {
+ pub inline fn getPackageManager(this: *Bundler) *PackageManager {
+ return this.resolver.getPackageManager();
+ }
+
+ pub fn setLog(this: *Bundler, log: *logger.Log) void {
this.log = log;
this.linker.log = log;
this.resolver.log = log;
}
- pub fn setAllocator(this: *ThisBundler, allocator: std.mem.Allocator) void {
+ pub fn setAllocator(this: *Bundler, allocator: std.mem.Allocator) void {
this.allocator = allocator;
this.linker.allocator = allocator;
this.resolver.allocator = allocator;
}
- pub inline fn resolveEntryPoint(bundler: *ThisBundler, entry_point: string) !_resolver.Result {
+ pub inline fn resolveEntryPoint(bundler: *Bundler, entry_point: string) !_resolver.Result {
return bundler.resolver.resolve(bundler.fs.top_level_dir, entry_point, .entry_point) catch |err| {
const has_dot_slash_form = !strings.hasPrefix(entry_point, "./") and brk: {
_ = bundler.resolver.resolve(bundler.fs.top_level_dir, try strings.append(bundler.allocator, "./", entry_point), .entry_point) catch break :brk false;
@@ -403,17 +422,13 @@ pub const Bundler = struct {
};
}
- // to_bundle:
-
- // thread_pool: *ThreadPool,
-
pub fn init(
allocator: std.mem.Allocator,
log: *logger.Log,
opts: Api.TransformOptions,
existing_bundle: ?*NodeModuleBundle,
env_loader_: ?*DotEnv.Loader,
- ) !ThisBundler {
+ ) !Bundler {
js_ast.Expr.Data.Store.create(allocator);
js_ast.Stmt.Data.Store.create(allocator);
var fs = try Fs.FileSystem.init1(
@@ -449,7 +464,7 @@ pub const Bundler = struct {
// });
var resolve_results = try allocator.create(ResolveResults);
resolve_results.* = ResolveResults.init(allocator);
- return ThisBundler{
+ return Bundler{
.options = bundle_options,
.fs = fs,
.allocator = allocator,
@@ -466,7 +481,7 @@ pub const Bundler = struct {
};
}
- pub fn configureLinkerWithAutoJSX(bundler: *ThisBundler, auto_jsx: bool) void {
+ pub fn configureLinkerWithAutoJSX(bundler: *Bundler, auto_jsx: bool) void {
bundler.linker = Linker.init(
bundler.allocator,
bundler.log,
@@ -490,11 +505,11 @@ pub const Bundler = struct {
}
}
- pub fn configureLinker(bundler: *ThisBundler) void {
+ pub fn configureLinker(bundler: *Bundler) void {
bundler.configureLinkerWithAutoJSX(true);
}
- pub fn runEnvLoader(this: *ThisBundler) !void {
+ pub fn runEnvLoader(this: *Bundler) !void {
switch (this.options.env.behavior) {
.prefix, .load_all => {
// Step 1. Load the project root.
@@ -530,7 +545,7 @@ pub const Bundler = struct {
}
// This must be run after a framework is configured, if a framework is enabled
- pub fn configureDefines(this: *ThisBundler) !void {
+ pub fn configureDefines(this: *Bundler) !void {
if (this.options.defines_loaded) {
return;
}
@@ -583,7 +598,7 @@ pub const Bundler = struct {
}
pub fn configureFramework(
- this: *ThisBundler,
+ this: *Bundler,
comptime load_defines: bool,
) !void {
if (this.options.framework) |*framework| {
@@ -616,7 +631,7 @@ pub const Bundler = struct {
}
}
- pub fn configureFrameworkWithResolveResult(this: *ThisBundler, comptime client: bool) !?_resolver.Result {
+ pub fn configureFrameworkWithResolveResult(this: *Bundler, comptime client: bool) !?_resolver.Result {
if (this.options.framework != null) {
try this.configureFramework(true);
if (comptime client) {
@@ -637,7 +652,7 @@ pub const Bundler = struct {
return null;
}
- pub fn configureRouter(this: *ThisBundler, comptime load_defines: bool) !void {
+ pub fn configureRouter(this: *Bundler, comptime load_defines: bool) !void {
try this.configureFramework(load_defines);
defer {
if (load_defines) {
@@ -704,12 +719,12 @@ pub const Bundler = struct {
}
}
- pub fn resetStore(_: *const ThisBundler) void {
+ pub fn resetStore(_: *const Bundler) void {
js_ast.Expr.Data.Store.reset();
js_ast.Stmt.Data.Store.reset();
}
- pub noinline fn dumpEnvironmentVariables(bundler: *const ThisBundler) void {
+ pub noinline fn dumpEnvironmentVariables(bundler: *const Bundler) void {
@setCold(true);
const opts = std.json.StringifyOptions{
.whitespace = std.json.StringifyOptions.Whitespace{
@@ -729,7 +744,7 @@ pub const Bundler = struct {
empty: bool = false,
};
pub fn buildWithResolveResult(
- bundler: *ThisBundler,
+ bundler: *Bundler,
resolve_result: _resolver.Result,
allocator: std.mem.Allocator,
loader: options.Loader,
@@ -917,7 +932,7 @@ pub const Bundler = struct {
}
pub fn buildWithResolveResultEager(
- bundler: *ThisBundler,
+ bundler: *Bundler,
resolve_result: _resolver.Result,
comptime import_path_format: options.BundleOptions.ImportPathFormat,
comptime Outstream: type,
@@ -1108,7 +1123,7 @@ pub const Bundler = struct {
}
pub fn printWithSourceMapMaybe(
- bundler: *ThisBundler,
+ bundler: *Bundler,
ast: js_ast.Ast,
source: *const logger.Source,
comptime Writer: type,
@@ -1249,7 +1264,7 @@ pub const Bundler = struct {
}
pub fn print(
- bundler: *ThisBundler,
+ bundler: *Bundler,
result: ParseResult,
comptime Writer: type,
writer: Writer,
@@ -1267,7 +1282,7 @@ pub const Bundler = struct {
}
pub fn printWithSourceMap(
- bundler: *ThisBundler,
+ bundler: *Bundler,
result: ParseResult,
comptime Writer: type,
writer: Writer,
@@ -1301,7 +1316,7 @@ pub const Bundler = struct {
};
pub fn parse(
- bundler: *ThisBundler,
+ bundler: *Bundler,
this_parse: ParseOptions,
client_entry_point_: anytype,
) ?ParseResult {
@@ -1309,7 +1324,7 @@ pub const Bundler = struct {
}
pub fn parseMaybeReturnFileOnly(
- bundler: *ThisBundler,
+ bundler: *Bundler,
this_parse: ParseOptions,
client_entry_point_: anytype,
comptime return_file_only: bool,
@@ -1525,7 +1540,7 @@ pub const Bundler = struct {
// We try to be mostly stateless when serving
// This means we need a slightly different resolver setup
pub fn buildFile(
- bundler: *ThisBundler,
+ bundler: *Bundler,
log: *logger.Log,
path_to_use_: string,
comptime client_entry_point_enabled: bool,
@@ -1621,7 +1636,7 @@ pub const Bundler = struct {
}
}
- pub fn normalizeEntryPointPath(bundler: *ThisBundler, _entry: string) string {
+ pub fn normalizeEntryPointPath(bundler: *Bundler, _entry: string) string {
var paths = [_]string{_entry};
var entry = bundler.fs.abs(&paths);
@@ -1653,7 +1668,7 @@ pub const Bundler = struct {
return entry;
}
- fn enqueueEntryPoints(bundler: *ThisBundler, entry_points: []_resolver.Result, comptime normalize_entry_point: bool) usize {
+ fn enqueueEntryPoints(bundler: *Bundler, entry_points: []_resolver.Result, comptime normalize_entry_point: bool) usize {
var entry_point_i: usize = 0;
for (bundler.options.entry_points) |_entry| {
@@ -1690,7 +1705,7 @@ pub const Bundler = struct {
log: *logger.Log,
opts: Api.TransformOptions,
) !options.TransformResult {
- var bundler = try ThisBundler.init(allocator, log, opts, null, null);
+ var bundler = try Bundler.init(allocator, log, opts, null, null);
bundler.configureLinker();
try bundler.configureRouter(false);
try bundler.configureDefines();
@@ -1809,7 +1824,7 @@ pub const Bundler = struct {
// pub fn processResolveQueueWithThreadPool(bundler)
pub fn processResolveQueue(
- bundler: *ThisBundler,
+ bundler: *Bundler,
comptime import_path_format: options.BundleOptions.ImportPathFormat,
comptime wrap_entry_point: bool,
comptime Outstream: type,
@@ -1829,7 +1844,7 @@ pub const Bundler = struct {
if (item.import_kind == .entry_point and loader.supportsClientEntryPoint()) {
var client_entry_point = try bundler.allocator.create(EntryPoints.ClientEntryPoint);
client_entry_point.* = EntryPoints.ClientEntryPoint{};
- try client_entry_point.generate(ThisBundler, bundler, path.name, bundler.options.framework.?.client.path);
+ try client_entry_point.generate(Bundler, bundler, path.name, bundler.options.framework.?.client.path);
const entry_point_output_file = bundler.buildWithResolveResultEager(
item,
diff --git a/src/bunfig.zig b/src/bunfig.zig
index b789bd89c..d2ca86380 100644
--- a/src/bunfig.zig
+++ b/src/bunfig.zig
@@ -31,6 +31,17 @@ const TOML = @import("./toml/toml_parser.zig").TOML;
// TODO: replace Api.TransformOptions with Bunfig
pub const Bunfig = struct {
+ pub const OfflineMode = enum {
+ online,
+ latest,
+ offline,
+ };
+ pub const Prefer = bun.ComptimeStringMap(OfflineMode, .{
+ &.{ "offline", OfflineMode.offline },
+ &.{ "latest", OfflineMode.latest },
+ &.{ "online", OfflineMode.online },
+ });
+
const Parser = struct {
json: js_ast.Expr,
source: *const logger.Source,
@@ -180,7 +191,7 @@ pub const Bunfig = struct {
}
}
- if (comptime cmd.isNPMRelated()) {
+ if (comptime cmd.isNPMRelated() or cmd == .RunCommand or cmd == .AutoCommand) {
if (json.get("install")) |_bun| {
var install: *Api.BunInstall = this.ctx.install orelse brk: {
var install_ = try this.allocator.create(Api.BunInstall);
@@ -189,6 +200,33 @@ pub const Bunfig = struct {
break :brk install_;
};
+ if (json.get("auto")) |auto_install_expr| {
+ if (auto_install_expr.data == .e_string) {
+ this.ctx.debug.global_cache = options.GlobalCache.Map.get(auto_install_expr.asString(this.allocator) orelse "") orelse {
+ try this.addError(auto_install_expr.loc, "Invalid auto install setting, must be one of true, false, \"force\", \"fallback\", or \"disable\"");
+ return;
+ };
+ } else if (auto_install_expr.data == .e_boolean) {
+ this.ctx.debug.global_cache = if (auto_install_expr.asBool().?)
+ options.GlobalCache.allow_install
+ else
+ options.GlobalCache.disable;
+ } else {
+ try this.addError(auto_install_expr.loc, "Invalid auto install setting, must be one of true, false, \"force\", \"fallback\", or \"disable\"");
+ return;
+ }
+ }
+
+ if (json.get("prefer")) |prefer_expr| {
+ try this.expect(prefer_expr, .e_string);
+
+ if (Prefer.get(prefer_expr.asString(bun.default_allocator) orelse "")) |setting| {
+ this.ctx.debug.offline_mode_setting = setting;
+ } else {
+ try this.addError(prefer_expr.loc, "Invalid prefer setting, must be one of \"online\", \"offline\", or \"latest\"");
+ }
+ }
+
if (_bun.get("registry")) |registry| {
install.default_registry = try this.parseRegistry(registry);
}
diff --git a/src/c.zig b/src/c.zig
index 56b27fdf7..6d53607de 100644
--- a/src/c.zig
+++ b/src/c.zig
@@ -396,3 +396,26 @@ pub fn getRelease(buf: []u8) []const u8 {
return "unknown";
}
}
+
+// we only want these wait-status helpers from this header
+const WaitH = struct {
+ pub usingnamespace @cImport(@cInclude("sys/wait.h"));
+};
+
+/// Return exit status.
+pub const WEXITSTATUS = WaitH.WEXITSTATUS;
+
+/// True if child exited normally.
+pub const WIFEXITED = WaitH.WIFEXITED;
+
+/// True if child exited due to uncaught signal.
+pub const WIFSIGNALED = WaitH.WIFSIGNALED;
+
+/// True if child is currently stopped.
+pub const WIFSTOPPED = WaitH.WIFSTOPPED;
+
+/// Return signal number that caused process to stop.
+pub const WSTOPSIG = WaitH.WSTOPSIG;
+
+/// Return signal number that caused process to terminate.
+pub const WTERMSIG = WaitH.WTERMSIG;
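+
+// usage sketch: decoding a status from waitpid()
+//   if (WIFEXITED(status)) exit_code = WEXITSTATUS(status)
+//   else if (WIFSIGNALED(status)) signal_number = WTERMSIG(status)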
diff --git a/src/cache.zig b/src/cache.zig
index 24fac3380..2ff1ce896 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -72,6 +72,19 @@ pub const Fs = struct {
&this.macro_shared_buffer;
}
+ /// When we need to suspend/resume something that has pointers into the shared buffer,
+ /// we need to switch out the shared buffer so that it is not in use.
+ /// The caller must take ownership of (and eventually free) the old buffer.
+ pub fn resetSharedBuffer(this: *Fs, buffer: *MutableString) void {
+ if (buffer == &this.shared_buffer) {
+ this.shared_buffer = MutableString.initEmpty(bun.default_allocator);
+ } else if (buffer == &this.macro_shared_buffer) {
+ this.macro_shared_buffer = MutableString.initEmpty(bun.default_allocator);
+ } else {
+ bun.unreachablePanic("resetSharedBuffer: invalid buffer", .{});
+ }
+ }
+
pub fn deinit(c: *Fs) void {
var iter = c.entries.iterator();
while (iter.next()) |entry| {
diff --git a/src/cli.zig b/src/cli.zig
index 80a1476c8..0e79b1e21 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -168,7 +168,7 @@ pub const Arguments = struct {
clap.parseParam("--cwd <STR> Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable,
clap.parseParam("-c, --config <PATH>? Config file to load bun from (e.g. -c bunfig.toml") catch unreachable,
clap.parseParam("--disable-react-fast-refresh Disable React Fast Refresh") catch unreachable,
- clap.parseParam("--disable-hmr Disable Hot Module Reloading (disables fast refresh too)") catch unreachable,
+ clap.parseParam("--disable-hmr Disable Hot Module Reloading (disables fast refresh too) in bun dev") catch unreachable,
clap.parseParam("--extension-order <STR>... defaults to: .tsx,.ts,.jsx,.js,.json ") catch unreachable,
clap.parseParam("--jsx-factory <STR> Changes the function called when compiling JSX elements using the classic JSX runtime") catch unreachable,
clap.parseParam("--jsx-fragment <STR> Changes the function called when compiling JSX fragments") catch unreachable,
@@ -185,11 +185,15 @@ pub const Arguments = struct {
clap.parseParam("-d, --define <STR>... Substitute K:V while parsing, e.g. --define process.env.NODE_ENV:\"development\". Values are parsed as JSON.") catch unreachable,
clap.parseParam("-e, --external <STR>... Exclude module from transpilation (can use * wildcards). ex: -e react") catch unreachable,
clap.parseParam("-h, --help Display this help and exit. ") catch unreachable,
- clap.parseParam("-i, --inject <STR>... Inject module at the top of every file") catch unreachable,
clap.parseParam("-l, --loader <STR>... Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: jsx, js, json, tsx, ts, css") catch unreachable,
clap.parseParam("-u, --origin <STR> Rewrite import URLs to start with --origin. Default: \"\"") catch unreachable,
clap.parseParam("-p, --port <STR> Port to serve bun's dev server on. Default: \"3000\"") catch unreachable,
clap.parseParam("--hot Enable auto reload in bun's JavaScript runtime") catch unreachable,
+ clap.parseParam("--no-install Disable auto install in bun's JavaScript runtime") catch unreachable,
+ clap.parseParam("-i Automatically install dependencies and use global cache in bun's runtime, equivalent to --install=fallback") catch unreachable,
+ clap.parseParam("--install <STR> Install dependencies automatically when no node_modules are present, default: \"auto\". \"force\" to ignore node_modules, fallback to install any missing") catch unreachable,
+ clap.parseParam("--prefer-offline Skip staleness checks for packages in bun's JavaScript runtime and resolve from disk") catch unreachable,
+ clap.parseParam("--prefer-latest Use the latest matching versions of packages in bun's JavaScript runtime, always checking npm") catch unreachable,
clap.parseParam("--silent Don't repeat the command for bun run") catch unreachable,
clap.parseParam("<POS>... ") catch unreachable,
};
@@ -312,7 +316,7 @@ pub const Arguments = struct {
try loadConfigPath(allocator, auto_loaded, config_path, ctx, comptime cmd);
}
- fn loadConfigWithCmdArgs(
+ pub fn loadConfigWithCmdArgs(
comptime cmd: Command.Tag,
allocator: std.mem.Allocator,
args: clap.Args(clap.Help, cmd.params()),
@@ -402,7 +406,8 @@ pub const Arguments = struct {
opts.serve = cmd == .DevCommand;
opts.main_fields = args.options("--main-fields");
opts.generate_node_module_bundle = cmd == .BunCommand;
- opts.inject = args.options("--inject");
+ // we never actually supported inject.
+ // opts.inject = args.options("--inject");
opts.extension_order = args.options("--extension-order");
ctx.debug.hot_reload = args.flag("--hot");
ctx.passthrough = args.remaining();
@@ -429,6 +434,30 @@ pub const Arguments = struct {
ctx.debug.fallback_only = ctx.debug.fallback_only or args.flag("--disable-bun.js");
ctx.debug.dump_limits = args.flag("--dump-limits");
+ ctx.debug.offline_mode_setting = if (args.flag("--prefer-offline"))
+ Bunfig.OfflineMode.offline
+ else if (args.flag("--prefer-latest"))
+ Bunfig.OfflineMode.latest
+ else
+ Bunfig.OfflineMode.online;
+
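+ // flag precedence, roughly: --no-install wins, then -i, then --install=<mode>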
+ if (args.flag("--no-install")) {
+ ctx.debug.global_cache = .disable;
+ } else if (args.flag("-i")) {
+ ctx.debug.global_cache = .fallback;
+ } else if (args.option("--install")) |enum_value| {
+ // --install=auto, --install=force, --install=disable, etc.
+ if (options.GlobalCache.Map.get(enum_value)) |result| {
+ ctx.debug.global_cache = result;
+ // bare --install with no value means force
+ } else if (enum_value.len == 0) {
+ ctx.debug.global_cache = options.GlobalCache.force;
+ } else {
+ Output.prettyErrorln("Invalid value for --install: \"{s}\". Must be either \"auto\", \"fallback\", \"force\", or \"disable\"\n", .{enum_value});
+ Global.exit(1);
+ }
+ }
+
// var output_dir = args.option("--outdir");
var output_dir: ?string = null;
const production = false;
@@ -784,6 +813,8 @@ pub const Command = struct {
fallback_only: bool = false,
silent: bool = false,
hot_reload: bool = false,
+ global_cache: options.GlobalCache = .auto,
+ offline_mode_setting: ?Bunfig.OfflineMode = null,
// technical debt
macros: ?MacroMap = null,
diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig
index 94c00edc4..1927e6027 100644
--- a/src/cli/test_command.zig
+++ b/src/cli/test_command.zig
@@ -338,6 +338,9 @@ pub const TestCommand = struct {
}
}
+ vm.is_main_thread = true;
+ JSC.VirtualMachine.is_main_thread_vm = true;
+
var scanner = Scanner{
.dirs_to_scan = Scanner.Fifo.init(ctx.allocator),
.options = &vm.bundler.options,
diff --git a/src/css_scanner.zig b/src/css_scanner.zig
index dfa04e0dd..c48a54441 100644
--- a/src/css_scanner.zig
+++ b/src/css_scanner.zig
@@ -1023,6 +1023,7 @@ pub fn NewWriter(
"Not Found - \"{s}\"",
.{import.text.utf8},
import_record.ImportKind.at,
+ err,
) catch {};
},
else => {},
diff --git a/src/global.zig b/src/global.zig
index f0e98861f..fe4f5246a 100644
--- a/src/global.zig
+++ b/src/global.zig
@@ -363,3 +363,18 @@ pub fn isWritable(fd: std.os.fd_t) bool {
return (std.os.poll(polls, 0) catch 0) != 0;
}
+
+pub inline fn unreachablePanic(comptime fmts: []const u8, args: anytype) noreturn {
+ if (comptime !Environment.allow_assert) unreachable;
+ std.debug.panic(fmts, args);
+}
+
+pub fn StringEnum(comptime Type: type, comptime Map: anytype, value: []const u8) ?Type {
+ return ComptimeStringMap(Type, Map).get(value);
+}
+
+pub const Bunfig = @import("./bunfig.zig").Bunfig;
+
+pub const HTTPThread = @import("./http_client_async.zig").HTTPThread;
+
+pub const Analytics = @import("./analytics/analytics_thread.zig");
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index f4c3c7173..e97cc92f9 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -6,10 +6,18 @@ const std = @import("std");
const SlicedString = Semver.SlicedString;
const PackageNameHash = @import("./install.zig").PackageNameHash;
const Features = @import("./install.zig").Features;
+const Install = @import("./install.zig");
const logger = @import("../logger.zig");
const Dependency = @This();
const string = @import("../string_types.zig").string;
const strings = @import("../string_immutable.zig");
+const bun = @import("../global.zig");
+
+pub const Pair = struct {
+ resolution_id: Install.PackageID = Install.invalid_package_id,
+ dependency: Dependency = .{},
+ failed: ?anyerror = null,
+};
pub const URI = union(Tag) {
local: String,
@@ -63,19 +71,27 @@ pub fn isLessThan(string_buf: []const u8, lhs: Dependency, rhs: Dependency) bool
return strings.cmpStringsAsc(void{}, lhs_name, rhs_name);
}
+pub fn countWithDifferentBuffers(this: Dependency, name_buf: []const u8, version_buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) void {
+ builder.count(this.name.slice(name_buf));
+ builder.count(this.version.literal.slice(version_buf));
+}
+
pub fn count(this: Dependency, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) void {
- builder.count(this.name.slice(buf));
- builder.count(this.version.literal.slice(buf));
+ this.countWithDifferentBuffers(buf, buf, StringBuilder, builder);
}
pub fn clone(this: Dependency, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) !Dependency {
+ return this.cloneWithDifferentBuffers(buf, buf, StringBuilder, builder);
+}
+
+pub fn cloneWithDifferentBuffers(this: Dependency, name_buf: []const u8, version_buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) !Dependency {
const out_slice = builder.lockfile.buffers.string_bytes.items;
- const new_literal = builder.append(String, this.version.literal.slice(buf));
+ const new_literal = builder.append(String, this.version.literal.slice(version_buf));
const sliced = new_literal.sliced(out_slice);
return Dependency{
.name_hash = this.name_hash,
- .name = builder.append(String, this.name.slice(buf)),
+ .name = builder.append(String, this.name.slice(name_buf)),
.version = Dependency.parseWithTag(
builder.lockfile.allocator,
new_literal.slice(out_slice),
@@ -128,6 +144,34 @@ pub const Version = struct {
literal: String = String{},
value: Value = Value{ .uninitialized = void{} },
+ pub fn deinit(this: *Version) void {
+ switch (this.tag) {
+ .npm => {
+ this.value.npm.deinit();
+ },
+ else => {},
+ }
+ }
+
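+ // encodes the range ">=0.0.0", which matches any version; presumably the
+ // default constraint when a specifier names no version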
+ pub const @"0.0.0" = Version{
+ .tag = Dependency.Version.Tag.npm,
+ .literal = String.init("0.0.0", "0.0.0"),
+ .value = Value{
+ .npm = Semver.Query.Group{
+ .allocator = bun.default_allocator,
+ .head = .{
+ .head = .{
+ .range = .{
+ .left = .{
+ .op = .gte,
+ },
+ },
+ },
+ },
+ },
+ },
+ };
+
pub const zeroed = Version{};
pub fn clone(
diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig
index ebf73d1a0..c74c0fb29 100644
--- a/src/install/extract_tarball.zig
+++ b/src/install/extract_tarball.zig
@@ -22,6 +22,7 @@ package_id: PackageID,
skip_verify: bool = false,
integrity: Integrity = Integrity{},
url: string = "",
+package_manager: *PackageManager = &PackageManager.instance,
pub inline fn run(this: ExtractTarball, bytes: []const u8) !string {
if (!this.skip_verify and this.integrity.tag.isSupported()) {
@@ -220,7 +221,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string {
Output.flush();
}
}
- var folder_name = PackageManager.instance.cachedNPMPackageFolderNamePrint(&abs_buf2, name, this.resolution.value.npm.version);
+ var folder_name = this.package_manager.cachedNPMPackageFolderNamePrint(&abs_buf2, name, this.resolution.value.npm.version);
if (folder_name.len == 0 or (folder_name.len == 1 and folder_name[0] == '/')) @panic("Tried to delete root and stopped it");
var cache_dir = this.cache_dir;
cache_dir.deleteTree(folder_name) catch {};
diff --git a/src/install/install.zig b/src/install/install.zig
index 683261e50..a30fdb139 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -175,6 +175,7 @@ const NetworkTask = struct {
allocator: std.mem.Allocator,
request_buffer: MutableString = undefined,
response_buffer: MutableString = undefined,
+ package_manager: *PackageManager = &PackageManager.instance,
callback: union(Task.Tag) {
package_manifest: struct {
loaded_manifest: ?Npm.PackageManifest = null,
@@ -185,8 +186,8 @@ const NetworkTask = struct {
},
pub fn notify(this: *NetworkTask, _: anytype) void {
- defer PackageManager.instance.wake();
- PackageManager.instance.network_channel.writeItem(this) catch {};
+ defer this.package_manager.wake();
+ this.package_manager.network_channel.writeItem(this) catch {};
}
// We must use a less restrictive Accept header value
@@ -330,7 +331,7 @@ const NetworkTask = struct {
0,
this.getCompletionCallback(),
);
- this.http.max_retry_count = PackageManager.instance.options.max_retry_count;
+ this.http.max_retry_count = this.package_manager.options.max_retry_count;
this.callback = .{
.package_manifest = .{
.name = try strings.StringOrTinyString.initAppendIfNeeded(name, *FileSystem.FilenameStore, &FileSystem.FilenameStore.instance),
@@ -369,7 +370,7 @@ const NetworkTask = struct {
scope.url.href,
tarball.name,
tarball.resolution.value.npm.version,
- PackageManager.instance.lockfile.buffers.string_bytes.items,
+ this.package_manager.lockfile.buffers.string_bytes.items,
);
} else {
this.url_buf = tarball.url;
@@ -412,7 +413,7 @@ const NetworkTask = struct {
0,
this.getCompletionCallback(),
);
- this.http.max_retry_count = PackageManager.instance.options.max_retry_count;
+ this.http.max_retry_count = this.package_manager.options.max_retry_count;
this.callback = .{ .extract = tarball };
}
};
@@ -489,6 +490,7 @@ const Task = struct {
log: logger.Log,
id: u64,
err: ?anyerror = null,
+ package_manager: *PackageManager = &PackageManager.instance,
/// An ID that lets us register a callback without keeping the same pointer around
pub const Id = struct {
@@ -519,11 +521,11 @@ const Task = struct {
var this = @fieldParentPtr(Task, "threadpool_task", task);
- defer PackageManager.instance.wake();
+ defer this.package_manager.wake();
switch (this.tag) {
.package_manifest => {
- var allocator = PackageManager.instance.allocator;
+ var allocator = bun.default_allocator;
const package_manifest = Npm.Registry.getPackageMetadata(
allocator,
this.request.package_manifest.network.http.response.?,
@@ -531,6 +533,7 @@ const Task = struct {
&this.log,
this.request.package_manifest.name.slice(),
this.request.package_manifest.network.callback.package_manifest.loaded_manifest,
+ this.package_manager,
) catch |err| {
if (comptime Environment.isDebug) {
if (@errorReturnTrace()) |trace| {
@@ -539,7 +542,7 @@ const Task = struct {
}
this.err = err;
this.status = Status.fail;
- PackageManager.instance.resolve_tasks.writeItem(this.*) catch unreachable;
+ this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
};
@@ -550,7 +553,7 @@ const Task = struct {
.fresh => |manifest| {
this.data = .{ .package_manifest = manifest };
this.status = Status.success;
- PackageManager.instance.resolve_tasks.writeItem(this.*) catch unreachable;
+ this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
},
.not_found => {
@@ -558,7 +561,7 @@ const Task = struct {
this.request.package_manifest.name.slice(),
}) catch unreachable;
this.status = Status.fail;
- PackageManager.instance.resolve_tasks.writeItem(this.*) catch unreachable;
+ this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
},
}
@@ -576,13 +579,13 @@ const Task = struct {
this.err = err;
this.status = Status.fail;
this.data = .{ .extract = "" };
- PackageManager.instance.resolve_tasks.writeItem(this.*) catch unreachable;
+ this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
return;
};
this.data = .{ .extract = result };
this.status = Status.success;
- PackageManager.instance.resolve_tasks.writeItem(this.*) catch unreachable;
+ this.package_manager.resolve_tasks.writeItem(this.*) catch unreachable;
},
.binlink => {},
}
@@ -1396,17 +1399,23 @@ const PackageInstall = struct {
}
};
-const Resolution = @import("./resolution.zig").Resolution;
+pub const Resolution = @import("./resolution.zig").Resolution;
const Progress = std.Progress;
const TaggedPointer = @import("../tagged_pointer.zig");
const TaskCallbackContext = union(Tag) {
dependency: PackageID,
request_id: PackageID,
+ root_dependency: PackageID,
+ root_request_id: PackageID,
node_modules_folder: u32, // Really, this is a file descriptor
+ root_node_modules_folder: u32, // Really, this is a file descriptor
pub const Tag = enum {
dependency,
request_id,
node_modules_folder,
+ root_dependency,
+ root_request_id,
+ root_node_modules_folder,
};
};
@@ -1424,6 +1433,7 @@ pub const CacheLevel = struct {
};
const AsyncIO = @import("io");
const Waker = AsyncIO.Waker;
+
// We can't know all the packages we need until we've downloaded all the packages
// The easy way would be:
// 1. Download all packages, parsing their dependencies and enqueuing all dependencies for resolution
@@ -1436,7 +1446,7 @@ pub const PackageManager = struct {
allocator: std.mem.Allocator,
log: *logger.Log,
resolve_tasks: TaskChannel,
- timestamp: u32 = 0,
+ timestamp_for_manifest_cache_control: u32 = 0,
extracted_count: u32 = 0,
default_features: Features = Features{},
summary: Lockfile.Package.Diff.Summary = Lockfile.Package.Diff.Summary{},
@@ -1448,11 +1458,20 @@ pub const PackageManager = struct {
cpu_count: u32 = 0,
package_json_updates: []UpdateRequest = &[_]UpdateRequest{},
+ // progress bar state for when it cannot live on the stack
+ root_progress_node: *std.Progress.Node = undefined,
+ root_download_node: std.Progress.Node = undefined,
+
to_remove: []const UpdateRequest = &[_]UpdateRequest{},
root_package_json_file: std.fs.File,
root_dependency_list: Lockfile.DependencySlice = .{},
+ /// Used to make "dependencies" optional in the main package.
+ /// Depended-on packages still have to explicitly list their dependencies.
+ dynamic_root_dependencies: ?std.ArrayList(Dependency.Pair) = null,
+ // remote_dependencies: RemoteDependency.List = .{},
+
thread_pool: ThreadPool,
manifests: PackageManifestMap = PackageManifestMap{},
@@ -1479,6 +1498,8 @@ pub const PackageManager = struct {
waiter: Waker = undefined,
wait_count: std.atomic.Atomic(usize) = std.atomic.Atomic(usize).init(0),
+ onWake: WakeHandler = .{},
+
const PreallocatedNetworkTasks = std.BoundedArray(NetworkTask, 1024);
const NetworkTaskQueue = std.HashMapUnmanaged(u64, void, IdentityContext(u64), 80);
const PackageIndex = std.AutoHashMapUnmanaged(u64, *Package);
@@ -1491,7 +1512,35 @@ pub const PackageManager = struct {
80,
);
+ pub const WakeHandler = struct {
+ handler: fn (ctx: *anyopaque, pm: *PackageManager) void = undefined,
+ onDependencyError: fn (ctx: *anyopaque, Dependency, PackageID, anyerror) void = undefined,
+ context: ?*anyopaque = null,
+ };
+
+ pub fn failRootResolution(this: *PackageManager, dependency: Dependency, dependency_id: PackageID, err: anyerror) void {
+ if (this.dynamic_root_dependencies) |*dynamic| {
+ dynamic.items[dependency_id].failed = err;
+ if (this.onWake.context) |ctx| {
+ this.onWake.onDependencyError(
+ ctx,
+ dependency,
+ dependency_id,
+ err,
+ );
+ }
+ } else {
+ // reaching this branch indicates a bug in dependency bookkeeping
+ bun.unreachablePanic("failRootResolution: dependency_id: {d} out of bounds", .{dependency_id});
+ }
+ }
+
pub fn wake(this: *PackageManager) void {
+ if (this.onWake.context != null) {
+ this.onWake.handler(this.onWake.context.?, this);
+ return;
+ }
+
_ = this.wait_count.fetchAdd(1, .Monotonic);
this.waiter.wake() catch {};
}
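`onWake` lets an embedder intercept wakeups that would otherwise go through the thread waiter; presumably this is how the runtime's event loop learns that an auto-install task finished without blocking in `waiter.wait()`. A minimal sketch of the registration pattern under that assumption (names are illustrative; the function-valued field matches the pre-0.11 Zig style used in this tree):

const std = @import("std");

const Manager = struct {
    onWake: WakeHandler = .{},

    const WakeHandler = struct {
        handler: fn (ctx: *anyopaque, m: *Manager) void = undefined,
        context: ?*anyopaque = null,
    };

    fn wake(this: *Manager) void {
        // When a handler is registered, the embedder is notified directly;
        // the real code otherwise falls back to the thread waiter.
        if (this.onWake.context) |ctx| this.onWake.handler(ctx, this);
    }
};

const Loop = struct {
    woken: bool = false,
    fn onWake(ctx: *anyopaque, _: *Manager) void {
        var loop = @ptrCast(*Loop, @alignCast(@alignOf(Loop), ctx));
        loop.woken = true;
    }
};

pub fn main() void {
    var loop = Loop{};
    var manager = Manager{};
    manager.onWake = .{ .handler = Loop.onWake, .context = &loop };
    manager.wake();
    std.debug.print("woken: {}\n", .{loop.woken});
}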
@@ -1501,6 +1550,92 @@ pub const PackageManager = struct {
_ = this.waiter.wait() catch 0;
}
+ const DependencyToEnqueue = union(enum) {
+ pending: PackageID,
+ resolution: struct { package_id: PackageID, resolution: Resolution },
+ not_found: void,
+ failure: anyerror,
+ };
+ pub fn enqueueDependencyToRoot(
+ this: *PackageManager,
+ name: []const u8,
+ version_buf: []const u8,
+ version: Dependency.Version,
+ behavior: Dependency.Behavior,
+ ) DependencyToEnqueue {
+ var root_deps = this.dynamicRootDependencies();
+ const existing: []const Dependency.Pair = root_deps.items;
+ var str_buf = this.lockfile.buffers.string_bytes.items;
+ for (existing) |pair, i| {
+ if (strings.eqlLong(this.lockfile.str(pair.dependency.name), name, true)) {
+ if (pair.dependency.version.eql(version, str_buf, version_buf)) {
+ if (pair.resolution_id != invalid_package_id) {
+ return .{
+ .resolution = .{
+ .resolution = this.lockfile.packages.items(.resolution)[pair.resolution_id],
+ .package_id = pair.resolution_id,
+ },
+ };
+ }
+ return .{ .pending = @truncate(u32, i) };
+ }
+ }
+ }
+
+ var builder = this.lockfile.stringBuilder();
+ const dependency = Dependency{
+ .name = String.init(name, name),
+ .name_hash = String.Builder.stringHash(name),
+ .version = version,
+ .behavior = behavior,
+ };
+ dependency.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder);
+
+ builder.allocate() catch |err| {
+ return .{ .failure = err };
+ };
+
+ const cloned_dependency = dependency.cloneWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder) catch unreachable;
+ builder.clamp();
+ const index = @truncate(u32, root_deps.items.len);
+ root_deps.append(
+ .{
+ .dependency = cloned_dependency,
+ },
+ ) catch unreachable;
+ this.enqueueDependencyWithMainAndSuccessFn(
+ index,
+ cloned_dependency,
+ invalid_package_id,
+ true,
+ assignRootResolution,
+ failRootResolution,
+ ) catch |err| {
+ root_deps.items.len = index;
+ return .{ .failure = err };
+ };
+
+ if (root_deps.items[index].failed) |fail| {
+ root_deps.items.len = index;
+ return .{ .failure = fail };
+ }
+
+ const resolution_id = root_deps.items[index].resolution_id;
+
+ // check if we managed to synchronously resolve the dependency
+ if (resolution_id != invalid_package_id) {
+ this.drainDependencyList();
+ return .{
+ .resolution = .{
+ .resolution = this.lockfile.packages.items(.resolution)[resolution_id],
+ .package_id = resolution_id,
+ },
+ };
+ }
+
+ return .{ .pending = index };
+ }
+
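`enqueueDependencyToRoot` reports back through a four-state union, so a caller can tell a synchronous hit (lockfile or disk cache) apart from an in-flight network fetch. A hypothetical caller, reduced to a standalone sketch (handler bodies are illustrative, not Bun's module loader):

const std = @import("std");

const DependencyToEnqueue = union(enum) {
    pending: u32,
    resolution: struct { package_id: u32 },
    not_found: void,
    failure: anyerror,
};

fn handle(result: DependencyToEnqueue) void {
    switch (result) {
        // resolved synchronously; the import can proceed immediately
        .resolution => |r| std.debug.print("package {d} ready\n", .{r.package_id}),
        // a manifest or tarball fetch is in flight; the module load must wait
        .pending => |root_dep_id| std.debug.print("waiting on {d}\n", .{root_dep_id}),
        .not_found => std.debug.print("no such package\n", .{}),
        .failure => |err| std.debug.print("failed: {s}\n", .{@errorName(err)}),
    }
}

pub fn main() void {
    handle(DependencyToEnqueue{ .pending = 0 });
    handle(DependencyToEnqueue{ .failure = error.DistTagNotFound });
}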
pub fn globalLinkDir(this: *PackageManager) !std.fs.Dir {
return this.global_link_dir orelse brk: {
var global_dir = try Options.openGlobalDir(this.options.explicit_global_directory);
@@ -1533,6 +1668,7 @@ pub const PackageManager = struct {
this.ensurePreinstallStateListCapacity(lockfile.packages.len) catch return;
this.preinstall_state.items[package_id] = value;
}
+
pub fn getPreinstallState(this: *PackageManager, package_id: PackageID, _: *Lockfile) PreinstallState {
if (package_id >= this.preinstall_state.items.len) {
return PreinstallState.unknown;
@@ -1779,6 +1915,146 @@ pub const PackageManager = struct {
return true;
}
+ pub fn pathForCachedNPMPath(
+ this: *PackageManager,
+ buf: *[bun.MAX_PATH_BYTES]u8,
+ package_name: []const u8,
+ npm: Semver.Version,
+ ) ![]u8 {
+ var package_name_version_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+
+ var subpath = std.fmt.bufPrintZ(
+ &package_name_version_buf,
+ "{s}" ++ std.fs.path.sep_str ++ "{any}",
+ .{
+ package_name,
+ npm.fmt(this.lockfile.buffers.string_bytes.items),
+ },
+ ) catch unreachable;
+ return this.getCacheDirectory().readLink(
+ subpath,
+ buf,
+ ) catch |err| {
+ // if we run into an error, delete the symlink
+ // so that we don't repeatedly try to read it
+ std.os.unlinkat(this.getCacheDirectory().fd, subpath, 0) catch {};
+ return err;
+ };
+ }
+
+ pub fn pathForResolution(
+ this: *PackageManager,
+ package_id: PackageID,
+ resolution: Resolution,
+ buf: *[bun.MAX_PATH_BYTES]u8,
+ ) ![]u8 {
+ // const folder_name = this.cachedNPMPackageFolderName(name, version);
+ switch (resolution.tag) {
+ .npm => {
+ const npm = resolution.value.npm;
+ const package_name_ = this.lockfile.packages.items(.name)[package_id];
+ const package_name = this.lockfile.str(package_name_);
+
+ return this.pathForCachedNPMPath(buf, package_name, npm.version);
+ },
+ else => return "",
+ }
+ }
+
+ pub fn getInstalledVersionsFromDiskCache(this: *PackageManager, tags_buf: *std.ArrayList(u8), package_name: []const u8, allocator: std.mem.Allocator) !std.ArrayList(Semver.Version) {
+ var list = std.ArrayList(Semver.Version).init(allocator);
+ var dir = this.getCacheDirectory().openDir(package_name, .{ .iterate = true }) catch |err| {
+ switch (err) {
+ error.FileNotFound, error.NotDir, error.AccessDenied, error.DeviceBusy => {
+ return list;
+ },
+ else => return err,
+ }
+ };
+ defer dir.close();
+ var iter = dir.iterate();
+
+ while (try iter.next()) |entry| {
+ if (entry.kind != .Directory and entry.kind != .SymLink) continue;
+ const name = entry.name;
+ var sliced = SlicedString.init(name, name);
+ var parsed = Semver.Version.parse(sliced, allocator);
+ if (!parsed.valid or parsed.wildcard != .none) continue;
+ // OOM is not handled here; allocation failure is treated as unreachable
+ // TODO: handle wildcard versions
+ const total = parsed.version.tag.build.len() + parsed.version.tag.pre.len();
+ if (total > 0) {
+ tags_buf.ensureUnusedCapacity(total) catch unreachable;
+ var available = tags_buf.items.ptr[tags_buf.items.len..tags_buf.capacity];
+ const new_version = parsed.version.cloneInto(name, &available);
+ tags_buf.items.len += total;
+ parsed.version = new_version;
+ }
+
+ list.append(parsed.version) catch unreachable;
+ }
+
+ return list;
+ }
+
+ pub fn resolveFromDiskCache(this: *PackageManager, package_name: []const u8, version: Dependency.Version) ?PackageID {
+ if (version.tag != .npm) {
+ // only npm versions are supported right now;
+ // dist-tags are more ambiguous
+ return null;
+ }
+
+ var arena = std.heap.ArenaAllocator.init(this.allocator);
+ defer arena.deinit();
+ var arena_alloc = arena.allocator();
+ var stack_fallback = std.heap.stackFallback(4096, arena_alloc);
+ var allocator = stack_fallback.get();
+ var tags_buf = std.ArrayList(u8).init(allocator);
+ var installed_versions = this.getInstalledVersionsFromDiskCache(&tags_buf, package_name, allocator) catch |err| {
+ Output.debug("error getting installed versions from disk cache: {s}", .{std.mem.span(@errorName(err))});
+ return null;
+ };
+
+ // TODO: do this in fewer passes
+ std.sort.sort(
+ Semver.Version,
+ installed_versions.items,
+ @as([]const u8, tags_buf.items),
+ Semver.Version.sortGt,
+ );
+ for (installed_versions.items) |installed_version| {
+ if (version.value.npm.satisfies(installed_version)) {
+ var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ var npm_package_path = this.pathForCachedNPMPath(&buf, package_name, installed_version) catch |err| {
+ Output.debug("error getting path for cached npm path: {s}", .{std.mem.span(@errorName(err))});
+ return null;
+ };
+ const dependency = Dependency.Version{
+ .tag = .npm,
+ .value = .{
+ .npm = Semver.Query.Group.from(installed_version),
+ },
+ };
+ switch (FolderResolution.getOrPut(.{ .cache_folder = npm_package_path }, dependency, ".", this)) {
+ .new_package_id => |id| {
+ this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id], false);
+ return id;
+ },
+ .package_id => |id| {
+ this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id], false);
+ return id;
+ },
+ .err => |err| {
+ Output.debug("error getting or putting folder resolution: {s}", .{std.mem.span(@errorName(err))});
+ return null;
+ },
+ }
+ }
+ }
+
+ return null;
+ }
+
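The allocator layering above is worth noting: an arena backs a 4 KB stack-fallback allocator, so scanning the handful of cached versions that usually exist never touches the general-purpose heap, and everything is reclaimed by a single `deinit`. A self-contained sketch of the same pattern, including the descending version sort:

const std = @import("std");

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    // Small lookups stay in the 4 KB stack buffer; larger ones spill to the arena.
    var stack_fallback = std.heap.stackFallback(4096, arena.allocator());
    const allocator = stack_fallback.get();

    var versions = std.ArrayList(u32).init(allocator);
    try versions.appendSlice(&[_]u32{ 3, 1, 2 });
    // Sort descending so the newest cached version is tried first.
    std.sort.sort(u32, versions.items, {}, comptime std.sort.desc(u32));
    std.debug.print("newest: {d}\n", .{versions.items[0]});
}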
const ResolvedPackageResult = struct {
package: Lockfile.Package,
@@ -1798,6 +2074,7 @@ pub const PackageManager = struct {
behavior: Behavior,
manifest: *const Npm.PackageManifest,
find_result: Npm.PackageManifest.FindResult,
+ comptime successFn: SuccessFn,
) !?ResolvedPackageResult {
// Was this package already allocated? Let's reuse the existing one.
@@ -1814,7 +2091,7 @@ pub const PackageManager = struct {
},
},
)) |id| {
- this.lockfile.buffers.resolutions.items[dependency_id] = id;
+ successFn(this, dependency_id, id);
return ResolvedPackageResult{
.package = this.lockfile.packages.get(id),
.is_first_time = false,
@@ -1836,7 +2113,7 @@ pub const PackageManager = struct {
// appendPackage sets the PackageID on the package
package = try this.lockfile.appendPackage(package);
- if (!behavior.isEnabled(if (this.root_dependency_list.contains(dependency_id))
+ if (!behavior.isEnabled(if (this.isRootDependency(dependency_id))
this.options.local_package_features
else
this.options.remote_package_features))
@@ -1846,9 +2123,8 @@ pub const PackageManager = struct {
const preinstall = this.determinePreinstallState(package, this.lockfile);
- this.lockfile.buffers.resolutions.items[dependency_id] = package.meta.id;
if (comptime Environment.isDebug or Environment.isTest) std.debug.assert(package.meta.id != invalid_package_id);
-
+ defer successFn(this, dependency_id, package.meta.id);
switch (preinstall) {
// Is this package already in the cache?
// We don't need to download the tarball, but we should enqueue dependencies
@@ -1888,6 +2164,7 @@ pub const PackageManager = struct {
.task_id = task_id,
.callback = undefined,
.allocator = this.allocator,
+ .package_manager = this,
};
const scope = this.scopeForPackageName(this.lockfile.str(package.name));
@@ -1934,7 +2211,7 @@ pub const PackageManager = struct {
const manifest: Npm.PackageManifest = manifest_;
loaded_manifest = manifest;
- if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp) {
+ if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp_for_manifest_cache_control) {
try this.manifests.put(this.allocator, @truncate(PackageNameHash, manifest.pkg.name.hash), manifest);
}
@@ -1947,7 +2224,7 @@ pub const PackageManager = struct {
}
// Was it recent enough to just load it without the network call?
- if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp) {
+ if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp_for_manifest_cache_control) {
return manifest;
}
}
@@ -1962,6 +2239,7 @@ pub const PackageManager = struct {
.callback = undefined,
.task_id = task_id,
.allocator = this.allocator,
+ .package_manager = this,
};
try network_task.forManifest(name, this.allocator, this.scopeForPackageName(name), loaded_manifest);
this.enqueueNetworkTask(network_task);
@@ -1984,6 +2262,25 @@ pub const PackageManager = struct {
this.network_task_fifo.writeItemAssumeCapacity(task);
}
+ const SuccessFn = fn (*PackageManager, PackageID, PackageID) void;
+ const FailFn = fn (*PackageManager, Dependency, PackageID, anyerror) void;
+ fn assignResolution(this: *PackageManager, dependency_id: PackageID, package_id: PackageID) void {
+ this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
+ }
+
+ fn assignRootResolution(this: *PackageManager, dependency_id: PackageID, package_id: PackageID) void {
+ if (this.dynamic_root_dependencies) |*dynamic| {
+ dynamic.items[dependency_id].resolution_id = package_id;
+ } else {
+ if (this.lockfile.buffers.resolutions.items.len > dependency_id) {
+ this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
+ } else {
+ // reaching this branch indicates a bug in dependency bookkeeping
+ bun.unreachablePanic("assignRootResolution: dependency_id: {d} out of bounds (package_id: {d})", .{ dependency_id, package_id });
+ }
+ }
+ }
+
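Passing `successFn` as a `comptime` parameter means each call site of `enqueueDependencyWithMainAndSuccessFn` gets its own compile-time specialization, with no function-pointer indirection at runtime. A minimal sketch of that technique:

const std = @import("std");

fn assignResolution(slot: *u32, value: u32) void {
    slot.* = value;
}

fn recordRootResolution(slot: *u32, value: u32) void {
    slot.* = value +% 1000; // stand-in for the dynamic-root bookkeeping
}

// Each comptime successFn yields a distinct specialization of resolveWith,
// the way the patch specializes enqueueing for root vs lockfile dependencies.
fn resolveWith(comptime successFn: fn (*u32, u32) void, slot: *u32, value: u32) void {
    successFn(slot, value);
}

pub fn main() void {
    var slot: u32 = 0;
    resolveWith(assignResolution, &slot, 7);
    std.debug.print("{d}\n", .{slot});
    resolveWith(recordRootResolution, &slot, 7);
    std.debug.print("{d}\n", .{slot});
}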
pub fn getOrPutResolvedPackage(
this: *PackageManager,
name_hash: PackageNameHash,
@@ -1992,6 +2289,7 @@ pub const PackageManager = struct {
behavior: Behavior,
dependency_id: PackageID,
resolution: PackageID,
+ comptime successFn: SuccessFn,
) !?ResolvedPackageResult {
if (resolution < this.lockfile.packages.len) {
return ResolvedPackageResult{ .package = this.lockfile.packages.get(resolution) };
@@ -2011,7 +2309,17 @@ pub const PackageManager = struct {
else => unreachable,
};
- return try getOrPutResolvedPackageWithFindResult(this, name_hash, name, version, dependency_id, behavior, manifest, find_result);
+ return try getOrPutResolvedPackageWithFindResult(
+ this,
+ name_hash,
+ name,
+ version,
+ dependency_id,
+ behavior,
+ manifest,
+ find_result,
+ successFn,
+ );
},
.folder => {
@@ -2021,12 +2329,12 @@ pub const PackageManager = struct {
switch (res) {
.err => |err| return err,
.package_id => |package_id| {
- this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
+ successFn(this, dependency_id, package_id);
return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id) };
},
.new_package_id => |package_id| {
- this.lockfile.buffers.resolutions.items[dependency_id] = package_id;
+ successFn(this, dependency_id, package_id);
return ResolvedPackageResult{ .package = this.lockfile.packages.get(package_id), .is_first_time = true };
},
}
@@ -2096,10 +2404,31 @@ pub const PackageManager = struct {
return &task.threadpool_task;
}
- inline fn enqueueDependency(this: *PackageManager, id: u32, dependency: Dependency, resolution: PackageID) !void {
+ pub inline fn enqueueDependency(this: *PackageManager, id: u32, dependency: Dependency, resolution: PackageID) !void {
return try this.enqueueDependencyWithMain(id, dependency, resolution, false);
}
+ pub inline fn enqueueMainDependency(this: *PackageManager, id: u32, dependency: Dependency, resolution: PackageID) !void {
+ return try this.enqueueDependencyWithMain(id, dependency, resolution, true);
+ }
+
+ pub fn dynamicRootDependencies(this: *PackageManager) *std.ArrayList(Dependency.Pair) {
+ if (this.dynamic_root_dependencies == null) {
+ const root_deps = this.lockfile.rootPackage().?.dependencies.get(this.lockfile.buffers.dependencies.items);
+
+ this.dynamic_root_dependencies = std.ArrayList(Dependency.Pair).initCapacity(this.allocator, root_deps.len) catch unreachable;
+ this.dynamic_root_dependencies.?.items.len = root_deps.len;
+ for (root_deps) |dep, i| {
+ this.dynamic_root_dependencies.?.items[i] = .{
+ .dependency = dep,
+ .resolution_id = invalid_package_id,
+ };
+ }
+ }
+
+ return &this.dynamic_root_dependencies.?;
+ }
+
pub fn writeYarnLock(this: *PackageManager) !void {
var printer = Lockfile.Printer{
.lockfile = this.lockfile,
@@ -2142,6 +2471,14 @@ pub const PackageManager = struct {
try tmpfile.promote(tmpname, std.fs.cwd().fd, "yarn.lock");
}
+ pub fn isRootDependency(this: *const PackageManager, id: PackageID) bool {
+ if (this.dynamic_root_dependencies != null) {
+ return false;
+ }
+
+ return this.root_dependency_list.contains(id);
+ }
+
fn enqueueDependencyWithMain(
this: *PackageManager,
id: u32,
@@ -2149,6 +2486,25 @@ pub const PackageManager = struct {
resolution: PackageID,
comptime is_main: bool,
) !void {
+ return this.enqueueDependencyWithMainAndSuccessFn(
+ id,
+ dependency,
+ resolution,
+ is_main,
+ assignResolution,
+ null,
+ );
+ }
+
+ pub fn enqueueDependencyWithMainAndSuccessFn(
+ this: *PackageManager,
+ id: u32,
+ dependency: Dependency,
+ resolution: PackageID,
+ comptime is_main: bool,
+ comptime successFn: SuccessFn,
+ comptime failFn: ?FailFn,
+ ) !void {
const name = dependency.name;
const name_hash = dependency.name_hash;
const version: Dependency.Version = dependency.version;
@@ -2156,7 +2512,7 @@ pub const PackageManager = struct {
if (comptime !is_main) {
// it might really be main
- if (!this.root_dependency_list.contains(id))
+ if (!this.isRootDependency(id))
if (!dependency.behavior.isEnabled(switch (dependency.version.tag) {
.folder => this.options.remote_package_features,
.dist_tag, .npm => this.options.remote_package_features,
@@ -2175,6 +2531,7 @@ pub const PackageManager = struct {
dependency.behavior,
id,
resolution,
+ successFn,
);
retry_with_new_resolve_result: while (true) {
@@ -2182,36 +2539,66 @@ pub const PackageManager = struct {
switch (err) {
error.DistTagNotFound => {
if (dependency.behavior.isRequired()) {
- this.log.addErrorFmt(
- null,
- logger.Loc.Empty,
- this.allocator,
- "package \"{s}\" with tag \"{s}\" not found, but package exists",
- .{
- this.lockfile.str(name),
- this.lockfile.str(version.value.dist_tag),
- },
- ) catch unreachable;
+ if (failFn) |fail| {
+ fail(
+ this,
+ dependency,
+ id,
+ err,
+ );
+ } else {
+ this.log.addErrorFmt(
+ null,
+ logger.Loc.Empty,
+ this.allocator,
+ "package \"{s}\" with tag \"{s}\" not found, but package exists",
+ .{
+ this.lockfile.str(name),
+ this.lockfile.str(version.value.dist_tag),
+ },
+ ) catch unreachable;
+ }
}
return;
},
error.NoMatchingVersion => {
if (dependency.behavior.isRequired()) {
- this.log.addErrorFmt(
- null,
- logger.Loc.Empty,
- this.allocator,
- "No version matching \"{s}\" found for specifier \"{s}\" (but package exists)",
- .{
- this.lockfile.str(version.literal),
- this.lockfile.str(name),
- },
- ) catch unreachable;
+ if (failFn) |fail| {
+ fail(
+ this,
+ dependency,
+ id,
+ err,
+ );
+ } else {
+ this.log.addErrorFmt(
+ null,
+ logger.Loc.Empty,
+ this.allocator,
+ "No version matching \"{s}\" found for specifier \"{s}\" (but package exists)",
+ .{
+ this.lockfile.str(version.literal),
+ this.lockfile.str(name),
+ },
+ ) catch unreachable;
+ }
}
return;
},
- else => return err,
+ else => {
+ if (failFn) |fail| {
+ fail(
+ this,
+ dependency,
+ id,
+ err,
+ );
+ return;
+ }
+
+ return err;
+ },
}
};
@@ -2251,7 +2638,7 @@ pub const PackageManager = struct {
const manifest: Npm.PackageManifest = manifest_;
loaded_manifest = manifest;
- if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp) {
+ if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp_for_manifest_cache_control) {
try this.manifests.put(this.allocator, @truncate(PackageNameHash, manifest.pkg.name.hash), manifest);
}
@@ -2267,6 +2654,7 @@ pub const PackageManager = struct {
dependency.behavior,
&loaded_manifest.?,
find_result,
+ successFn,
) catch null) |new_resolve_result| {
resolve_result_ = new_resolve_result;
_ = this.network_dedupe_map.remove(task_id);
@@ -2276,7 +2664,7 @@ pub const PackageManager = struct {
}
// Was it recent enough to just load it without the network call?
- if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp) {
+ if (this.options.enable.manifest_cache_control and manifest.pkg.public_max_age > this.timestamp_for_manifest_cache_control) {
_ = this.network_dedupe_map.remove(task_id);
continue :retry_from_manifests_ptr;
}
@@ -2309,7 +2697,8 @@ pub const PackageManager = struct {
manifest_entry_parse.value_ptr.* = TaskCallbackList{};
}
- try manifest_entry_parse.value_ptr.append(this.allocator, TaskCallbackContext{ .dependency = id });
+ const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency";
+ try manifest_entry_parse.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id));
}
return;
}
@@ -2324,6 +2713,7 @@ pub const PackageManager = struct {
dependency.behavior,
id,
resolution,
+ successFn,
) catch |err| brk: {
if (err == error.MissingPackageJSON) {
break :brk null;
@@ -2401,6 +2791,8 @@ pub const PackageManager = struct {
var lockfile = this.lockfile;
var dependency_queue = &lockfile.scratch.dependency_list_queue;
+ this.flushNetworkQueue();
+
while (dependency_queue.readItem()) |dependencies_list| {
var i: u32 = dependencies_list.off;
const end = dependencies_list.off + dependencies_list.len;
@@ -2459,6 +2851,10 @@ pub const PackageManager = struct {
}
}
+ this.drainDependencyList();
+ }
+
+ pub fn drainDependencyList(this: *PackageManager) void {
// Step 2. If there were cached dependencies, go through all of those but don't download the devDependencies for them.
this.flushDependencyQueue();
@@ -2469,11 +2865,68 @@ pub const PackageManager = struct {
this.pending_tasks += @truncate(u32, count);
this.total_tasks += @truncate(u32, count);
this.network_resolve_batch.push(this.network_tarball_batch);
+
HTTP.http_thread.schedule(this.network_resolve_batch);
this.network_tarball_batch = .{};
this.network_resolve_batch = .{};
}
+ fn processDependencyList(
+ this: *PackageManager,
+ dep_list: TaskCallbackList,
+ comptime Context: type,
+ ctx: Context,
+ comptime callbacks: anytype,
+ ) !void {
+ if (dep_list.items.len > 0) {
+ var dependency_list = dep_list;
+ var any_root = false;
+ for (dependency_list.items) |item| {
+ switch (item) {
+ .dependency => |dependency_id| {
+ const dependency = this.lockfile.buffers.dependencies.items[dependency_id];
+ const resolution = this.lockfile.buffers.resolutions.items[dependency_id];
+
+ try this.enqueueDependency(
+ dependency_id,
+ dependency,
+ resolution,
+ );
+ },
+
+ .root_dependency => |dependency_id| {
+ const pair = this.dynamicRootDependencies().items[dependency_id];
+ const dependency = pair.dependency;
+ const resolution = pair.resolution_id;
+
+ try this.enqueueDependencyWithMainAndSuccessFn(
+ dependency_id,
+ dependency,
+ resolution,
+ true,
+ assignRootResolution,
+ failRootResolution,
+ );
+
+ const new_resolution_id = this.dynamicRootDependencies().items[dependency_id].resolution_id;
+ if (new_resolution_id != pair.resolution_id) {
+ any_root = true;
+ }
+ },
+ else => unreachable,
+ }
+ }
+
+ if (comptime @TypeOf(callbacks.onResolve) != void) {
+ if (any_root) {
+ callbacks.onResolve(ctx);
+ }
+ }
+
+ dependency_list.deinit(this.allocator);
+ }
+ }
+
const CacheDir = struct { path: string, is_node_modules: bool };
pub fn fetchCacheDirectoryPath(
env_loader: *DotEnv.Loader,
@@ -2501,16 +2954,18 @@ pub const PackageManager = struct {
return CacheDir{ .is_node_modules = true, .path = Fs.FileSystem.instance.abs(&fallback_parts) };
}
- fn runTasks(
+ pub fn runTasks(
manager: *PackageManager,
comptime ExtractCompletionContext: type,
extract_ctx: ExtractCompletionContext,
- comptime callback_fn: anytype,
+ comptime callbacks: anytype,
comptime log_level: Options.LogLevel,
) anyerror!void {
var batch = ThreadPool.Batch{};
var has_updated_this_run = false;
+ var timestamp_this_tick: ?u32 = null;
+
while (manager.network_channel.tryReadItem() catch null) |task_| {
var task: *NetworkTask = task_;
manager.pending_tasks -|= 1;
@@ -2526,9 +2981,27 @@ pub const PackageManager = struct {
}
const response = task.http.response orelse {
- if (comptime log_level != .silent) {
+ const err = task.http.err orelse error.HTTPError;
+
+ if (@TypeOf(callbacks.onPackageManifestError) != void) {
+ if (manager.dynamic_root_dependencies) |*root_deps| {
+ var deps: []Dependency.Pair = root_deps.items;
+ for (deps) |*dep| {
+ if (strings.eql(manager.lockfile.str(dep.dependency.name), name.slice())) {
+ dep.failed = dep.failed orelse err;
+ }
+ }
+ }
+
+ callbacks.onPackageManifestError(
+ extract_ctx,
+ name.slice(),
+ err,
+ task.url_buf,
+ );
+ } else if (comptime log_level != .silent) {
const fmt = "\n<r><red>error<r>: {s} downloading package manifest <b>{s}<r>\n";
- const error_name: string = if (task.http.err) |err| std.mem.span(@errorName(err)) else "failed";
+ const error_name: string = std.mem.span(@errorName(err));
const args = .{ error_name, name.slice() };
if (comptime log_level.showProgress()) {
Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
@@ -2544,72 +3017,100 @@ pub const PackageManager = struct {
};
if (response.status_code > 399) {
- switch (response.status_code) {
- 404 => {
- if (comptime log_level != .silent) {
- const fmt = "\n<r><red>error<r>: package <b>\"{s}\"<r> not found <d>{s}{s} 404<r>\n";
- const args = .{
- name.slice(),
- task.http.url.displayHostname(),
- task.http.url.pathname,
- };
+ if (@TypeOf(callbacks.onPackageManifestError) != void) {
+ const err: PackageManifestError = switch (response.status_code) {
+ 400 => error.PackageManifestHTTP400,
+ 401 => error.PackageManifestHTTP401,
+ 402 => error.PackageManifestHTTP402,
+ 403 => error.PackageManifestHTTP403,
+ 404 => error.PackageManifestHTTP404,
+ 405...499 => error.PackageManifestHTTP4xx,
+ else => error.PackageManifestHTTP5xx,
+ };
- if (comptime log_level.showProgress()) {
- Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
- } else {
- Output.prettyErrorln(fmt, args);
- Output.flush();
+ if (manager.dynamic_root_dependencies) |*root_deps| {
+ var deps: []Dependency.Pair = root_deps.items;
+ for (deps) |*dep| {
+ if (strings.eql(manager.lockfile.str(dep.dependency.name), name.slice())) {
+ dep.failed = dep.failed orelse err;
}
}
- },
- 401 => {
- if (comptime log_level != .silent) {
- const fmt = "\n<r><red>error<r>: unauthorized <b>\"{s}\"<r> <d>{s}{s} 401<r>\n";
- const args = .{
- name.slice(),
- task.http.url.displayHostname(),
- task.http.url.pathname,
- };
+ }
- if (comptime log_level.showProgress()) {
- Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
- } else {
- Output.prettyErrorln(fmt, args);
- Output.flush();
+ callbacks.onPackageManifestError(
+ extract_ctx,
+ name.slice(),
+ err,
+ task.url_buf,
+ );
+ } else {
+ switch (response.status_code) {
+ 404 => {
+ if (comptime log_level != .silent) {
+ const fmt = "\n<r><red>error<r>: package <b>\"{s}\"<r> not found <d>{s}{s} 404<r>\n";
+ const args = .{
+ name.slice(),
+ task.http.url.displayHostname(),
+ task.http.url.pathname,
+ };
+
+ if (comptime log_level.showProgress()) {
+ Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
+ } else {
+ Output.prettyErrorln(fmt, args);
+ Output.flush();
+ }
}
- }
- },
- 403 => {
- if (comptime log_level != .silent) {
- const fmt = "\n<r><red>error<r>: forbidden while loading <b>\"{s}\"<r><d> 403<r>\n";
- const args = .{
- name.slice(),
- };
-
- if (comptime log_level.showProgress()) {
- Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
- } else {
- Output.prettyErrorln(fmt, args);
- Output.flush();
+ },
+ 401 => {
+ if (comptime log_level != .silent) {
+ const fmt = "\n<r><red>error<r>: unauthorized <b>\"{s}\"<r> <d>{s}{s} 401<r>\n";
+ const args = .{
+ name.slice(),
+ task.http.url.displayHostname(),
+ task.http.url.pathname,
+ };
+
+ if (comptime log_level.showProgress()) {
+ Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
+ } else {
+ Output.prettyErrorln(fmt, args);
+ Output.flush();
+ }
}
- }
- },
- else => {
- if (comptime log_level != .silent) {
- const fmt = "\n<r><red><b>GET<r><red> {s}<d> - {d}<r>\n";
- const args = .{
- task.http.client.url.href,
- response.status_code,
- };
-
- if (comptime log_level.showProgress()) {
- Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
- } else {
- Output.prettyErrorln(fmt, args);
- Output.flush();
+ },
+ 403 => {
+ if (comptime log_level != .silent) {
+ const fmt = "\n<r><red>error<r>: forbidden while loading <b>\"{s}\"<r><d> 403<r>\n";
+ const args = .{
+ name.slice(),
+ };
+
+ if (comptime log_level.showProgress()) {
+ Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
+ } else {
+ Output.prettyErrorln(fmt, args);
+ Output.flush();
+ }
}
- }
- },
+ },
+ else => {
+ if (comptime log_level != .silent) {
+ const fmt = "\n<r><red><b>GET<r><red> {s}<d> - {d}<r>\n";
+ const args = .{
+ task.http.client.url.href,
+ response.status_code,
+ };
+
+ if (comptime log_level.showProgress()) {
+ Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
+ } else {
+ Output.prettyErrorln(fmt, args);
+ Output.flush();
+ }
+ }
+ },
+ }
}
for (manager.package_json_updates) |*update| {
if (strings.eql(update.name, name.slice())) {
@@ -2635,9 +3136,14 @@ pub const PackageManager = struct {
if (manifest_req.loaded_manifest) |manifest| {
var entry = try manager.manifests.getOrPut(manager.allocator, manifest.pkg.name.hash);
entry.value_ptr.* = manifest;
- entry.value_ptr.*.pkg.public_max_age = @truncate(u32, @intCast(u64, @maximum(0, std.time.timestamp()))) + 300;
+
+ if (timestamp_this_tick == null) {
+ timestamp_this_tick = @truncate(u32, @intCast(u64, @maximum(0, std.time.timestamp()))) +| 300;
+ }
+
+ entry.value_ptr.*.pkg.public_max_age = timestamp_this_tick.?;
{
- Npm.PackageManifest.Serializer.save(entry.value_ptr, PackageManager.instance.getTemporaryDirectory(), PackageManager.instance.getCacheDirectory()) catch {};
+ Npm.PackageManifest.Serializer.save(entry.value_ptr, manager.getTemporaryDirectory(), manager.getCacheDirectory()) catch {};
}
var dependency_list_entry = manager.task_queue.getEntry(task.task_id).?;
@@ -2645,20 +3151,7 @@ pub const PackageManager = struct {
var dependency_list = dependency_list_entry.value_ptr.*;
dependency_list_entry.value_ptr.* = .{};
- if (dependency_list.items.len > 0) {
- for (dependency_list.items) |item| {
- var dependency = manager.lockfile.buffers.dependencies.items[item.dependency];
- var resolution = manager.lockfile.buffers.resolutions.items[item.dependency];
-
- try manager.enqueueDependency(
- item.dependency,
- dependency,
- resolution,
- );
- }
-
- dependency_list.deinit(manager.allocator);
- }
+ try manager.processDependencyList(dependency_list, ExtractCompletionContext, extract_ctx, callbacks);
manager.flushDependencyQueue();
continue;
@@ -2669,23 +3162,71 @@ pub const PackageManager = struct {
},
.extract => |extract| {
const response = task.http.response orelse {
- const fmt = "\n<r><red>error<r>: {s} downloading tarball <b>{s}@{s}<r>\n";
- const error_name: string = if (task.http.err) |err| std.mem.span(@errorName(err)) else "failed";
- const args = .{ error_name, extract.name.slice(), extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items) };
+ const err = task.http.err orelse error.TarballFailedToDownload;
- if (comptime log_level != .silent) {
- if (comptime log_level.showProgress()) {
- Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
- } else {
- Output.prettyErrorln(fmt, args);
- Output.flush();
+ if (@TypeOf(callbacks.onPackageDownloadError) != void) {
+ if (manager.dynamic_root_dependencies) |*root_deps| {
+ for (root_deps.items) |*dep| {
+ if (dep.resolution_id == extract.package_id) {
+ dep.failed = dep.failed orelse err;
+ }
+ }
+ }
+ callbacks.onPackageDownloadError(
+ extract_ctx,
+ extract.package_id,
+ extract.name.slice(),
+ extract.resolution,
+ err,
+ task.url_buf,
+ );
+ } else {
+ const fmt = "\n<r><red>error<r>: {s} downloading tarball <b>{s}@{s}<r>\n";
+ const error_name: string = std.mem.span(@errorName(err));
+ const args = .{ error_name, extract.name.slice(), extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items) };
+
+ if (comptime log_level != .silent) {
+ if (comptime log_level.showProgress()) {
+ Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
+ } else {
+ Output.prettyErrorln(fmt, args);
+ Output.flush();
+ }
}
}
+
continue;
};
if (response.status_code > 399) {
- if (comptime log_level != .silent) {
+ if (@TypeOf(callbacks.onPackageDownloadError) != void) {
+ const err = switch (response.status_code) {
+ 400 => error.TarballHTTP400,
+ 401 => error.TarballHTTP401,
+ 402 => error.TarballHTTP402,
+ 403 => error.TarballHTTP403,
+ 404 => error.TarballHTTP404,
+ 405...499 => error.TarballHTTP4xx,
+ else => error.TarballHTTP5xx,
+ };
+
+ if (manager.dynamic_root_dependencies) |*root_deps| {
+ for (root_deps.items) |*dep| {
+ if (dep.resolution_id == extract.package_id) {
+ dep.failed = dep.failed orelse err;
+ }
+ }
+ }
+
+ callbacks.onPackageDownloadError(
+ extract_ctx,
+ extract.package_id,
+ extract.name.slice(),
+ extract.resolution,
+ err,
+ task.url_buf,
+ );
+ } else if (comptime log_level != .silent) {
const fmt = "\n<r><red><b>GET<r><red> {s}<d> - {d}<r>\n";
const args = .{
task.http.client.url.href,
@@ -2702,6 +3243,7 @@ pub const PackageManager = struct {
Output.flush();
}
}
+
continue;
}
@@ -2740,11 +3282,30 @@ pub const PackageManager = struct {
switch (task.tag) {
.package_manifest => {
if (task.status == .fail) {
- if (comptime log_level != .silent) {
+ const name = task.request.package_manifest.name;
+ const err = task.err orelse error.Failed;
+
+ if (@TypeOf(callbacks.onPackageManifestError) != void) {
+ if (manager.dynamic_root_dependencies) |*root_deps| {
+ var deps: []Dependency.Pair = root_deps.items;
+ for (deps) |*dep| {
+ if (strings.eql(manager.lockfile.str(dep.dependency.name), name.slice())) {
+ dep.failed = dep.failed orelse err;
+ }
+ }
+ }
+
+ callbacks.onPackageManifestError(
+ extract_ctx,
+ name.slice(),
+ err,
+ task.request.package_manifest.network.url_buf,
+ );
+ } else if (comptime log_level != .silent) {
const fmt = "\n<r><red>rerror<r>: {s} parsing package manifest for <b>{s}<r>";
- const error_name: string = if (task.err != null) std.mem.span(@errorName(task.err.?)) else @as(string, "Failed");
+ const error_name: string = @errorName(err);
- const args = .{ error_name, task.request.package_manifest.name.slice() };
+ const args = .{ error_name, name.slice() };
if (comptime log_level.showProgress()) {
Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
} else {
@@ -2764,20 +3325,7 @@ pub const PackageManager = struct {
var dependency_list = dependency_list_entry.value_ptr.*;
dependency_list_entry.value_ptr.* = .{};
- if (dependency_list.items.len > 0) {
- for (dependency_list.items) |item| {
- var dependency = manager.lockfile.buffers.dependencies.items[item.dependency];
- var resolution = manager.lockfile.buffers.resolutions.items[item.dependency];
-
- try manager.enqueueDependency(
- item.dependency,
- dependency,
- resolution,
- );
- }
-
- dependency_list.deinit(manager.allocator);
- }
+ try manager.processDependencyList(dependency_list, ExtractCompletionContext, extract_ctx, callbacks);
if (comptime log_level.showProgress()) {
if (!has_updated_this_run) {
@@ -2788,9 +3336,28 @@ pub const PackageManager = struct {
},
.extract => {
if (task.status == .fail) {
- if (comptime log_level != .silent) {
+ const err = task.err orelse error.TarballFailedToExtract;
+ if (@TypeOf(callbacks.onPackageDownloadError) != void) {
+ if (manager.dynamic_root_dependencies) |*root_deps| {
+ var deps: []Dependency.Pair = root_deps.items;
+ for (deps) |*dep| {
+ if (dep.resolution_id == task.request.extract.tarball.package_id) {
+ dep.failed = dep.failed orelse err;
+ }
+ }
+ }
+
+ callbacks.onPackageDownloadError(
+ extract_ctx,
+ task.request.extract.tarball.package_id,
+ task.request.extract.tarball.name.slice(),
+ task.request.extract.tarball.resolution,
+ err,
+ task.request.extract.network.url_buf,
+ );
+ } else if (comptime log_level != .silent) {
const fmt = "<r><red>error<r>: {s} extracting tarball for <b>{s}<r>";
- const error_name: string = if (task.err != null) std.mem.span(@errorName(task.err.?)) else @as(string, "Failed");
+ const error_name: string = @errorName(err);
const args = .{
error_name,
task.request.extract.tarball.name.slice(),
@@ -2809,10 +3376,11 @@ pub const PackageManager = struct {
}
const package_id = task.request.extract.tarball.package_id;
manager.extracted_count += 1;
+ bun.Analytics.Features.extracted_packages = true;
manager.setPreinstallState(package_id, manager.lockfile, .done);
- if (comptime ExtractCompletionContext != void) {
- callback_fn(extract_ctx, package_id, comptime log_level);
+ if (comptime @TypeOf(callbacks.onExtract) != void) {
+ callbacks.onExtract(extract_ctx, package_id, comptime log_level);
}
if (comptime log_level.showProgress()) {
@@ -2839,7 +3407,7 @@ pub const PackageManager = struct {
manager.network_resolve_batch = .{};
if (comptime log_level.showProgress()) {
- if (comptime ExtractCompletionContext == void) {
+ if (comptime ExtractCompletionContext == void or (@hasField(@TypeOf(callbacks), "progress_bar") and callbacks.progress_bar == true)) {
const completed_items = manager.total_tasks - manager.pending_tasks;
if (completed_items != manager.downloads_node.?.unprotected_completed_items or has_updated_this_run) {
manager.downloads_node.?.setCompletedItems(completed_items);
@@ -3598,7 +4166,7 @@ pub const PackageManager = struct {
return try initWithCLI(_ctx, package_json_file_, cli);
}
- fn initWithCLI(
+ pub fn initWithCLI(
ctx: Command.Context,
package_json_file_: ?std.fs.File,
cli: CommandLineArguments,
@@ -3748,7 +4316,132 @@ pub const PackageManager = struct {
ctx.install,
);
- manager.timestamp = @truncate(u32, @intCast(u64, @maximum(std.time.timestamp(), 0)));
+ manager.timestamp_for_manifest_cache_control = @truncate(u32, @intCast(u64, @maximum(std.time.timestamp(), 0)));
+ return manager;
+ }
+
+ pub fn initWithRuntime(
+ log: *logger.Log,
+ bun_install: ?*Api.BunInstall,
+ allocator: std.mem.Allocator,
+ cli: CommandLineArguments,
+ env_loader: *DotEnv.Loader,
+ ) !*PackageManager {
+ if (env_loader.map.get("BUN_INSTALL_VERBOSE") != null) {
+ PackageManager.verbose_install = true;
+ }
+
+ var cpu_count = @truncate(u32, ((try std.Thread.getCpuCount()) + 1));
+
+ if (env_loader.map.get("GOMAXPROCS")) |max_procs| {
+ if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count_| {
+ cpu_count = @minimum(cpu_count, cpu_count_);
+ } else |_| {}
+ }
+
+ var manager = &instance;
+ var root_dir = try Fs.FileSystem.instance.fs.readDirectory(
+ Fs.FileSystem.instance.top_level_dir,
+ null,
+ );
+ // var progress = Progress{};
+ // var node = progress.start(name: []const u8, estimated_total_items: usize)
+ manager.* = PackageManager{
+ .options = .{},
+ .network_task_fifo = NetworkQueue.init(),
+ .env_loader = env_loader,
+ .allocator = allocator,
+ .log = log,
+ .root_dir = &root_dir.entries,
+ .env = env_loader,
+ .cpu_count = cpu_count,
+ .thread_pool = ThreadPool.init(.{
+ .max_threads = cpu_count,
+ }),
+ .resolve_tasks = TaskChannel.init(),
+ .lockfile = undefined,
+ .root_package_json_file = undefined,
+ .waiter = try Waker.init(allocator),
+ };
+ manager.lockfile = try allocator.create(Lockfile);
+
+ if (Output.enable_ansi_colors_stderr) {
+ manager.progress = Progress{};
+ manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
+ manager.root_progress_node = manager.progress.start("", 0);
+ manager.root_download_node = manager.root_progress_node.start(ProgressStrings.download(), 0);
+ }
+
+ if (!manager.options.enable.cache) {
+ manager.options.enable.manifest_cache = false;
+ manager.options.enable.manifest_cache_control = false;
+ }
+
+ if (env_loader.map.get("BUN_MANIFEST_CACHE")) |manifest_cache| {
+ if (strings.eqlComptime(manifest_cache, "1")) {
+ manager.options.enable.manifest_cache = true;
+ manager.options.enable.manifest_cache_control = false;
+ } else if (strings.eqlComptime(manifest_cache, "2")) {
+ manager.options.enable.manifest_cache = true;
+ manager.options.enable.manifest_cache_control = true;
+ } else {
+ manager.options.enable.manifest_cache = false;
+ manager.options.enable.manifest_cache_control = false;
+ }
+ }
+
+ try manager.options.load(
+ allocator,
+ log,
+ env_loader,
+ cli,
+ bun_install,
+ );
+
+ manager.timestamp_for_manifest_cache_control = @truncate(
+ u32,
+ @intCast(
+ u64,
+ @maximum(
+ std.time.timestamp(),
+ 0,
+ ),
+ ),
+ // When using "bun install", we check for updates with a 300 second cache.
+ // When using bun, we only do staleness checks once per day
+ ) -| std.time.s_per_day;
+
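Backdating the timestamp by a day is what extends manifest freshness for the runtime: the staleness check is `manifest.pkg.public_max_age > timestamp_for_manifest_cache_control`, so subtracting `s_per_day` makes any manifest refreshed within the last 24 hours pass, while `bun install` keeps its 300-second window. The `-|` saturating subtraction guards against wrap-around for small timestamps. A worked example:

const std = @import("std");

pub fn main() void {
    const now = @truncate(u32, @intCast(u64, @maximum(std.time.timestamp(), 0)));
    const cutoff = now -| std.time.s_per_day; // saturates at 0 instead of wrapping
    // A manifest cached 12 hours ago with a 300s max-age: stale for
    // `bun install` (max_age < now) but still fresh for the runtime.
    const max_age = (now - 12 * std.time.s_per_hour) + 300;
    std.debug.print("fresh for runtime: {}\n", .{max_age > cutoff});
}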
+ manager.lockfile = brk: {
+ var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+
+ if (root_dir.entries.hasComptimeQuery("bun.lockb")) {
+ var parts = [_]string{
+ "./bun.lockb",
+ };
+ var lockfile_path = Path.joinAbsStringBuf(
+ Fs.FileSystem.instance.top_level_dir,
+ &buf,
+ &parts,
+ .auto,
+ );
+ buf[lockfile_path.len] = 0;
+ var lockfile_path_z = std.meta.assumeSentinel(buf[0..lockfile_path.len], 0);
+
+ const result = manager.lockfile.loadFromDisk(
+ allocator,
+ log,
+ lockfile_path_z,
+ );
+
+ if (result == .ok) {
+ break :brk result.ok;
+ }
+ }
+
+ try manager.lockfile.initEmpty(allocator);
+ break :brk manager.lockfile;
+ };
+
return manager;
}
@@ -4516,7 +5209,24 @@ pub const PackageManager = struct {
}
var updates = UpdateRequest.parse(ctx.allocator, ctx.log, manager.options.positionals[1..], &update_requests, op);
+ try updatePackageJSONAndInstallWithManagerWithUpdates(
+ ctx,
+ manager,
+ updates,
+ false,
+ op,
+ log_level,
+ );
+ }
+ pub fn updatePackageJSONAndInstallWithManagerWithUpdates(
+ ctx: Command.Context,
+ manager: *PackageManager,
+ updates: []UpdateRequest,
+ auto_free: bool,
+ comptime op: Lockfile.Package.Diff.Op,
+ comptime log_level: Options.LogLevel,
+ ) !void {
if (ctx.log.errors > 0) {
if (comptime log_level != .silent) {
if (Output.enable_ansi_colors) {
@@ -4672,7 +5382,9 @@ pub const PackageManager = struct {
var new_package_json_source = try ctx.allocator.dupe(u8, package_json_writer.ctx.writtenWithoutTrailingZero());
// Do not free the old package.json AST nodes
- _ = JSAst.Expr.Data.Store.toOwnedSlice();
+ var old_ast_nodes = JSAst.Expr.Data.Store.toOwnedSlice();
+ // ...unless auto_free is set, in which case they are freed once the install completes
+ defer if (auto_free) bun.default_allocator.free(old_ast_nodes);
try installWithManager(ctx, manager, new_package_json_source, log_level);
@@ -5054,28 +5766,15 @@ pub const PackageManager = struct {
switch (resolution.tag) {
.npm => {
std.debug.assert(resolution.value.npm.url.len() > 0);
-
- const task_id = Task.Id.forNPMPackage(Task.Tag.extract, name, resolution.value.npm.version);
- var task_queue = this.manager.task_queue.getOrPut(this.manager.allocator, task_id) catch unreachable;
- if (!task_queue.found_existing) {
- task_queue.value_ptr.* = .{};
- }
-
- task_queue.value_ptr.append(
- this.manager.allocator,
+ this.manager.enqueuePackageForDownload(
+ name,
+ package_id,
+ resolution.value.npm.version,
+ resolution.value.npm.url.slice(buf),
.{
.node_modules_folder = @intCast(u32, this.node_modules_folder.fd),
},
- ) catch unreachable;
-
- if (!task_queue.found_existing) {
- if (this.manager.generateNetworkTaskForTarball(task_id, resolution.value.npm.url.slice(buf), this.lockfile.packages.get(package_id)) catch unreachable) |task| {
- task.schedule(&this.manager.network_tarball_batch);
- if (this.manager.network_tarball_batch.len > 0) {
- _ = this.manager.scheduleNetworkTasks();
- }
- }
- }
+ );
},
else => {
Output.prettyErrorln(
@@ -5128,6 +5827,35 @@ pub const PackageManager = struct {
}
};
+ pub fn enqueuePackageForDownload(
+ this: *PackageManager,
+ name: []const u8,
+ package_id: PackageID,
+ version: Semver.Version,
+ url: []const u8,
+ task_context: TaskCallbackContext,
+ ) void {
+ const task_id = Task.Id.forNPMPackage(Task.Tag.extract, name, version);
+ var task_queue = this.task_queue.getOrPut(this.allocator, task_id) catch unreachable;
+ if (!task_queue.found_existing) {
+ task_queue.value_ptr.* = .{};
+ }
+
+ task_queue.value_ptr.append(
+ this.allocator,
+ task_context,
+ ) catch unreachable;
+
+ if (!task_queue.found_existing) {
+ if (this.generateNetworkTaskForTarball(task_id, url, this.lockfile.packages.get(package_id)) catch unreachable) |task| {
+ task.schedule(&this.network_tarball_batch);
+ if (this.network_tarball_batch.len > 0) {
+ _ = this.scheduleNetworkTasks();
+ }
+ }
+ }
+ }
+
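`found_existing` from `getOrPut` does double duty here: every caller appends its callback context, but only the first caller for a given `task_id` creates and schedules the network task, so concurrent requests for the same tarball are deduplicated into one download. A standalone sketch of that idiom:

const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    var task_queue = std.AutoHashMap(u64, std.ArrayListUnmanaged(u32)).init(allocator);
    defer task_queue.deinit();

    const task_id: u64 = 0xBEEF; // hash of (tag, name, version) in the real code
    var i: u32 = 0;
    while (i < 3) : (i += 1) {
        var entry = try task_queue.getOrPut(task_id);
        if (!entry.found_existing) {
            entry.value_ptr.* = .{};
            std.debug.print("first caller schedules the download\n", .{});
        }
        // every caller, first or not, records its callback context
        try entry.value_ptr.append(allocator, i);
    }
    var entry = task_queue.getEntry(task_id).?;
    defer entry.value_ptr.deinit(allocator);
    std.debug.print("{d} callbacks queued for one download\n", .{entry.value_ptr.items.len});
}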
pub fn installPackages(
this: *PackageManager,
lockfile_: *Lockfile,
@@ -5258,7 +5986,12 @@ pub const PackageManager = struct {
try this.runTasks(
*PackageInstaller,
&installer,
- PackageInstaller.installEnqueuedPackages,
+ .{
+ .onExtract = PackageInstaller.installEnqueuedPackages,
+ .onResolve = void{},
+ .onPackageManifestError = void{},
+ .onPackageDownloadError = void{},
+ },
log_level,
);
if (!installer.options.do.install_packages) return error.InstallFailed;
@@ -5272,7 +6005,12 @@ pub const PackageManager = struct {
try this.runTasks(
*PackageInstaller,
&installer,
- PackageInstaller.installEnqueuedPackages,
+ .{
+ .onExtract = PackageInstaller.installEnqueuedPackages,
+ .onResolve = void{},
+ .onPackageManifestError = void{},
+ .onPackageDownloadError = void{},
+ },
log_level,
);
if (!installer.options.do.install_packages) return error.InstallFailed;
@@ -5282,7 +6020,12 @@ pub const PackageManager = struct {
try this.runTasks(
*PackageInstaller,
&installer,
- PackageInstaller.installEnqueuedPackages,
+ .{
+ .onExtract = PackageInstaller.installEnqueuedPackages,
+ .onResolve = void{},
+ .onPackageManifestError = void{},
+ .onPackageDownloadError = void{},
+ },
log_level,
);
}
@@ -5386,6 +6129,31 @@ pub const PackageManager = struct {
manager.options.bin_path = std.meta.assumeSentinel(try FileSystem.instance.dirname_store.append([:0]u8, result_), 0);
}
+ pub fn startProgressBarIfNone(manager: *PackageManager) void {
+ if (manager.downloads_node == null) {
+ manager.startProgressBar();
+ }
+ }
+ pub fn startProgressBar(manager: *PackageManager) void {
+ manager.downloads_node = manager.progress.start(ProgressStrings.download(), 0);
+ manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
+ manager.setNodeName(manager.downloads_node.?, ProgressStrings.download_no_emoji_, ProgressStrings.download_emoji, true);
+ manager.downloads_node.?.setEstimatedTotalItems(manager.total_tasks + manager.extracted_count);
+ manager.downloads_node.?.setCompletedItems(manager.total_tasks - manager.pending_tasks);
+ manager.downloads_node.?.activate();
+ manager.progress.refresh();
+ }
+
+ pub fn endProgressBar(manager: *PackageManager) void {
+ var downloads_node = manager.downloads_node orelse return;
+ downloads_node.setEstimatedTotalItems(downloads_node.unprotected_estimated_total_items);
+ downloads_node.setCompletedItems(downloads_node.unprotected_estimated_total_items);
+ manager.progress.refresh();
+ manager.progress.root.end();
+ manager.progress = .{};
+ manager.downloads_node = null;
+ }
+
fn installWithManager(
ctx: Command.Context,
manager: *PackageManager,
@@ -5637,13 +6405,7 @@ pub const PackageManager = struct {
}
if (comptime log_level.showProgress()) {
- manager.downloads_node = manager.progress.start(ProgressStrings.download(), 0);
- manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
- manager.setNodeName(manager.downloads_node.?, ProgressStrings.download_no_emoji_, ProgressStrings.download_emoji, true);
- manager.downloads_node.?.setEstimatedTotalItems(manager.total_tasks + manager.extracted_count);
- manager.downloads_node.?.setCompletedItems(manager.total_tasks - manager.pending_tasks);
- manager.downloads_node.?.activate();
- manager.progress.refresh();
+ manager.startProgressBar();
} else if (comptime log_level != .silent) {
Output.prettyErrorln(" Resolving dependencies", .{});
Output.flush();
@@ -5651,17 +6413,17 @@ pub const PackageManager = struct {
{
while (manager.pending_tasks > 0) : (manager.sleep()) {
- try manager.runTasks(void, void{}, null, log_level);
+ try manager.runTasks(void, void{}, .{
+ .onExtract = void{},
+ .onResolve = void{},
+ .onPackageManifestError = void{},
+ .onPackageDownloadError = void{},
+ }, log_level);
}
}
if (comptime log_level.showProgress()) {
- manager.downloads_node.?.setEstimatedTotalItems(manager.downloads_node.?.unprotected_estimated_total_items);
- manager.downloads_node.?.setCompletedItems(manager.downloads_node.?.unprotected_estimated_total_items);
- manager.progress.refresh();
- manager.progress.root.end();
- manager.progress = .{};
- manager.downloads_node = null;
+ manager.endProgressBar();
} else if (comptime log_level != .silent) {
Output.prettyErrorln(" Resolved, downloaded and extracted [{d}]", .{manager.total_tasks});
Output.flush();
@@ -5986,3 +6748,13 @@ test "UpdateRequests.parse" {
try std.testing.expectEqualStrings(reqs[5].version.literal.slice("bing@1.0.0"), "latest");
try std.testing.expectEqual(updates.len, 6);
}
+
+pub const PackageManifestError = error{
+ PackageManifestHTTP400,
+ PackageManifestHTTP401,
+ PackageManifestHTTP402,
+ PackageManifestHTTP403,
+ PackageManifestHTTP404,
+ PackageManifestHTTP4xx,
+ PackageManifestHTTP5xx,
+};
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index cca34b20b..dfc49ddd0 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -14,6 +14,7 @@ const JSLexer = @import("../js_lexer.zig");
const logger = @import("../logger.zig");
const js_parser = @import("../js_parser.zig");
+const Expr = @import("../js_ast.zig").Expr;
const json_parser = @import("../json_parser.zig");
const JSPrinter = @import("../js_printer.zig");
@@ -82,6 +83,8 @@ const Crypto = @import("../sha.zig").Hashers;
pub const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8;
const zero_hash = std.mem.zeroes(MetaHash);
+const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
+
pub const ExternalStringBuilder = StructBuilder.Builder(ExternalString);
pub const SmallExternalStringList = ExternalSlice(String);
@@ -1661,7 +1664,8 @@ pub const StringBuilder = struct {
}
}
- pub fn allocatedSlice(this: *StringBuilder) ![]u8 {
+ pub fn allocatedSlice(this: *StringBuilder) []const u8 {
+ if (this.ptr == null) return "";
return this.ptr.?[0..this.cap];
}
@@ -1796,7 +1800,7 @@ pub const StringBuffer = std.ArrayListUnmanaged(u8);
pub const ExternalStringBuffer = std.ArrayListUnmanaged(ExternalString);
pub const Package = extern struct {
- const DependencyGroup = struct {
+ pub const DependencyGroup = struct {
prop: string,
field: string,
behavior: Behavior,
@@ -1933,6 +1937,125 @@ pub const Package = extern struct {
return new_package.meta.id;
}
+ pub fn fromPackageJSON(
+ allocator: std.mem.Allocator,
+ lockfile: *Lockfile,
+ log: *logger.Log,
+ package_json: *PackageJSON,
+ comptime features: Features,
+ ) !Lockfile.Package {
+ var package = Lockfile.Package{};
+
+ // var string_buf = package_json;
+
+ var string_builder = lockfile.stringBuilder();
+
+ var total_dependencies_count: u32 = 0;
+ // var bin_extern_strings_count: u32 = 0;
+
+ // --- Counting
+ {
+ string_builder.count(package_json.name);
+ string_builder.count(package_json.version);
+ var dependencies = package_json.dependencies.map.values();
+ for (dependencies) |dep| {
+ if (dep.behavior.isEnabled(features)) {
+ dep.count(package_json.dependencies.source_buf, @TypeOf(&string_builder), &string_builder);
+ total_dependencies_count += 1;
+ }
+ }
+ }
+
+ // string_builder.count(manifest.str(package_version_ptr.tarball_url));
+
+ try string_builder.allocate();
+ defer string_builder.clamp();
+ // var extern_strings_list = &lockfile.buffers.extern_strings;
+ var dependencies_list = &lockfile.buffers.dependencies;
+ var resolutions_list = &lockfile.buffers.resolutions;
+ try dependencies_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count);
+ try resolutions_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count);
+ // try extern_strings_list.ensureUnusedCapacity(lockfile.allocator, bin_extern_strings_count);
+ // extern_strings_list.items.len += bin_extern_strings_count;
+
+ // -- Cloning
+ {
+ const package_name: ExternalString = string_builder.append(ExternalString, package_json.name);
+ package.name_hash = package_name.hash;
+ package.name = package_name.value;
+ var package_version = string_builder.append(String, package_json.version);
+ var buf = string_builder.allocatedSlice();
+
+ const version: Dependency.Version = brk: {
+ if (package_json.version.len > 0) {
+ const sliced = package_version.sliced(buf);
+ const name = package.name.slice(buf);
+ if (Dependency.parse(allocator, name, &sliced, log)) |dep| {
+ break :brk dep;
+ }
+ }
+
+ break :brk Dependency.Version{};
+ };
+
+ if (version.tag == .npm and version.value.npm.isExact()) {
+ package.resolution = Resolution{
+ .value = .{
+ .npm = .{
+ .version = version.value.npm.toVersion(),
+ .url = .{},
+ },
+ },
+ .tag = .npm,
+ };
+ } else {
+ package.resolution = Resolution{
+ .value = .{
+ .root = {},
+ },
+ .tag = .root,
+ };
+ }
+ const total_len = dependencies_list.items.len + total_dependencies_count;
+ std.debug.assert(dependencies_list.items.len == resolutions_list.items.len);
+
+ var dependencies: []Dependency = dependencies_list.items.ptr[dependencies_list.items.len..total_len];
+ std.mem.set(Dependency, dependencies, Dependency{});
+
+ const package_dependencies = package_json.dependencies.map.values();
+ const source_buf = package_json.dependencies.source_buf;
+ for (package_dependencies) |dep| {
+ if (!dep.behavior.isEnabled(features)) continue;
+
+ dependencies[0] = try dep.clone(source_buf, @TypeOf(&string_builder), &string_builder);
+ dependencies = dependencies[1..];
+ if (dependencies.len == 0) break;
+ }
+
+ // We lose the bin info here
+ // package.bin = package_version.bin.clone(string_buf, manifest.extern_strings_bin_entries, extern_strings_list.items, extern_strings_slice, @TypeOf(&string_builder), &string_builder);
+ // and the integrity hash
+ // package.meta.integrity = package_version.integrity;
+
+ package.meta.arch = package_json.arch;
+ package.meta.os = package_json.os;
+
+ package.dependencies.off = @truncate(u32, dependencies_list.items.len);
+ package.dependencies.len = total_dependencies_count - @truncate(u32, dependencies.len);
+ package.resolutions.off = package.dependencies.off;
+ package.resolutions.len = package.dependencies.len;
+
+ const new_length = package.dependencies.len + dependencies_list.items.len;
+
+ std.mem.set(PackageID, resolutions_list.items.ptr[package.dependencies.off .. package.dependencies.off + package.dependencies.len], invalid_package_id);
+
+ dependencies_list.items = dependencies_list.items.ptr[0..new_length];
+ resolutions_list.items = resolutions_list.items.ptr[0..new_length];
+
+ return package;
+ }
+ }
+
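`fromPackageJSON` follows the lockfile's two-pass string-builder protocol: count every string that will be kept, allocate one contiguous buffer, then append so each string becomes an offset into that buffer. A heavily simplified sketch of the shape of that protocol (the real builder also hashes and deduplicates):

const std = @import("std");

const SketchBuilder = struct {
    cap: usize = 0,
    len: usize = 0,
    buf: []u8 = undefined, // valid only after allocate()

    fn count(this: *SketchBuilder, s: []const u8) void {
        this.cap += s.len;
    }
    fn allocate(this: *SketchBuilder, allocator: std.mem.Allocator) !void {
        this.buf = try allocator.alloc(u8, this.cap);
    }
    fn append(this: *SketchBuilder, s: []const u8) []const u8 {
        std.mem.copy(u8, this.buf[this.len..], s);
        const out = this.buf[this.len .. this.len + s.len];
        this.len += s.len;
        return out;
    }
};

pub fn main() !void {
    var builder = SketchBuilder{};
    builder.count("left-pad"); // pass 1: count everything
    builder.count("1.3.0");
    try builder.allocate(std.heap.page_allocator); // allocate exactly once
    const name = builder.append("left-pad"); // pass 2: append into the buffer
    const version = builder.append("1.3.0");
    std.debug.print("{s}@{s}\n", .{ name, version });
}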
pub fn fromNPM(
allocator: std.mem.Allocator,
lockfile: *Lockfile,
@@ -2250,6 +2373,30 @@ pub const Package = extern struct {
Global.exit(1);
};
+ try parseWithJSON(
+ package,
+ lockfile,
+ allocator,
+ log,
+ source,
+ json,
+ ResolverContext,
+ resolver,
+ features,
+ );
+ }
+
+ pub fn parseWithJSON(
+ package: *Lockfile.Package,
+ lockfile: *Lockfile,
+ allocator: std.mem.Allocator,
+ log: *logger.Log,
+ source: logger.Source,
+ json: Expr,
+ comptime ResolverContext: type,
+ resolver: ResolverContext,
+ comptime features: Features,
+ ) !void {
var string_builder = lockfile.stringBuilder();
var total_dependencies_count: u32 = 0;
@@ -3092,3 +3239,37 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaH
return digest;
}
+
+pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Version) ?PackageID {
+ const name_hash = bun.hash(package_name);
+ const entry = this.package_index.get(name_hash) orelse return null;
+ const can_satisfy = version.tag == .npm;
+
+ switch (entry) {
+ .PackageID => |id| {
+ const resolutions = this.packages.items(.resolution);
+
+ if (can_satisfy and version.value.npm.satisfies(resolutions[id].value.npm.version)) {
+ return id;
+ }
+ },
+ .PackageIDMultiple => |multi_| {
+ const multi = std.mem.span(multi_);
+ const resolutions = this.packages.items(.resolution);
+
+ for (multi) |id| {
+ if (comptime Environment.isDebug or Environment.isTest) {
+ std.debug.assert(id != invalid_package_id);
+ }
+
+ if (id == invalid_package_id - 1) return null;
+
+ if (can_satisfy and version.value.npm.satisfies(resolutions[id].value.npm.version)) {
+ return id;
+ }
+ }
+ },
+ }
+
+ return null;
+}
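One subtlety in the `PackageIDMultiple` branch: the id list is scanned until a terminator, and `invalid_package_id - 1` appears to mark the end of the live entries (my reading of the code; the sentinel is not documented here). A reduced sketch of that scan:

const std = @import("std");

const invalid_package_id: u32 = std.math.maxInt(u32);

fn firstMatch(ids: []const u32, comptime satisfies: fn (u32) bool) ?u32 {
    for (ids) |id| {
        if (id == invalid_package_id - 1) return null; // list terminator
        if (satisfies(id)) return id;
    }
    return null;
}

fn wantsSeven(id: u32) bool {
    return id == 7;
}

pub fn main() void {
    const ids = [_]u32{ 3, 7, invalid_package_id - 1 };
    std.debug.print("{d}\n", .{firstMatch(&ids, wantsSeven) orelse 0});
}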
diff --git a/src/install/npm.zig b/src/install/npm.zig
index 1ff5b0c2f..2be84624c 100644
--- a/src/install/npm.zig
+++ b/src/install/npm.zig
@@ -163,6 +163,7 @@ pub const Registry = struct {
log: *logger.Log,
package_name: string,
loaded_manifest: ?PackageManifest,
+ package_manager: *PackageManager,
) !PackageVersionResponse {
switch (response.status_code) {
400 => return error.BadRequest,
@@ -193,7 +194,6 @@ pub const Registry = struct {
}
}
- initializeStore();
var new_etag_buf: [64]u8 = undefined;
if (new_etag.len < new_etag_buf.len) {
@@ -210,8 +210,8 @@ pub const Registry = struct {
new_etag,
@truncate(u32, @intCast(u64, @maximum(0, std.time.timestamp()))) + 300,
)) |package| {
- if (PackageManager.instance.options.enable.manifest_cache) {
- PackageManifest.Serializer.save(&package, PackageManager.instance.getTemporaryDirectory(), PackageManager.instance.getCacheDirectory()) catch {};
+ if (package_manager.options.enable.manifest_cache) {
+ PackageManifest.Serializer.save(&package, package_manager.getTemporaryDirectory(), package_manager.getCacheDirectory()) catch {};
}
return PackageVersionResponse{ .fresh = package };
diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig
index bf9d5a78b..b48600747 100644
--- a/src/install/resolvers/folder_resolver.zig
+++ b/src/install/resolvers/folder_resolver.zig
@@ -12,6 +12,7 @@ const IdentityContext = @import("../../identity_context.zig").IdentityContext;
const strings = @import("strings");
const Resolution = @import("../resolution.zig").Resolution;
const String = @import("../semver.zig").String;
+const Semver = @import("../semver.zig");
const bun = @import("../../global.zig");
const Dependency = @import("../dependency.zig");
pub const FolderResolution = union(Tag) {
@@ -50,6 +51,24 @@ pub const FolderResolution = union(Tag) {
pub const Resolver = NewResolver(Resolution.Tag.folder);
pub const SymlinkResolver = NewResolver(Resolution.Tag.symlink);
+ pub const CacheFolderResolver = struct {
+ folder_path: []const u8 = "",
+ version: Semver.Version,
+
+ pub fn resolve(this: @This(), comptime Builder: type, _: Builder, _: JSAst.Expr) !Resolution {
+ return Resolution{
+ .tag = Resolution.Tag.npm,
+ .value = .{
+ .npm = .{
+ .version = this.version,
+ .url = String.init("", ""),
+ },
+ },
+ };
+ }
+
+ pub fn count(_: @This(), comptime Builder: type, _: Builder, _: JSAst.Expr) void {}
+ };
pub fn normalizePackageJSONPath(global_or_relative: GlobalOrRelative, joined: *[bun.MAX_PATH_BYTES]u8, non_normalized_path: string) [2]string {
var abs: string = "";
@@ -69,16 +88,22 @@ pub const FolderResolution = union(Tag) {
} else {
var remain: []u8 = joined[0..];
switch (global_or_relative) {
- .global => |path| {
- const offset = path.len - @as(usize, @boolToInt(path[path.len - 1] == std.fs.path.sep));
- @memcpy(remain.ptr, path.ptr, offset);
- remain = remain[offset..];
- if ((path[path.len - 1] != std.fs.path.sep) and (normalized[0] != std.fs.path.sep)) {
- remain[0] = std.fs.path.sep;
- remain = remain[1..];
+ .global, .cache_folder => {
+ const path = if (global_or_relative == .global) global_or_relative.global else global_or_relative.cache_folder;
+ if (path.len > 0) {
+ const offset = path.len -| @as(usize, @boolToInt(path[path.len -| 1] == std.fs.path.sep));
+ if (offset > 0)
+ @memcpy(remain.ptr, path.ptr, offset);
+ remain = remain[offset..];
+ if (normalized.len > 0) {
+ if ((path[path.len - 1] != std.fs.path.sep) and (normalized[0] != std.fs.path.sep)) {
+ remain[0] = std.fs.path.sep;
+ remain = remain[1..];
+ }
+ }
}
},
- .relative => {},
+ else => {},
}
std.mem.copy(u8, remain, normalized);
remain[normalized.len] = std.fs.path.sep;
@@ -136,6 +161,7 @@ pub const FolderResolution = union(Tag) {
pub const GlobalOrRelative = union(enum) {
global: []const u8,
relative: void,
+ cache_folder: []const u8,
};
pub fn getOrPut(global_or_relative: GlobalOrRelative, version: Dependency.Version, non_normalized_path: string, manager: *PackageManager) FolderResolution {
@@ -149,7 +175,7 @@ pub const FolderResolution = union(Tag) {
joined[abs.len] = 0;
var joinedZ: [:0]u8 = joined[0..abs.len :0];
- const package = switch (global_or_relative) {
+ const package: Lockfile.Package = switch (global_or_relative) {
.global => readPackageJSONFromDisk(
manager,
joinedZ,
@@ -168,6 +194,15 @@ pub const FolderResolution = union(Tag) {
Resolver,
Resolver{ .folder_path = rel },
),
+ .cache_folder => readPackageJSONFromDisk(
+ manager,
+ joinedZ,
+ abs,
+ version,
+ Features.npm,
+ CacheFolderResolver,
+ CacheFolderResolver{ .version = version.value.npm.toVersion() },
+ ),
} catch |err| {
if (err == error.FileNotFound) {
entry.value_ptr.* = .{ .err = error.MissingPackageJSON };
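
The `cache_folder` branch above joins the global cache directory with a normalized relative path, using saturating subtraction (`-|`) and length guards so an empty base or missing separator cannot underflow or produce a doubled separator. A standalone sketch of the same separator-aware join, with hypothetical names and a plain slice instead of Bun's fixed `MAX_PATH_BYTES` buffer:

```zig
const std = @import("std");

/// Join `base` and `normalized` into `buf`, emitting exactly one separator.
fn joinInto(buf: []u8, base: []const u8, normalized: []const u8) []const u8 {
    var len: usize = 0;

    // Drop a trailing separator from base so we never produce "a//b".
    var base_len = base.len;
    if (base_len > 0 and base[base_len - 1] == std.fs.path.sep) base_len -= 1;
    std.mem.copy(u8, buf[len..], base[0..base_len]);
    len += base_len;

    // Insert a separator only when neither side already provides one.
    if (base_len > 0 and normalized.len > 0 and normalized[0] != std.fs.path.sep) {
        buf[len] = std.fs.path.sep;
        len += 1;
    }

    std.mem.copy(u8, buf[len..], normalized);
    len += normalized.len;
    return buf[0..len];
}

pub fn main() void {
    var buf: [256]u8 = undefined;
    std.debug.print("{s}\n", .{joinInto(&buf, "/cache/pkg/", "lib/index.js")});
}
```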
diff --git a/src/install/semver.zig b/src/install/semver.zig
index 8e06b78ce..6d376c875 100644
--- a/src/install/semver.zig
+++ b/src/install/semver.zig
@@ -80,6 +80,34 @@ pub const String = extern struct {
}
}
+ pub const HashContext = struct {
+ a_buf: []const u8,
+ b_buf: []const u8,
+
+ pub fn eql(ctx: HashContext, a: String, b: String) bool {
+ return a.eql(b, ctx.a_buf, ctx.b_buf);
+ }
+
+ pub fn hash(ctx: HashContext, a: String) u64 {
+ const str = a.slice(ctx.a_buf);
+ return bun.hash(str);
+ }
+ };
+
+ pub const ArrayHashContext = struct {
+ a_buf: []const u8,
+ b_buf: []const u8,
+
+ pub fn eql(ctx: ArrayHashContext, a: String, b: String, _: usize) bool {
+ return a.eql(b, ctx.a_buf, ctx.b_buf);
+ }
+
+ pub fn hash(ctx: ArrayHashContext, a: String) u32 {
+ const str = a.slice(ctx.a_buf);
+ return @truncate(u32, bun.hash(str));
+ }
+ };
+
pub fn init(
buf: string,
in: string,
@@ -267,9 +295,44 @@ pub const String = extern struct {
return @call(.{ .modifier = .always_inline }, appendWithHash, .{ this, Type, slice_, stringHash(slice_) });
}
+ pub fn appendUTF8WithoutPool(this: *Builder, comptime Type: type, slice_: string, hash: u64) Type {
+ if (slice_.len <= String.max_inline_len) {
+ if (strings.isAllASCII(slice_)) {
+ switch (Type) {
+ String => {
+ return String.init(this.allocatedSlice(), slice_);
+ },
+ ExternalString => {
+ return ExternalString.init(this.allocatedSlice(), slice_, hash);
+ },
+ else => @compileError("Invalid type passed to StringBuilder"),
+ }
+ }
+ }
+
+ assert(this.len <= this.cap); // didn't count everything
+ assert(this.ptr != null); // must call allocate first
+
+ copy(u8, this.ptr.?[this.len..this.cap], slice_);
+ const final_slice = this.ptr.?[this.len..this.cap][0..slice_.len];
+ this.len += slice_.len;
+
+ assert(this.len <= this.cap);
+
+ switch (Type) {
+ String => {
+ return String.init(this.allocatedSlice(), final_slice);
+ },
+ ExternalString => {
+ return ExternalString.init(this.allocatedSlice(), final_slice, hash);
+ },
+ else => @compileError("Invalid type passed to StringBuilder"),
+ }
+ }
+
// SlicedString is not supported due to inline strings.
pub fn appendWithoutPool(this: *Builder, comptime Type: type, slice_: string, hash: u64) Type {
- if (slice_.len < String.max_inline_len) {
+ if (slice_.len <= String.max_inline_len) {
switch (Type) {
String => {
return String.init(this.allocatedSlice(), slice_);
@@ -301,7 +364,7 @@ pub const String = extern struct {
}
pub fn appendWithHash(this: *Builder, comptime Type: type, slice_: string, hash: u64) Type {
- if (slice_.len < String.max_inline_len) {
+ if (slice_.len <= String.max_inline_len) {
switch (Type) {
String => {
return String.init(this.allocatedSlice(), slice_);
@@ -490,6 +553,15 @@ pub const Version = extern struct {
tag: Tag = Tag{},
// raw: RawType = RawType{},
+ /// Assumes that there is only one buffer for all the strings
+ pub fn sortGt(ctx: []const u8, lhs: Version, rhs: Version) bool {
+ return orderFn(ctx, lhs, rhs) == .gt;
+ }
+
+ pub fn orderFn(ctx: []const u8, lhs: Version, rhs: Version) std.math.Order {
+ return lhs.order(rhs, ctx, ctx);
+ }
+
pub fn cloneInto(this: Version, slice: []const u8, buf: *[]u8) Version {
return Version{
.major = this.major,
@@ -622,7 +694,7 @@ pub const Version = extern struct {
}
if (this.build.isInline()) {
- build = this.pre.build;
+ build = this.build.value;
} else {
const build_slice = this.build.slice(slice);
std.mem.copy(u8, buf.*, build_slice);
@@ -636,7 +708,7 @@ pub const Version = extern struct {
.hash = this.pre.hash,
},
.build = .{
- .value = this.build,
+ .value = build,
.hash = this.build.hash,
},
};
@@ -1202,6 +1274,37 @@ pub const Query = struct {
pub const build = 0;
};
+ pub fn deinit(this: *Group) void {
+ var list = this.head;
+ var allocator = this.allocator;
+
+ while (list.next) |next| {
+ var query = list.head;
+ while (query.next) |next_query| {
+ // copy the node before destroying it to avoid a use-after-free
+ query = next_query.*;
+ allocator.destroy(next_query);
+ }
+ list = next.*;
+ allocator.destroy(next);
+ }
+ }
+
+ pub fn from(version: Version) Group {
+ return .{
+ .allocator = bun.default_allocator,
+ .head = .{
+ .head = .{
+ .range = .{
+ .left = .{
+ .op = .eql,
+ .version = version,
+ },
+ },
+ },
+ },
+ };
+ }
+
pub const FlagsBitSet = std.bit_set.IntegerBitSet(3);
pub fn isExact(this: *const Group) bool {
@@ -1212,6 +1315,11 @@ pub const Query = struct {
return lhs.head.eql(&rhs.head);
}
+ pub fn toVersion(this: Group) Version {
+ std.debug.assert(this.isExact() or this.head.head.range.left.op == .unset);
+ return this.head.head.range.left.version;
+ }
+
pub fn orVersion(self: *Group, version: Version) !void {
if (self.tail == null and !self.head.head.range.hasLeft()) {
self.head.head.range.left.version = version;
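
`String` here is an offset+length (or inline bytes) into one shared buffer, so it cannot hash itself; `HashContext` and `ArrayHashContext` carry the backing buffer into the map. Below is a minimal sketch of the same pattern with a hypothetical `Ref` key and `std.HashMapUnmanaged`; Bun's `ArrayHashContext` additionally returns a `u32` hash and takes an index argument in `eql`, as `std.ArrayHashMap` requires.

```zig
const std = @import("std");

// Keys are slices into one shared buffer, identified by offset + length.
const Ref = struct { off: u32, len: u32 };

const RefContext = struct {
    buf: []const u8,

    pub fn hash(ctx: RefContext, key: Ref) u64 {
        return std.hash.Wyhash.hash(0, ctx.buf[key.off .. key.off + key.len]);
    }

    pub fn eql(ctx: RefContext, a: Ref, b: Ref) bool {
        return std.mem.eql(
            u8,
            ctx.buf[a.off .. a.off + a.len],
            ctx.buf[b.off .. b.off + b.len],
        );
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    const buf = "lodashreact"; // the one shared string buffer

    var map = std.HashMapUnmanaged(Ref, u32, RefContext, 80){};
    defer map.deinit(allocator);

    const ctx = RefContext{ .buf = buf };
    try map.putContext(allocator, .{ .off = 0, .len = 6 }, 1, ctx); // "lodash"
    std.debug.print("{any}\n", .{map.getContext(.{ .off = 0, .len = 6 }, ctx)});
}
```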
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 31bc056fb..4ae5e5915 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -48,7 +48,7 @@ pub fn NewBaseStore(comptime Union: anytype, comptime count: usize) type {
store: Self,
};
- const Block = struct {
+ pub const Block = struct {
used: SizeType = 0,
items: [count]UnionValueType align(MaxAlign) = undefined,
@@ -4450,6 +4450,15 @@ pub const Ast = struct {
} };
try std.json.stringify(self.parts, opts, stream);
}
+
+ /// Do not call this if it wasn't globally allocated!
+ pub fn deinit(this: *Ast) void {
+ // TODO: assert mimalloc-owned memory
+ if (this.parts.len > 0) bun.default_allocator.free(this.parts);
+ if (this.externals.len > 0) bun.default_allocator.free(this.externals);
+ if (this.symbols.len > 0) bun.default_allocator.free(this.symbols);
+ if (this.import_records.len > 0) bun.default_allocator.free(this.import_records);
+ }
};
pub const Span = struct {
@@ -4866,6 +4875,7 @@ pub const Macro = struct {
"Macro \"{s}\" not found",
.{import_record_path},
.stmt,
+ err,
) catch unreachable;
return error.MacroNotFound;
},
diff --git a/src/jsc.zig b/src/jsc.zig
index 4d98536d9..07f5d4d26 100644
--- a/src/jsc.zig
+++ b/src/jsc.zig
@@ -9,6 +9,7 @@ pub usingnamespace @import("./bun.js/base.zig");
pub const RareData = @import("./bun.js/rare_data.zig");
pub const Shimmer = @import("./bun.js/bindings/shimmer.zig").Shimmer;
pub usingnamespace @import("./bun.js/javascript.zig");
+pub usingnamespace @import("./bun.js/module_loader.zig");
pub const C = @import("./bun.js/javascript_core_c_api.zig");
pub const WebCore = @import("./bun.js/webcore.zig");
pub const Cloudflare = struct {
diff --git a/src/linker.zig b/src/linker.zig
index ce0b3fb65..9855dc82c 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -34,6 +34,7 @@ const _bundler = @import("./bundler.zig");
const Bundler = _bundler.Bundler;
const ResolveQueue = _bundler.ResolveQueue;
const ResolverType = Resolver.Resolver;
+const ESModule = @import("./resolver/package_json.zig").ESModule;
const Runtime = @import("./runtime.zig").Runtime;
const URL = @import("url.zig").URL;
const JSC = @import("javascript_core");
@@ -220,6 +221,8 @@ pub const Linker = struct {
var needs_require = false;
var node_module_bundle_import_path: ?string = null;
+ const is_deferred = result.pending_imports.len > 0;
+
var import_records = result.ast.import_records;
defer {
result.ast.import_records = import_records;
@@ -232,7 +235,8 @@ pub const Linker = struct {
outer: while (record_i < record_count) : (record_i += 1) {
var import_record = &import_records[record_i];
- if (import_record.is_unused) continue;
+ if (import_record.is_unused or
+ (is_bun and is_deferred and !result.isPendingImport(record_i))) continue;
const record_index = record_i;
if (comptime !ignore_runtime) {
@@ -419,18 +423,53 @@ pub const Linker = struct {
},
}
- if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |_resolved_import| {
- switch (import_record.tag) {
- else => {},
- .react_refresh => {
- linker.tagged_resolutions.react_refresh = _resolved_import;
- linker.tagged_resolutions.react_refresh.?.path_pair.primary = linker.tagged_resolutions.react_refresh.?.path().?.dupeAlloc(bun.default_allocator) catch unreachable;
+ if (comptime is_bun) {
+ switch (linker.resolver.resolveAndAutoInstall(
+ source_dir,
+ import_record.path.text,
+ import_record.kind,
+ linker.options.global_cache,
+ )) {
+ .success => |_resolved_import| {
+ switch (import_record.tag) {
+ else => {},
+ .react_refresh => {
+ linker.tagged_resolutions.react_refresh = _resolved_import;
+ linker.tagged_resolutions.react_refresh.?.path_pair.primary = linker.tagged_resolutions.react_refresh.?.path().?.dupeAlloc(bun.default_allocator) catch unreachable;
+ },
+ }
+
+ break :brk _resolved_import;
},
+ .failure => |err| {
+ break :brk err;
+ },
+ .pending => |*pending| {
+ if (!linker.resolver.opts.global_cache.canInstall()) {
+ break :brk error.InstallationPending;
+ }
+
+ pending.import_record_id = record_i;
+ try result.pending_imports.append(linker.allocator, pending.*);
+ continue;
+ },
+ .not_found => break :brk error.ModuleNotFound,
+ // else => unreachable,
}
+ } else {
+ if (linker.resolver.resolve(source_dir, import_record.path.text, import_record.kind)) |_resolved_import| {
+ switch (import_record.tag) {
+ else => {},
+ .react_refresh => {
+ linker.tagged_resolutions.react_refresh = _resolved_import;
+ linker.tagged_resolutions.react_refresh.?.path_pair.primary = linker.tagged_resolutions.react_refresh.?.path().?.dupeAlloc(bun.default_allocator) catch unreachable;
+ },
+ }
- break :brk _resolved_import;
- } else |err| {
- break :brk err;
+ break :brk _resolved_import;
+ } else |err| {
+ break :brk err;
+ }
}
};
@@ -532,6 +571,139 @@ pub const Linker = struct {
}
} else |err| {
switch (err) {
+ error.VersionSpecifierNotAllowedHere => {
+ var subpath_buf: [512]u8 = undefined;
+
+ if (ESModule.Package.parse(import_record.path.text, &subpath_buf)) |pkg| {
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Unexpected version \"{s}\" in import specifier \"{s}\". When a package.json is present, please use one of the \"dependencies\" fields in package.json for setting dependency versions",
+ .{ pkg.version, import_record.path.text },
+ import_record.kind,
+ err,
+ ) catch {};
+ } else {
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Unexpected version in import specifier \"{s}\". When a package.json is present, please use one of the \"dependencies\" fields in package.json to specify the version",
+ .{import_record.path.text},
+ import_record.kind,
+ err,
+ ) catch {};
+ }
+ had_resolve_errors = true;
+ continue;
+ },
+
+ error.NoMatchingVersion => {
+ if (import_record.handles_import_errors) {
+ import_record.path.is_disabled = true;
+ continue;
+ }
+
+ had_resolve_errors = true;
+
+ var package_name = import_record.path.text;
+ var subpath_buf: [512]u8 = undefined;
+ if (ESModule.Package.parse(import_record.path.text, &subpath_buf)) |pkg| {
+ package_name = pkg.name;
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Version \"{s}\" not found for package \"{s}\" (while resolving \"{s}\")",
+ .{ pkg.version, package_name, import_record.path.text },
+ import_record.kind,
+ err,
+ ) catch {};
+ } else {
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Package version not found: \"{s}\"",
+ .{import_record.path.text},
+ import_record.kind,
+ err,
+ ) catch {};
+ }
+ continue;
+ },
+
+ error.DistTagNotFound => {
+ if (import_record.handles_import_errors) {
+ import_record.path.is_disabled = true;
+ continue;
+ }
+
+ had_resolve_errors = true;
+
+ var package_name = import_record.path.text;
+ var subpath_buf: [512]u8 = undefined;
+ if (ESModule.Package.parse(import_record.path.text, &subpath_buf)) |pkg| {
+ package_name = pkg.name;
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Version \"{s}\" not found for package \"{s}\" (while resolving \"{s}\")",
+ .{ pkg.version, package_name, import_record.path.text },
+ import_record.kind,
+ err,
+ ) catch {};
+ } else {
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Package tag not found: \"{s}\"",
+ .{import_record.path.text},
+ import_record.kind,
+ err,
+ ) catch {};
+ }
+
+ continue;
+ },
+
+ error.PackageManifestHTTP404 => {
+ if (import_record.handles_import_errors) {
+ import_record.path.is_disabled = true;
+ continue;
+ }
+
+ had_resolve_errors = true;
+
+ var package_name = import_record.path.text;
+ var subpath_buf: [512]u8 = undefined;
+ if (ESModule.Package.parse(import_record.path.text, &subpath_buf)) |pkg| {
+ package_name = pkg.name;
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Package not found: \"{s}\" (while resolving \"{s}\")",
+ .{ package_name, import_record.path.text },
+ import_record.kind,
+ err,
+ ) catch {};
+ } else {
+ linker.log.addResolveError(
+ &result.source,
+ import_record.range,
+ linker.allocator,
+ "Package not found: \"{s}\"",
+ .{package_name},
+ import_record.kind,
+ err,
+ ) catch {};
+ }
+ continue;
+ },
error.ModuleNotFound => {
if (import_record.handles_import_errors) {
import_record.path.is_disabled = true;
@@ -556,6 +728,7 @@ pub const Linker = struct {
"Could not resolve: \"{s}\". Try setting --platform=\"node\" (after bun build exists)",
.{import_record.path.text},
import_record.kind,
+ err,
);
continue;
} else {
@@ -566,6 +739,7 @@ pub const Linker = struct {
"Could not resolve: \"{s}\". Maybe you need to \"bun install\"?",
.{import_record.path.text},
import_record.kind,
+ err,
);
continue;
}
@@ -579,6 +753,7 @@ pub const Linker = struct {
import_record.path.text,
},
import_record.kind,
+ err,
);
continue;
}
@@ -596,6 +771,7 @@ pub const Linker = struct {
import_record.path.text,
},
import_record.kind,
+ err,
);
continue;
},
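
With auto-install, resolution in the linker is no longer a simple error union: `resolveAndAutoInstall` returns success / failure / pending / not_found, and `pending` records are queued on `result.pending_imports` for a later pass instead of failing the build. A reduced sketch of that control flow, using hypothetical `Pending` and `ResolveResult` stand-ins:

```zig
const std = @import("std");

const Pending = struct { import_record_id: u32 };

const ResolveResult = union(enum) {
    success: []const u8, // resolved path
    failure: anyerror,
    pending: Pending, // package install in flight
    not_found: void,
};

/// Returns a path, defers the record (null), or surfaces an error.
fn link(result: ResolveResult, queue: *std.ArrayList(Pending)) !?[]const u8 {
    switch (result) {
        .success => |path| return path,
        .failure => |err| return err,
        .pending => |p| {
            try queue.append(p); // revisit once the package is installed
            return null;
        },
        .not_found => return error.ModuleNotFound,
    }
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    var queue = std.ArrayList(Pending).init(gpa.allocator());
    defer queue.deinit();

    const out = try link(.{ .pending = .{ .import_record_id = 0 } }, &queue);
    std.debug.print("deferred: {any}, queued: {d}\n", .{ out, queue.items.len });
}
```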
diff --git a/src/logger.zig b/src/logger.zig
index 2fc5fa2a6..df3095e23 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -447,6 +447,7 @@ pub const Msg = struct {
pub const Resolve = struct {
specifier: BabyString,
import_kind: ImportKind,
+ err: anyerror = error.ModuleNotFound,
};
};
@@ -775,7 +776,18 @@ pub const Log = struct {
});
}
- inline fn _addResolveErrorWithLevel(log: *Log, source: *const Source, r: Range, allocator: std.mem.Allocator, comptime fmt: string, args: anytype, import_kind: ImportKind, comptime dupe_text: bool, comptime is_error: bool) !void {
+ inline fn _addResolveErrorWithLevel(
+ log: *Log,
+ source: *const Source,
+ r: Range,
+ allocator: std.mem.Allocator,
+ comptime fmt: string,
+ args: anytype,
+ import_kind: ImportKind,
+ comptime dupe_text: bool,
+ comptime is_error: bool,
+ err: anyerror,
+ ) !void {
const text = try std.fmt.allocPrint(allocator, fmt, args);
// TODO: fix this. this is stupid, it should be returned in allocPrint.
const specifier = BabyString.in(text, args.@"0");
@@ -806,18 +818,42 @@ pub const Log = struct {
const msg = Msg{
.kind = if (comptime is_error) Kind.err else Kind.warn,
.data = data,
- .metadata = .{ .resolve = Msg.Metadata.Resolve{ .specifier = specifier, .import_kind = import_kind } },
+ .metadata = .{ .resolve = Msg.Metadata.Resolve{
+ .specifier = specifier,
+ .import_kind = import_kind,
+ .err = err,
+ } },
};
try log.addMsg(msg);
}
- inline fn _addResolveError(log: *Log, source: *const Source, r: Range, allocator: std.mem.Allocator, comptime fmt: string, args: anytype, import_kind: ImportKind, comptime dupe_text: bool) !void {
- return _addResolveErrorWithLevel(log, source, r, allocator, fmt, args, import_kind, dupe_text, true);
+ inline fn _addResolveError(
+ log: *Log,
+ source: *const Source,
+ r: Range,
+ allocator: std.mem.Allocator,
+ comptime fmt: string,
+ args: anytype,
+ import_kind: ImportKind,
+ comptime dupe_text: bool,
+ err: anyerror,
+ ) !void {
+ return _addResolveErrorWithLevel(log, source, r, allocator, fmt, args, import_kind, dupe_text, true, err);
}
- inline fn _addResolveWarn(log: *Log, source: *const Source, r: Range, allocator: std.mem.Allocator, comptime fmt: string, args: anytype, import_kind: ImportKind, comptime dupe_text: bool) !void {
- return _addResolveErrorWithLevel(log, source, r, allocator, fmt, args, import_kind, dupe_text, false);
+ inline fn _addResolveWarn(
+ log: *Log,
+ source: *const Source,
+ r: Range,
+ allocator: std.mem.Allocator,
+ comptime fmt: string,
+ args: anytype,
+ import_kind: ImportKind,
+ comptime dupe_text: bool,
+ err: anyerror,
+ ) !void {
+ return _addResolveErrorWithLevel(log, source, r, allocator, fmt, args, import_kind, dupe_text, false, err);
}
pub fn addResolveError(
@@ -828,9 +864,10 @@ pub const Log = struct {
comptime fmt: string,
args: anytype,
import_kind: ImportKind,
+ err: anyerror,
) !void {
@setCold(true);
- return try _addResolveError(log, source, r, allocator, fmt, args, import_kind, false);
+ return try _addResolveError(log, source, r, allocator, fmt, args, import_kind, false, err);
}
pub fn addResolveErrorWithTextDupe(
@@ -843,7 +880,7 @@ pub const Log = struct {
import_kind: ImportKind,
) !void {
@setCold(true);
- return try _addResolveError(log, source, r, allocator, fmt, args, import_kind, true);
+ return try _addResolveError(log, source, r, allocator, fmt, args, import_kind, true, error.ModuleNotFound);
}
pub fn addResolveErrorWithTextDupeMaybeWarn(
@@ -858,9 +895,9 @@ pub const Log = struct {
) !void {
@setCold(true);
if (warn) {
- return try _addResolveError(log, source, r, allocator, fmt, args, import_kind, true);
+ return try _addResolveError(log, source, r, allocator, fmt, args, import_kind, true, error.ModuleNotFound);
} else {
- return try _addResolveWarn(log, source, r, allocator, fmt, args, import_kind, true);
+ return try _addResolveWarn(log, source, r, allocator, fmt, args, import_kind, true, error.ModuleNotFound);
}
}
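
Threading the concrete `anyerror` into `Msg.Metadata.Resolve` means downstream code can branch on which resolve error occurred without parsing the rendered message text. A hedged sketch of such a consumer follows; the error names are taken from the linker hunk above, but whether the runtime branches exactly this way is an assumption.

```zig
const std = @import("std");

const Metadata = union(enum) {
    resolve: struct {
        specifier: []const u8,
        err: anyerror = error.ModuleNotFound,
    },
};

// Hypothetical consumer: classify a resolve failure by the attached error
// value rather than by re-parsing the rendered message.
fn isRetryableResolveError(meta: Metadata) bool {
    return switch (meta) {
        .resolve => |r| r.err == error.NoMatchingVersion or
            r.err == error.DistTagNotFound or
            r.err == error.PackageManifestHTTP404,
    };
}

pub fn main() void {
    const meta = Metadata{ .resolve = .{
        .specifier = "left-pad",
        .err = error.NoMatchingVersion,
    } };
    std.debug.print("retryable: {}\n", .{isRetryableResolveError(meta)});
}
```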
diff --git a/src/options.zig b/src/options.zig
index 4cfde3a31..825b8cb37 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -716,6 +716,21 @@ pub const Loader = enum(u4) {
};
}
+ pub fn fromAPI(loader: Api.Loader) Loader {
+ return switch (loader) {
+ .jsx => .jsx,
+ .js => .js,
+ .ts => .ts,
+ .tsx => .tsx,
+ .css => .css,
+ .json => .json,
+ .toml => .toml,
+ .wasm => .wasm,
+ .napi => .napi,
+ else => .file,
+ };
+ }
+
pub fn isJSX(loader: Loader) bool {
return loader == .jsx or loader == .tsx;
}
@@ -1225,6 +1240,11 @@ pub const BundleOptions = struct {
disable_transpilation: bool = false,
+ global_cache: GlobalCache = .disable,
+ prefer_offline_install: bool = false,
+ prefer_latest_install: bool = false,
+ install: ?*Api.BunInstall = null,
+
pub inline fn cssImportBehavior(this: *const BundleOptions) Api.CssInJsBehavior {
switch (this.platform) {
.neutral, .browser => {
@@ -2316,3 +2336,5 @@ pub const RouteConfig = struct {
return router;
}
};
+
+pub const GlobalCache = @import("./resolver/resolver.zig").GlobalCache;
diff --git a/src/output.zig b/src/output.zig
index 3d9241500..d6ba81a3b 100644
--- a/src/output.zig
+++ b/src/output.zig
@@ -376,6 +376,8 @@ pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_f
std.os.getenv("BUN_DEBUG_" ++ @tagName(tag)) != null)
{
really_disable = false;
+ } else if (std.os.getenv("BUN_DEBUG_QUIET_LOGS") != null) {
+ really_disable = true;
}
}
diff --git a/src/resolver/dir_info.zig b/src/resolver/dir_info.zig
index 86d165df9..0d1bac6a7 100644
--- a/src/resolver/dir_info.zig
+++ b/src/resolver/dir_info.zig
@@ -1,4 +1,5 @@
const bun = @import("../global.zig");
+const std = @import("std");
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
@@ -35,17 +36,37 @@ enclosing_tsconfig_json: ?*const TSConfigJSON = null,
/// https://github.com/oven-sh/bun/issues/229
enclosing_package_json: ?*PackageJSON = null,
+package_json_for_dependencies: ?*PackageJSON = null,
+
abs_path: string = "",
entries: Index = undefined,
-has_node_modules: bool = false, // Is there a "node_modules" subdirectory?
-is_node_modules: bool = false, // Is this a "node_modules" directory?
package_json: ?*PackageJSON = null, // Is there a "package.json" file?
tsconfig_json: ?*TSConfigJSON = null, // Is there a "tsconfig.json" file in this directory or a parent directory?
abs_real_path: string = "", // If non-empty, this is the real absolute path resolving any symlinks
+flags: Flags.Set = Flags.Set{},
+
+/// Is there a "node_modules" subdirectory?
+pub inline fn hasNodeModules(this: *const DirInfo) bool {
+ return this.flags.contains(.has_node_modules);
+}
+/// Is this a "node_modules" directory?
+pub inline fn isNodeModules(this: *const DirInfo) bool {
+ return this.flags.contains(.is_node_modules);
+}
+
+pub const Flags = enum {
+ /// This directory is a node_modules directory
+ is_node_modules,
+ /// This directory has a node_modules subdirectory
+ has_node_modules,
+
+ pub const Set = std.enums.EnumSet(Flags);
+};
+
pub fn hasParentPackage(this: *const DirInfo) bool {
const parent = this.getParent() orelse return false;
- return !parent.is_node_modules;
+ return !parent.isNodeModules();
}
pub fn getFileDescriptor(dirinfo: *const DirInfo) StoredFileDescriptorType {
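
Replacing the two `bool` fields with `std.enums.EnumSet(Flags)` packs the directory flags into a single bit set and leaves room for more flags without growing `DirInfo`. A minimal, self-contained illustration of the same `EnumSet` usage:

```zig
const std = @import("std");

const Flags = enum {
    is_node_modules,
    has_node_modules,

    pub const Set = std.enums.EnumSet(Flags);
};

pub fn main() void {
    var flags = Flags.Set{};
    flags.insert(.has_node_modules);

    std.debug.print("has node_modules: {}\n", .{flags.contains(.has_node_modules)});
    std.debug.print("is node_modules:  {}\n", .{flags.contains(.is_node_modules)});
}
```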
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index e33c5ac60..034debe59 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -28,6 +28,26 @@ pub const MacroImportReplacementMap = std.StringArrayHashMap(string);
pub const MacroMap = std.StringArrayHashMapUnmanaged(MacroImportReplacementMap);
const ScriptsMap = std.StringArrayHashMap(string);
+const Semver = @import("../install/semver.zig");
+const Dependency = @import("../install/dependency.zig");
+const String = @import("../install/semver.zig").String;
+const Version = Semver.Version;
+const Install = @import("../install/install.zig");
+const FolderResolver = @import("../install/resolvers/folder_resolver.zig");
+
+const Architecture = @import("../install/npm.zig").Architecture;
+const OperatingSystem = @import("../install/npm.zig").OperatingSystem;
+pub const DependencyMap = struct {
+ map: HashMap = .{},
+ source_buf: []const u8 = "",
+
+ pub const HashMap = std.ArrayHashMapUnmanaged(
+ String,
+ Dependency,
+ String.ArrayHashContext,
+ false,
+ );
+};
pub const PackageJSON = struct {
pub const LoadFramework = enum {
@@ -85,6 +105,12 @@ pub const PackageJSON = struct {
scripts: ?*ScriptsMap = null,
+ arch: Architecture = Architecture.all,
+ os: OperatingSystem = OperatingSystem.all,
+
+ package_manager_package_id: Install.PackageID = Install.invalid_package_id,
+ dependencies: DependencyMap = .{},
+
// Present if the "browser" field is present. This field is intended to be
// used by bundlers and lets you redirect the paths of certain 3rd-party
// modules that don't work in the browser to other modules that shim that
@@ -538,12 +564,13 @@ pub const PackageJSON = struct {
}
pub fn parse(
- comptime ResolverType: type,
- r: *ResolverType,
+ r: *resolver.Resolver,
input_path: string,
dirname_fd: StoredFileDescriptorType,
- comptime generate_hash: bool,
+ package_id: ?Install.PackageID,
comptime include_scripts: bool,
+ comptime include_dependencies: @Type(.EnumLiteral),
+ comptime generate_hash: bool,
) ?PackageJSON {
// TODO: remove this extra copy
@@ -566,7 +593,7 @@ pub const PackageJSON = struct {
};
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The file \"{s}\" exists", .{package_json_path}) catch unreachable;
+ debug.addNoteFmt("The file \"{s}\" exists", .{package_json_path});
}
const key_path = fs.Path.init(package_json_path);
@@ -716,6 +743,160 @@ pub const PackageJSON = struct {
}
}
+ if (comptime include_dependencies == .main or include_dependencies == .local) {
+ update_dependencies: {
+ if (package_id) |pkg| {
+ package_json.package_manager_package_id = pkg;
+ break :update_dependencies;
+ }
+
+ // if there is a name & version, check if the lockfile has the package
+ if (package_json.name.len > 0 and package_json.version.len > 0) {
+ if (r.package_manager) |pm| {
+ const tag = Dependency.Version.Tag.infer(package_json.version);
+
+ if (tag == .npm) {
+ const sliced = Semver.SlicedString.init(package_json.version, package_json.version);
+ if (Dependency.parseWithTag(r.allocator, package_json.version, .npm, &sliced, r.log)) |dependency_version| {
+ if (dependency_version.value.npm.isExact()) {
+ if (pm.lockfile.resolve(package_json.name, dependency_version)) |resolved| {
+ package_json.package_manager_package_id = resolved;
+ if (resolved > 0) {
+ break :update_dependencies;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ if (json.get("cpu")) |os_field| {
+ var first = true;
+ if (os_field.asArray()) |*array| {
+ while (array.next()) |item| {
+ if (item.asString(bun.default_allocator)) |str| {
+ if (first) {
+ package_json.arch = Architecture.none;
+ first = false;
+ }
+ package_json.arch = package_json.arch.apply(str);
+ }
+ }
+ }
+ }
+
+ if (json.get("os")) |os_field| {
+ var first = true;
+ if (os_field.asArray()) |*array| {
+ while (array.next()) |item| {
+ if (item.asString(bun.default_allocator)) |str| {
+ if (first) {
+ package_json.os = OperatingSystem.none;
+ first = false;
+ }
+ package_json.os = package_json.os.apply(str);
+ }
+ }
+ }
+ }
+
+ const DependencyGroup = Install.Lockfile.Package.DependencyGroup;
+ const features = .{
+ .dependencies = true,
+ .dev_dependencies = include_dependencies == .main,
+ .optional_dependencies = false,
+ .peer_dependencies = false,
+ };
+
+ const dependency_groups = comptime brk: {
+ var out_groups: [
+ @as(usize, @boolToInt(features.dependencies)) +
+ @as(usize, @boolToInt(features.dev_dependencies)) +
+ @as(usize, @boolToInt(features.optional_dependencies)) +
+ @as(usize, @boolToInt(features.peer_dependencies))
+ ]DependencyGroup = undefined;
+ var out_group_i: usize = 0;
+ if (features.dependencies) {
+ out_groups[out_group_i] = DependencyGroup.dependencies;
+ out_group_i += 1;
+ }
+
+ if (features.dev_dependencies) {
+ out_groups[out_group_i] = DependencyGroup.dev;
+ out_group_i += 1;
+ }
+ if (features.optional_dependencies) {
+ out_groups[out_group_i] = DependencyGroup.optional;
+ out_group_i += 1;
+ }
+
+ if (features.peer_dependencies) {
+ out_groups[out_group_i] = DependencyGroup.peer;
+ out_group_i += 1;
+ }
+
+ break :brk out_groups;
+ };
+
+ var total_dependency_count: usize = 0;
+ inline for (dependency_groups) |group| {
+ if (json.get(group.field)) |group_json| {
+ if (group_json.data == .e_object) {
+ total_dependency_count += group_json.data.e_object.properties.len;
+ }
+ }
+ }
+
+ if (total_dependency_count > 0) {
+ package_json.dependencies.map = DependencyMap.HashMap{};
+ package_json.dependencies.source_buf = json_source.contents;
+ const ctx = String.ArrayHashContext{
+ .a_buf = json_source.contents,
+ .b_buf = json_source.contents,
+ };
+ package_json.dependencies.map.ensureTotalCapacityContext(
+ r.allocator,
+ total_dependency_count,
+ ctx,
+ ) catch unreachable;
+
+ inline for (dependency_groups) |group| {
+ if (json.get(group.field)) |group_json| {
+ if (group_json.data == .e_object) {
+ var group_obj = group_json.data.e_object;
+ for (group_obj.properties.slice()) |*prop| {
+ const name = prop.key orelse continue;
+ const name_str = name.asString(r.allocator) orelse continue;
+ const version_value = prop.value orelse continue;
+ const version_str = version_value.asString(r.allocator) orelse continue;
+ const sliced_str = Semver.SlicedString.init(version_str, version_str);
+
+ if (Dependency.parse(
+ r.allocator,
+ version_str,
+ &sliced_str,
+ r.log,
+ )) |dependency_version| {
+ const dependency = Dependency{
+ .name = String.init(name_str, name_str),
+ .version = dependency_version,
+ .name_hash = bun.hash(name_str),
+ .behavior = group.behavior,
+ };
+ package_json.dependencies.map.putAssumeCapacityContext(
+ dependency.name,
+ dependency,
+ ctx,
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
// used by `bun run`
if (include_scripts) {
read_scripts: {
@@ -1043,8 +1224,49 @@ pub const ESModule = struct {
pub const Package = struct {
name: string,
+ version: string = "",
subpath: string,
+ pub const External = struct {
+ name: Semver.String = .{},
+ version: Semver.String = .{},
+ subpath: Semver.String = .{},
+ };
+
+ pub fn count(this: Package, builder: *Semver.String.Builder) void {
+ builder.count(this.name);
+ builder.count(this.version);
+ builder.count(this.subpath);
+ }
+
+ pub fn clone(this: Package, builder: *Semver.String.Builder) External {
+ return .{
+ .name = builder.appendUTF8WithoutPool(Semver.String, this.name, 0),
+ .version = builder.appendUTF8WithoutPool(Semver.String, this.version, 0),
+ .subpath = builder.appendUTF8WithoutPool(Semver.String, this.subpath, 0),
+ };
+ }
+
+ pub fn toExternal(this: Package, buffer: []const u8) External {
+ return .{
+ .name = Semver.String.init(buffer, this.name),
+ .version = Semver.String.init(buffer, this.version),
+ .subpath = Semver.String.init(buffer, this.subpath),
+ };
+ }
+
+ pub fn withAutoVersion(this: Package) Package {
+ if (this.version.len == 0) {
+ return .{
+ .name = this.name,
+ .subpath = this.subpath,
+ .version = ">=0.0.0",
+ };
+ }
+
+ return this;
+ }
+
pub fn parseName(specifier: string) ?string {
var slash = strings.indexOfCharNeg(specifier, '/');
if (!strings.startsWithChar(specifier, '@')) {
@@ -1059,6 +1281,27 @@ pub const ESModule = struct {
}
}
+ pub fn parseVersion(specifier_after_name: string) ?string {
+ if (strings.indexOfChar(specifier_after_name, '/')) |slash| {
+ // "foo@/bar" is not a valid specifier\
+ // "foo@/" is not a valid specifier
+ // "foo/@/bar" is not a valid specifier
+ // "foo@1/bar" is a valid specifier
+ // "foo@^123.2.3+ba-ab/bar" is a valid specifier
+ // ^^^^^^^^^^^^^^
+ // this is the version
+
+ const remainder = specifier_after_name[0..slash];
+ if (remainder.len > 0 and remainder[0] == '@') {
+ return remainder[1..];
+ }
+
+ return remainder;
+ }
+
+ return null;
+ }
+
pub fn parse(specifier: string, subpath_buf: []u8) ?Package {
if (specifier.len == 0) return null;
var package = Package{ .name = parseName(specifier) orelse return null, .subpath = "" };
@@ -1066,11 +1309,30 @@ pub const ESModule = struct {
if (strings.startsWith(package.name, ".") or strings.indexAnyComptime(package.name, "\\%") != null)
return null;
- std.mem.copy(u8, subpath_buf[1..], specifier[package.name.len..]);
- subpath_buf[0] = '.';
- package.subpath = subpath_buf[0 .. specifier[package.name.len..].len + 1];
+ const offset: usize = if (package.name.len == 0 or package.name[0] != '@') 0 else 1;
+ if (strings.indexOfChar(specifier[offset..], '@')) |at| {
+ package.version = parseVersion(specifier[offset..][at..]) orelse "";
+ if (package.version.len == 0) {
+ package.version = specifier[offset..][at..];
+ if (package.version.len > 0 and package.version[0] == '@') {
+ package.version = package.version[1..];
+ }
+ }
+ package.name = specifier[0 .. at + offset];
+
+ parseSubpath(&package.subpath, specifier[@minimum(package.name.len + package.version.len + 1, specifier.len)..], subpath_buf);
+ } else {
+ parseSubpath(&package.subpath, specifier[package.name.len..], subpath_buf);
+ }
+
return package;
}
+
+ pub fn parseSubpath(subpath: *[]const u8, specifier: string, subpath_buf: []u8) void {
+ std.mem.copy(u8, subpath_buf[1..], specifier);
+ subpath_buf[0] = '.';
+ subpath.* = subpath_buf[0 .. specifier.len + 1];
+ }
};
const ReverseKind = enum { exact, pattern, prefix };
@@ -1170,7 +1432,7 @@ pub const ESModule = struct {
) Resolution {
if (exports.data == .invalid) {
if (r.debug_logs) |logs| {
- logs.addNote("Invalid package configuration") catch unreachable;
+ logs.addNote("Invalid package configuration");
}
return Resolution{ .status = .InvalidPackageConfiguration, .debug = .{ .token = exports.first_token } };
@@ -1210,7 +1472,7 @@ pub const ESModule = struct {
}
if (r.debug_logs) |logs| {
- logs.addNoteFmt("The path \"{s}\" was not exported", .{subpath}) catch unreachable;
+ logs.addNoteFmt("The path \"{s}\" was not exported", .{subpath});
}
return Resolution{ .status = .PackagePathNotExported, .debug = .{ .token = exports.first_token } };
@@ -1224,13 +1486,13 @@ pub const ESModule = struct {
package_url: string,
) Resolution {
if (r.debug_logs) |logs| {
- logs.addNoteFmt("Checking object path map for \"{s}\"", .{match_key}) catch unreachable;
+ logs.addNoteFmt("Checking object path map for \"{s}\"", .{match_key});
}
if (!strings.endsWithChar(match_key, '.')) {
if (match_obj.valueForKey(match_key)) |target| {
if (r.debug_logs) |log| {
- log.addNoteFmt("Found \"{s}\"", .{match_key}) catch unreachable;
+ log.addNoteFmt("Found \"{s}\"", .{match_key});
}
return r.resolveTarget(package_url, target, "", is_imports, false);
@@ -1248,7 +1510,7 @@ pub const ESModule = struct {
const target = expansion.value;
const subpath = match_key[expansion.key.len - 1 ..];
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath }) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath });
}
return r.resolveTarget(package_url, target, subpath, is_imports, true);
@@ -1259,7 +1521,7 @@ pub const ESModule = struct {
const target = expansion.value;
const subpath = match_key[expansion.key.len..];
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath }) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath });
}
var result = r.resolveTarget(package_url, target, subpath, is_imports, false);
@@ -1273,13 +1535,13 @@ pub const ESModule = struct {
}
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" did not match", .{expansion.key}) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" did not match", .{expansion.key});
}
}
}
if (r.debug_logs) |log| {
- log.addNoteFmt("No keys matched \"{s}\"", .{match_key}) catch unreachable;
+ log.addNoteFmt("No keys matched \"{s}\"", .{match_key});
}
return Resolution{
@@ -1301,12 +1563,12 @@ pub const ESModule = struct {
switch (target.data) {
.string => |str| {
if (r.debug_logs) |log| {
- log.addNoteFmt("Checking path \"{s}\" against target \"{s}\"", .{ subpath, str }) catch unreachable;
- log.increaseIndent() catch unreachable;
+ log.addNoteFmt("Checking path \"{s}\" against target \"{s}\"", .{ subpath, str });
+ log.increaseIndent();
}
defer {
if (r.debug_logs) |log| {
- log.decreaseIndent() catch unreachable;
+ log.decreaseIndent();
}
}
@@ -1315,7 +1577,7 @@ pub const ESModule = struct {
if (comptime !pattern) {
if (subpath.len > 0 and !strings.endsWithChar(str, '/')) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it doesn't end with a \"/\"", .{str}) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it doesn't end with a \"/\"", .{str});
}
return Resolution{ .path = str, .status = .InvalidModuleSpecifier, .debug = .{ .token = target.first_token } };
@@ -1325,7 +1587,7 @@ pub const ESModule = struct {
// If target does not start with "./", then...
if (!strings.startsWith(str, "./")) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it doesn't start with a \"./\"", .{str}) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it doesn't start with a \"./\"", .{str});
}
if (internal and !strings.hasPrefixComptime(str, "../") and !strings.hasPrefix(str, "/")) {
@@ -1335,7 +1597,7 @@ pub const ESModule = struct {
_ = std.mem.replace(u8, str, "*", subpath, &resolve_target_buf2);
const result = resolve_target_buf2[0..len];
if (r.debug_logs) |log| {
- log.addNoteFmt("Subsituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, str, result }) catch unreachable;
+ log.addNoteFmt("Subsituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, str, result });
}
return Resolution{ .path = result, .status = .PackageResolve, .debug = .{ .token = target.first_token } };
@@ -1343,7 +1605,7 @@ pub const ESModule = struct {
var parts2 = [_]string{ str, subpath };
const result = resolve_path.joinStringBuf(&resolve_target_buf2, parts2, .auto);
if (r.debug_logs) |log| {
- log.addNoteFmt("Resolved \".{s}\" to \".{s}\"", .{ str, result }) catch unreachable;
+ log.addNoteFmt("Resolved \".{s}\" to \".{s}\"", .{ str, result });
}
return Resolution{ .path = result, .status = .PackageResolve, .debug = .{ .token = target.first_token } };
@@ -1357,7 +1619,7 @@ pub const ESModule = struct {
// segments after the first segment, throw an Invalid Package Target error.
if (findInvalidSegment(str)) |invalid| {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid }) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid });
}
return Resolution{ .path = str, .status = .InvalidPackageTarget, .debug = .{ .token = target.first_token } };
@@ -1371,7 +1633,7 @@ pub const ESModule = struct {
// segments after the first segment, throw an Invalid Package Target error.
if (findInvalidSegment(resolved_target)) |invalid| {
if (r.debug_logs) |log| {
- log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid }) catch unreachable;
+ log.addNoteFmt("The target \"{s}\" is invalid because it contains an invalid segment \"{s}\"", .{ str, invalid });
}
return Resolution{ .path = str, .status = .InvalidModuleSpecifier, .debug = .{ .token = target.first_token } };
@@ -1383,7 +1645,7 @@ pub const ESModule = struct {
_ = std.mem.replace(u8, resolved_target, "*", subpath, &resolve_target_buf2);
const result = resolve_target_buf2[0..len];
if (r.debug_logs) |log| {
- log.addNoteFmt("Subsituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result }) catch unreachable;
+ log.addNoteFmt("Subsituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result });
}
return Resolution{ .path = result, .status = .Exact, .debug = .{ .token = target.first_token } };
@@ -1391,7 +1653,7 @@ pub const ESModule = struct {
var parts2 = [_]string{ package_url, str, subpath };
const result = resolve_path.joinStringBuf(&resolve_target_buf2, parts2, .auto);
if (r.debug_logs) |log| {
- log.addNoteFmt("Substituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result }) catch unreachable;
+ log.addNoteFmt("Substituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, resolved_target, result });
}
return Resolution{ .path = result, .status = .Exact, .debug = .{ .token = target.first_token } };
@@ -1406,7 +1668,7 @@ pub const ESModule = struct {
for (keys) |key, i| {
if (strings.eqlComptime(key, "default") or r.conditions.contains(key)) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" matched", .{key}) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" matched", .{key});
}
var result = r.resolveTarget(package_url, slice.items(.value)[i], subpath, internal, pattern);
@@ -1420,12 +1682,12 @@ pub const ESModule = struct {
}
if (r.debug_logs) |log| {
- log.addNoteFmt("The key \"{s}\" did not match", .{key}) catch unreachable;
+ log.addNoteFmt("The key \"{s}\" did not match", .{key});
}
}
if (r.debug_logs) |log| {
- log.addNoteFmt("No keys matched", .{}) catch unreachable;
+ log.addNoteFmt("No keys matched", .{});
}
var return_target = target;
@@ -1489,7 +1751,7 @@ pub const ESModule = struct {
.array => |array| {
if (array.len == 0) {
if (r.debug_logs) |log| {
- log.addNoteFmt("The path \"{s}\" is an empty array", .{subpath}) catch unreachable;
+ log.addNoteFmt("The path \"{s}\" is an empty array", .{subpath});
}
return Resolution{ .path = "", .status = .Null, .debug = .{ .token = target.first_token } };
@@ -1517,7 +1779,7 @@ pub const ESModule = struct {
},
.@"null" => {
if (r.debug_logs) |log| {
- log.addNoteFmt("The path \"{s}\" is null", .{subpath}) catch unreachable;
+ log.addNoteFmt("The path \"{s}\" is null", .{subpath});
}
return Resolution{ .path = "", .status = .Null, .debug = .{ .token = target.first_token } };
@@ -1526,7 +1788,7 @@ pub const ESModule = struct {
}
if (r.debug_logs) |logs| {
- logs.addNoteFmt("Invalid package target for path \"{s}\"", .{subpath}) catch unreachable;
+ logs.addNoteFmt("Invalid package target for path \"{s}\"", .{subpath});
}
return Resolution{ .status = .InvalidPackageTarget, .debug = .{ .token = target.first_token } };
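
`ESModule.Package.parse` now also extracts an embedded version, so a specifier like `foo@^123.2.3+ba-ab/bar` splits into name, version, and subpath (the subpath is rewritten to start with `.`). Below is a simplified sketch of the split for unscoped names; Bun's version additionally offsets past a leading `@` to handle scoped packages such as `@scope/pkg@1.0.0/sub`.

```zig
const std = @import("std");

const Package = struct {
    name: []const u8,
    version: []const u8 = "",
    subpath: []const u8 = "",
};

fn parse(specifier: []const u8) Package {
    // Only an "@" before the first "/" separates a version:
    // "foo@1/bar" carries a version, "foo/bar@1" does not.
    const slash = std.mem.indexOfScalar(u8, specifier, '/') orelse specifier.len;
    const head = specifier[0..slash];
    if (std.mem.indexOfScalar(u8, head, '@')) |at| {
        return .{
            .name = head[0..at],
            .version = head[at + 1 ..],
            .subpath = specifier[slash..],
        };
    }
    return .{ .name = head, .subpath = specifier[slash..] };
}

pub fn main() void {
    const pkg = parse("foo@^123.2.3+ba-ab/bar");
    std.debug.print("name={s} version={s} subpath={s}\n", .{
        pkg.name, pkg.version, pkg.subpath,
    });
}
```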
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 2c14089ee..a6e6f9b94 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -38,6 +38,14 @@ const allocators = @import("../allocators.zig");
const Msg = logger.Msg;
const Path = Fs.Path;
const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle;
+const PackageManager = @import("../install/install.zig").PackageManager;
+const Dependency = @import("../install/dependency.zig");
+const Install = @import("../install/install.zig");
+const Lockfile = @import("../install/lockfile.zig").Lockfile;
+const Package = @import("../install/lockfile.zig").Package;
+const Resolution = @import("../install/resolution.zig").Resolution;
+const Semver = @import("../install/semver.zig");
+const DotEnv = @import("../env_loader.zig");
pub fn isPackagePath(path: string) bool {
// this could probably be flattened into something more optimized
@@ -126,6 +134,13 @@ pub const Result = struct {
file_fd: StoredFileDescriptorType = 0,
import_kind: ast.ImportKind = undefined,
+ pub const Union = union(enum) {
+ success: Result,
+ failure: anyerror,
+ pending: PendingResolution,
+ not_found: void,
+ };
+
pub fn path(this: *Result) ?*Path {
if (!this.path_pair.primary.is_disabled)
return &this.path_pair.primary;
@@ -235,6 +250,7 @@ threadlocal var remap_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
threadlocal var load_as_file_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
threadlocal var remap_path_trailing_slash: [bun.MAX_PATH_BYTES]u8 = undefined;
threadlocal var tsconfig_paths_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+threadlocal var path_in_global_disk_cache_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
pub const DebugLogs = struct {
what: string = "",
@@ -256,33 +272,33 @@ pub const DebugLogs = struct {
// d.indent.deinit();
}
- pub fn increaseIndent(d: *DebugLogs) !void {
+ pub fn increaseIndent(d: *DebugLogs) void {
@setCold(true);
- try d.indent.append(" ");
+ d.indent.append(" ") catch unreachable;
}
- pub fn decreaseIndent(d: *DebugLogs) !void {
+ pub fn decreaseIndent(d: *DebugLogs) void {
@setCold(true);
d.indent.list.shrinkRetainingCapacity(d.indent.list.items.len - 1);
}
- pub fn addNote(d: *DebugLogs, _text: string) !void {
+ pub fn addNote(d: *DebugLogs, _text: string) void {
@setCold(true);
var text = _text;
const len = d.indent.len();
if (len > 0) {
- var __text = try d.notes.allocator.alloc(u8, text.len + len);
+ var __text = d.notes.allocator.alloc(u8, text.len + len) catch unreachable;
std.mem.copy(u8, __text, d.indent.list.items);
std.mem.copy(u8, __text[len..__text.len], _text);
d.notes.allocator.free(_text);
}
- try d.notes.append(logger.rangeData(null, logger.Range.None, text));
+ d.notes.append(logger.rangeData(null, logger.Range.None, text)) catch unreachable;
}
- pub fn addNoteFmt(d: *DebugLogs, comptime fmt: string, args: anytype) !void {
+ pub fn addNoteFmt(d: *DebugLogs, comptime fmt: string, args: anytype) void {
@setCold(true);
- return try d.addNote(try std.fmt.allocPrint(d.notes.allocator, fmt, args));
+ return d.addNote(std.fmt.allocPrint(d.notes.allocator, fmt, args) catch unreachable);
}
};
@@ -294,6 +310,62 @@ pub const MatchResult = struct {
package_json: ?*PackageJSON = null,
diff_case: ?Fs.FileSystem.Entry.Lookup.DifferentCase = null,
dir_info: ?*DirInfo = null,
+
+ pub const Union = union(enum) {
+ not_found: void,
+ success: MatchResult,
+ pending: PendingResolution,
+ failure: anyerror,
+ };
+};
+
+pub const PendingResolution = struct {
+ esm: ESModule.Package.External = .{},
+ dependency: Dependency.Version = .{},
+ resolution_id: Install.PackageID = Install.invalid_package_id,
+ root_dependency_id: Install.PackageID = Install.invalid_package_id,
+ import_record_id: u32 = std.math.maxInt(u32),
+ string_buf: []u8 = "",
+ tag: Tag,
+
+ pub const List = std.MultiArrayList(PendingResolution);
+
+ pub fn deinitListItems(list_: List, allocator: std.mem.Allocator) void {
+ var list = list_;
+ var dependencies = list.items(.dependency);
+ var string_bufs = list.items(.string_buf);
+ for (dependencies) |*dependency| {
+ dependency.deinit();
+ }
+
+ for (string_bufs) |string_buf| {
+ allocator.free(string_buf);
+ }
+ }
+
+ pub fn deinit(this: *PendingResolution, allocator: std.mem.Allocator) void {
+ this.dependency.deinit();
+ allocator.free(this.string_buf);
+ }
+
+ pub const Tag = enum {
+ download,
+ resolve,
+ done,
+ };
+
+ pub fn init(
+ allocator: std.mem.Allocator,
+ esm: ESModule.Package,
+ dependency: Dependency.Version,
+ resolution_id: Install.PackageID,
+ ) !PendingResolution {
+ return PendingResolution{
+ .esm = try esm.copy(allocator),
+ .dependency = dependency,
+ .resolution_id = resolution_id,
+ };
+ }
};
pub const LoadResult = struct {
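
`PendingResolution.List` is a `std.MultiArrayList`, so cleanup code like `deinitListItems` can walk one field's column at a time (all `dependency` values, then all `string_buf` values) without loading the rest of each element. A small sketch of that column-wise access, with a hypothetical `Pending` element type:

```zig
const std = @import("std");

const Pending = struct {
    import_record_id: u32,
    string_buf: []u8,
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();

    var list = std.MultiArrayList(Pending){};
    defer list.deinit(allocator);

    try list.append(allocator, .{
        .import_record_id = 0,
        .string_buf = try allocator.dupe(u8, "react@18"), // hypothetical specifier
    });

    // Column-wise cleanup: free every string_buf without touching the ids.
    for (list.items(.string_buf)) |buf| allocator.free(buf);
    list.shrinkRetainingCapacity(0);
}
```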
@@ -358,6 +430,11 @@ pub const Resolver = struct {
caches: CacheSet,
+ package_manager: ?*PackageManager = null,
+ onWakePackageManager: PackageManager.WakeHandler = .{},
+ main_file_for_package_manager: []const u8 = "",
+ env_loader: ?*DotEnv.Loader = null,
+
// These are sets that represent various conditions for the "exports" field
// in package.json.
// esm_conditions_default: std.StringHashMap(bool),
@@ -402,6 +479,27 @@ pub const Resolver = struct {
// all parent directories
dir_cache: *DirInfo.HashMap,
+ pub fn getPackageManager(this: *Resolver) *PackageManager {
+ if (this.package_manager != null) {
+ return this.package_manager.?;
+ }
+ bun.HTTPThead.init() catch unreachable;
+ this.package_manager = PackageManager.initWithRuntime(
+ this.log,
+ this.opts.install,
+ this.allocator,
+ .{},
+ this.env_loader.?,
+ ) catch @panic("Failed to initialize package manager");
+ this.package_manager.?.onWake = this.onWakePackageManager;
+
+ return this.package_manager.?;
+ }
+
+ pub inline fn usePackageManager(self: *const ThisResolver) bool {
+ return self.opts.global_cache.isEnabled();
+ }
+
pub fn init1(
allocator: std.mem.Allocator,
log: *logger.Log,
@@ -590,7 +688,13 @@ pub const Resolver = struct {
}
}
- pub fn resolve(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result {
+ pub fn resolveAndAutoInstall(
+ r: *ThisResolver,
+ source_dir: string,
+ import_path: string,
+ kind: ast.ImportKind,
+ global_cache: GlobalCache,
+ ) Result.Union {
const original_order = r.extension_order;
defer r.extension_order = original_order;
r.extension_order = switch (kind) {
@@ -613,10 +717,10 @@ pub const Resolver = struct {
r.debug_logs.?.deinit();
}
- r.debug_logs = try DebugLogs.init(r.allocator);
+ r.debug_logs = DebugLogs.init(r.allocator) catch unreachable;
}
- if (import_path.len == 0) return error.ModuleNotFound;
+ if (import_path.len == 0) return .{ .not_found = {} };
// Certain types of URLs default to being external for convenience
if (r.isExternalPattern(import_path) or
@@ -633,17 +737,19 @@ pub const Resolver = struct {
strings.startsWith(import_path, "//"))
{
if (r.debug_logs) |*debug| {
- try debug.addNote("Marking this path as implicitly external");
+ debug.addNote("Marking this path as implicitly external");
r.flushDebugLogs(.success) catch {};
}
- return Result{
- .import_kind = kind,
- .path_pair = PathPair{
- .primary = Path.init(import_path),
+ return .{
+ .success = Result{
+ .import_kind = kind,
+ .path_pair = PathPair{
+ .primary = Path.init(import_path),
+ },
+ .is_external = true,
+ .module_type = .esm,
},
- .is_external = true,
- .module_type = .esm,
};
}
@@ -653,22 +759,26 @@ pub const Resolver = struct {
// "@import 'data:text/css,body{background:white}';"
if (data_url.decode_mime_type() != .Unsupported) {
if (r.debug_logs) |*debug| {
- debug.addNote("Putting this path in the \"dataurl\" namespace") catch {};
+ debug.addNote("Putting this path in the \"dataurl\" namespace");
r.flushDebugLogs(.success) catch {};
}
- return Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } };
+ return .{
+ .success = Result{ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") } },
+ };
}
// "background: url(data:image/png;base64,iVBORw0KGgo=);"
if (r.debug_logs) |*debug| {
- debug.addNote("Marking this \"dataurl\" as external") catch {};
+ debug.addNote("Marking this \"dataurl\" as external");
r.flushDebugLogs(.success) catch {};
}
- return Result{
- .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = PathPair{ .primary = Path.initWithNamespace(import_path, "dataurl") },
+ .is_external = true,
+ },
};
}
@@ -676,27 +786,48 @@ pub const Resolver = struct {
// virtual modules (e.g. stdin) if a resolve directory is not specified.
if (source_dir.len == 0) {
if (r.debug_logs) |*debug| {
- debug.addNote("Cannot resolve this path without a directory") catch {};
+ debug.addNote("Cannot resolve this path without a directory");
r.flushDebugLogs(.fail) catch {};
}
- return error.MissingResolveDir;
+ return .{ .failure = error.MissingResolveDir };
}
// r.mutex.lock();
// defer r.mutex.unlock();
errdefer (r.flushDebugLogs(.fail) catch {});
- var result = (try r.resolveWithoutSymlinks(source_dir, import_path, kind)) orelse {
- r.flushDebugLogs(.fail) catch {};
- return error.ModuleNotFound;
- };
- if (!strings.eqlComptime(result.path_pair.primary.namespace, "node"))
- try r.finalizeResult(&result, kind);
+ switch (r.resolveWithoutSymlinks(source_dir, import_path, kind, global_cache)) {
+ .success => |*result| {
+ if (!strings.eqlComptime(result.path_pair.primary.namespace, "node"))
+ r.finalizeResult(result, kind) catch |err| return .{ .failure = err };
- r.flushDebugLogs(.success) catch {};
- result.import_kind = kind;
- return result;
+ r.flushDebugLogs(.success) catch {};
+ result.import_kind = kind;
+ return .{ .success = result.* };
+ },
+ .failure => |e| {
+ r.flushDebugLogs(.fail) catch {};
+ return .{ .failure = e };
+ },
+ .pending => |pending| {
+ r.flushDebugLogs(.fail) catch {};
+ return .{ .pending = pending };
+ },
+ .not_found => {
+ r.flushDebugLogs(.fail) catch {};
+ return .{ .not_found = {} };
+ },
+ }
+ }
+
+ pub fn resolve(r: *ThisResolver, source_dir: string, import_path: string, kind: ast.ImportKind) !Result {
+ switch (r.resolveAndAutoInstall(source_dir, import_path, kind, GlobalCache.disable)) {
+ .success => |result| return result,
+ .pending, .not_found => return error.ModuleNotFound,
+
+ .failure => |e| return e,
+ }
}
const ModuleTypeMap = bun.ComptimeStringMap(options.ModuleType, .{
@@ -738,7 +869,7 @@ pub const Resolver = struct {
if (result.file_fd == 0) result.file_fd = query.entry.cache.fd;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.text, symlink_path }) catch {};
+ debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ path.text, symlink_path });
}
} else if (dir.abs_real_path.len > 0) {
var parts = [_]string{ dir.abs_real_path, query.entry.base() };
@@ -776,7 +907,7 @@ pub const Resolver = struct {
const symlink = try Fs.FileSystem.FilenameStore.instance.append(@TypeOf(out), out);
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text }) catch {};
+ debug.addNoteFmt("Resolved symlink \"{s}\" to \"{s}\"", .{ symlink, path.text });
}
query.entry.cache.symlink = PathString.init(symlink);
if (result.file_fd == 0) result.file_fd = query.entry.cache.fd;
@@ -796,7 +927,13 @@ pub const Resolver = struct {
result.module_type = module_type;
}
- pub fn resolveWithoutSymlinks(r: *ThisResolver, source_dir: string, import_path_: string, kind: ast.ImportKind) !?Result {
+ pub fn resolveWithoutSymlinks(
+ r: *ThisResolver,
+ source_dir: string,
+ import_path_: string,
+ kind: ast.ImportKind,
+ global_cache: GlobalCache,
+ ) Result.Union {
var import_path = import_path_;
// This implements the module resolution algorithm from node.js, which is
@@ -819,7 +956,7 @@ pub const Resolver = struct {
// users will not be able to accidentally make use of these paths.
if (strings.startsWith(import_path, "/") or std.fs.path.isAbsolutePosix(import_path)) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path}) catch {};
+ debug.addNoteFmt("The import \"{s}\" is being treated as an absolute path", .{import_path});
}
// First, check path overrides from the nearest enclosing TypeScript "tsconfig.json" file
@@ -830,13 +967,15 @@ pub const Resolver = struct {
if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
// We don't set the directory fd here because it might remap an entirely different directory
- return Result{
- .path_pair = res.path_pair,
- .diff_case = res.diff_case,
- .package_json = res.package_json,
- .dirname_fd = res.dirname_fd,
- .file_fd = res.file_fd,
- .jsx = tsconfig.mergeJSX(result.jsx),
+ return .{
+ .success = Result{
+ .path_pair = res.path_pair,
+ .diff_case = res.diff_case,
+ .package_json = res.package_json,
+ .dirname_fd = res.dirname_fd,
+ .file_fd = res.file_fd,
+ .jsx = tsconfig.mergeJSX(result.jsx),
+ },
};
}
}
@@ -849,28 +988,32 @@ pub const Resolver = struct {
// That way we preserve the literal text in the output and don't generate
// a relative path from the output directory to that path.
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path}) catch {};
+ debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{import_path});
}
- return Result{
- .path_pair = .{ .primary = Path.init(import_path) },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = .{ .primary = Path.init(import_path) },
+ .is_external = true,
+ },
};
}
// Run node's resolution rules (e.g. adding ".js")
if (r.loadAsFileOrDirectory(import_path, kind)) |entry| {
- return Result{
- .dirname_fd = entry.dirname_fd,
- .path_pair = entry.path_pair,
- .diff_case = entry.diff_case,
- .package_json = entry.package_json,
- .file_fd = entry.file_fd,
- .jsx = r.opts.jsx,
+ return .{
+ .success = Result{
+ .dirname_fd = entry.dirname_fd,
+ .path_pair = entry.path_pair,
+ .diff_case = entry.diff_case,
+ .package_json = entry.package_json,
+ .file_fd = entry.file_fd,
+ .jsx = r.opts.jsx,
+ },
};
}
- return null;
+ return .{ .not_found = {} };
}
// Check both relative and package paths for CSS URL tokens, with relative
@@ -889,12 +1032,14 @@ pub const Resolver = struct {
// That way we preserve the literal text in the output and don't generate
// a relative path from the output directory to that path.
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path}) catch {};
+ debug.addNoteFmt("The path \"{s}\" is marked as external by the user", .{abs_path});
}
- return Result{
- .path_pair = .{ .primary = Path.init(r.fs.dirname_store.append(@TypeOf(abs_path), abs_path) catch unreachable) },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = .{ .primary = Path.init(r.fs.dirname_store.append(@TypeOf(abs_path), abs_path) catch unreachable) },
+ .is_external = true,
+ },
};
}
@@ -912,23 +1057,28 @@ pub const Resolver = struct {
if (remap.len == 0) {
var _path = Path.init(r.fs.dirname_store.append(string, abs_path) catch unreachable);
_path.is_disabled = true;
- return Result{
- .path_pair = PathPair{
- .primary = _path,
+ return .{
+ .success = Result{
+ .path_pair = PathPair{
+ .primary = _path,
+ },
},
};
}
- if (r.resolveWithoutRemapping(import_dir_info, remap, kind)) |_result| {
- result = Result{
- .path_pair = _result.path_pair,
- .diff_case = _result.diff_case,
- .dirname_fd = _result.dirname_fd,
- .package_json = pkg,
- .jsx = r.opts.jsx,
- };
- check_relative = false;
- check_package = false;
+ switch (r.resolveWithoutRemapping(import_dir_info, remap, kind, global_cache)) {
+ .success => |_result| {
+ result = Result{
+ .path_pair = _result.path_pair,
+ .diff_case = _result.diff_case,
+ .dirname_fd = _result.dirname_fd,
+ .package_json = pkg,
+ .jsx = r.opts.jsx,
+ };
+ check_relative = false;
+ check_package = false;
+ },
+ else => {},
}
}
}
@@ -945,7 +1095,7 @@ pub const Resolver = struct {
.jsx = r.opts.jsx,
};
} else if (!check_package) {
- return null;
+ return .{ .not_found = {} };
}
}
}
@@ -966,7 +1116,7 @@ pub const Resolver = struct {
result.module_type = .cjs;
result.package_json = @intToPtr(*PackageJSON, @ptrToInt(fallback_module.package_json));
result.is_from_node_modules = true;
- return result;
+ return .{ .success = result };
// "node:*
// "fs"
// "fs/*"
@@ -982,7 +1132,7 @@ pub const Resolver = struct {
result.module_type = .cjs;
result.path_pair.primary.is_disabled = true;
result.is_from_node_modules = true;
- return result;
+ return .{ .success = result };
}
}
@@ -992,11 +1142,13 @@ pub const Resolver = struct {
while (true) {
if (r.opts.external.node_modules.contains(query)) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query}) catch {};
+ debug.addNoteFmt("The path \"{s}\" was marked as external by the user", .{query});
}
- return Result{
- .path_pair = .{ .primary = Path.init(query) },
- .is_external = true,
+ return .{
+ .success = Result{
+ .path_pair = .{ .primary = Path.init(query) },
+ .is_external = true,
+ },
};
}
@@ -1007,7 +1159,7 @@ pub const Resolver = struct {
}
}
- var source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return null;
+ var source_dir_info = (r.dirInfoCached(source_dir) catch null) orelse return .{ .not_found = {} };
// Support remapping one package path to another via the "browser" field
if (source_dir_info.getEnclosingBrowserScope()) |browser_scope| {
@@ -1020,30 +1172,37 @@ pub const Resolver = struct {
if (remapped.len == 0) {
// "browser": {"module": false}
// does the module exist in the filesystem?
- if (r.loadNodeModules(import_path, kind, source_dir_info, false)) |node_module| {
- var pair = node_module.path_pair;
- pair.primary.is_disabled = true;
- if (pair.secondary != null) {
- pair.secondary.?.is_disabled = true;
- }
- return Result{
- .path_pair = pair,
- .dirname_fd = node_module.dirname_fd,
- .diff_case = node_module.diff_case,
- .package_json = package_json,
- .jsx = r.opts.jsx,
- };
- } else {
- // "browser": {"module": false}
- // the module doesn't exist and it's disabled
- // so we should just not try to load it
- var primary = Path.init(import_path);
- primary.is_disabled = true;
- return Result{
- .path_pair = PathPair{ .primary = primary },
- .diff_case = null,
- .jsx = r.opts.jsx,
- };
+ switch (r.loadNodeModules(import_path, kind, source_dir_info, global_cache, false)) {
+ .success => |node_module| {
+ var pair = node_module.path_pair;
+ pair.primary.is_disabled = true;
+ if (pair.secondary != null) {
+ pair.secondary.?.is_disabled = true;
+ }
+ return .{
+ .success = Result{
+ .path_pair = pair,
+ .dirname_fd = node_module.dirname_fd,
+ .diff_case = node_module.diff_case,
+ .package_json = package_json,
+ .jsx = r.opts.jsx,
+ },
+ };
+ },
+ else => {
+ // "browser": {"module": false}
+ // the module doesn't exist and it's disabled
+ // so we should just not try to load it
+ var primary = Path.init(import_path);
+ primary.is_disabled = true;
+ return .{
+ .success = Result{
+ .path_pair = PathPair{ .primary = primary },
+ .diff_case = null,
+ .jsx = r.opts.jsx,
+ },
+ };
+ },
}
}
@@ -1053,54 +1212,59 @@ pub const Resolver = struct {
}
}
- if (r.resolveWithoutRemapping(source_dir_info, import_path, kind)) |res| {
- result.path_pair = res.path_pair;
- result.dirname_fd = res.dirname_fd;
- result.file_fd = res.file_fd;
- result.package_json = res.package_json;
- result.diff_case = res.diff_case;
- result.is_from_node_modules = result.is_from_node_modules or res.is_node_module;
- result.jsx = r.opts.jsx;
+ switch (r.resolveWithoutRemapping(source_dir_info, import_path, kind, global_cache)) {
+ .success => |res| {
+ result.path_pair = res.path_pair;
+ result.dirname_fd = res.dirname_fd;
+ result.file_fd = res.file_fd;
+ result.package_json = res.package_json;
+ result.diff_case = res.diff_case;
+ result.is_from_node_modules = result.is_from_node_modules or res.is_node_module;
+ result.jsx = r.opts.jsx;
- if (res.path_pair.primary.is_disabled and res.path_pair.secondary == null) {
- return result;
- }
+ if (res.path_pair.primary.is_disabled and res.path_pair.secondary == null) {
+ return .{ .success = result };
+ }
- if (res.package_json != null) {
- var base_dir_info = res.dir_info orelse (r.readDirInfo(res.path_pair.primary.name.dir) catch null) orelse return result;
- if (base_dir_info.getEnclosingBrowserScope()) |browser_scope| {
- if (r.checkBrowserMap(
- browser_scope,
- res.path_pair.primary.text,
- .AbsolutePath,
- )) |remap| {
- if (remap.len == 0) {
- result.path_pair.primary.is_disabled = true;
- result.path_pair.primary = Fs.Path.initWithNamespace(remap, "file");
- } else {
- if (r.resolveWithoutRemapping(browser_scope, remap, kind)) |remapped| {
- result.path_pair = remapped.path_pair;
- result.dirname_fd = remapped.dirname_fd;
- result.file_fd = remapped.file_fd;
- result.package_json = remapped.package_json;
- result.diff_case = remapped.diff_case;
-
- result.is_from_node_modules = result.is_from_node_modules or remapped.is_node_module;
- return result;
+ if (res.package_json != null) {
+ var base_dir_info = res.dir_info orelse (r.readDirInfo(res.path_pair.primary.name.dir) catch null) orelse return .{ .success = result };
+ if (base_dir_info.getEnclosingBrowserScope()) |browser_scope| {
+ if (r.checkBrowserMap(
+ browser_scope,
+ res.path_pair.primary.text,
+ .AbsolutePath,
+ )) |remap| {
+ if (remap.len == 0) {
+ result.path_pair.primary.is_disabled = true;
+ result.path_pair.primary = Fs.Path.initWithNamespace(remap, "file");
+ } else {
+ switch (r.resolveWithoutRemapping(browser_scope, remap, kind, global_cache)) {
+ .success => |remapped| {
+ result.path_pair = remapped.path_pair;
+ result.dirname_fd = remapped.dirname_fd;
+ result.file_fd = remapped.file_fd;
+ result.package_json = remapped.package_json;
+ result.diff_case = remapped.diff_case;
+
+ result.is_from_node_modules = result.is_from_node_modules or remapped.is_node_module;
+ return .{ .success = result };
+ },
+ else => {},
+ }
}
}
}
}
- }
- return result;
- } else {
- // Note: node's "self references" are not currently supported
- return null;
+ return .{ .success = result };
+ },
+ .pending => |p| return .{ .pending = p },
+ .failure => |p| return .{ .failure = p },
+ else => return .{ .not_found = {} },
}
}
- return result;
+ return .{ .success = result };
}
pub fn packageJSONForResolvedNodeModule(
@@ -1201,17 +1365,18 @@ pub const Resolver = struct {
import_path: string,
kind: ast.ImportKind,
_dir_info: *DirInfo,
+ global_cache: GlobalCache,
forbid_imports: bool,
- ) ?MatchResult {
+ ) MatchResult.Union {
var dir_info = _dir_info;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path }) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path });
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -1221,7 +1386,7 @@ pub const Resolver = struct {
// Try path substitutions first
if (tsconfig.paths.count() > 0) {
if (r.matchTSConfigPaths(tsconfig, import_path, kind)) |res| {
- return res;
+ return .{ .success = res };
}
}
@@ -1232,7 +1397,7 @@ pub const Resolver = struct {
const abs = r.fs.absBuf(&paths, &load_as_file_or_directory_via_tsconfig_base_path);
if (r.loadAsFileOrDirectory(abs, kind)) |res| {
- return res;
+ return .{ .success = res };
}
// r.allocator.free(abs);
}
@@ -1248,9 +1413,9 @@ pub const Resolver = struct {
if (import_path.len == 1 or strings.hasPrefix(import_path, "#/")) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("The path \"{s}\" must not equal \"#\" and must not start with \"#/\"", .{import_path}) catch {};
+ debug.addNoteFmt("The path \"{s}\" must not equal \"#\" and must not start with \"#/\"", .{import_path});
}
- return null;
+ return .{ .not_found = {} };
}
const esmodule = ESModule{
@@ -1269,24 +1434,34 @@ pub const Resolver = struct {
esm_resolution.path,
kind,
dir_info,
+ global_cache,
true,
);
- return r.handleESMResolution(esm_resolution, package_json.source.path.name.dir, kind, package_json);
+ if (r.handleESMResolution(esm_resolution, package_json.source.path.name.dir, kind, package_json)) |result| {
+ return .{ .success = result };
+ }
+
+ return .{ .not_found = {} };
}
}
}
}
+ var source_dir_info = dir_info;
+ var any_node_modules_folder = false;
+ const use_node_module_resolver = global_cache != .force;
+
// Then check for the package in any enclosing "node_modules" directories
- while (true) {
+ while (use_node_module_resolver) {
// Skip directories that are themselves called "node_modules", since we
// don't ever want to search for "node_modules/node_modules"
- if (dir_info.has_node_modules) {
+ if (dir_info.hasNodeModules()) {
+ any_node_modules_folder = true;
var _paths = [_]string{ dir_info.abs_path, "node_modules", import_path };
const abs_path = r.fs.absBuf(&_paths, &node_modules_check_buf);
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path}) catch {};
+ debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path});
}
if (esm_) |esm| {
@@ -1318,25 +1493,448 @@ pub const Resolver = struct {
// directory path accidentally being interpreted as URL escapes.
const esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root);
- return r.handleESMResolution(esm_resolution, abs_package_path, kind, package_json);
+ if (r.handleESMResolution(esm_resolution, abs_package_path, kind, package_json)) |result| {
+ return .{ .success = result };
+ }
+
+ return .{ .not_found = {} };
}
}
}
}
if (r.loadAsFileOrDirectory(abs_path, kind)) |res| {
- return res;
+ return .{ .success = res };
}
- // r.allocator.free(abs_path);
}
dir_info = dir_info.getParent() orelse break;
}
+ dir_info = source_dir_info;
+
+ // This is the magic: local resolution failed, so fall back to resolving (and, if allowed, auto-installing) from the global cache.
+ if (global_cache.canUse(any_node_modules_folder) and r.usePackageManager() and esm_ != null) {
+ const esm = esm_.?.withAutoVersion();
+ load_module_from_cache: {
+
+ // If the source directory doesn't have a node_modules directory, we can
+ // check the global cache directory for a package.json file.
+ var manager = r.getPackageManager();
+ var dependency_version: Dependency.Version = .{};
+ var dependency_behavior = @intToEnum(Dependency.Behavior, Dependency.Behavior.normal);
+ // const initial_pending_tasks = manager.pending_tasks;
+ var resolved_package_id: Install.PackageID = brk: {
+ // check if the package.json in the source directory was already added to the lockfile
+ // and try to look up the dependency from there
+ if (dir_info.package_json_for_dependencies) |package_json| {
+ var dependencies_list: []const Dependency = &[_]Dependency{};
+ var string_buf: []const u8 = "";
+ const resolve_from_lockfile = package_json.package_manager_package_id != Install.invalid_package_id;
+
+ if (resolve_from_lockfile) {
+ const dependencies = &manager.lockfile.packages.items(.dependencies)[package_json.package_manager_package_id];
+
+ // try to find this package name in the dependencies of the enclosing package
+ dependencies_list = dependencies.get(manager.lockfile.buffers.dependencies.items);
+ string_buf = manager.lockfile.buffers.string_bytes.items;
+ } else if (esm_.?.version.len == 0) {
+ // If you don't specify a version, default to the one chosen in your package.json
+ dependencies_list = package_json.dependencies.map.values();
+ string_buf = package_json.dependencies.source_buf;
+ }
+
+ var hash: u64 = std.math.maxInt(u64);
+
+ for (dependencies_list) |dependency, dependency_id| {
+ const dep_name_ = &dependency.name;
+ const dep_name = dep_name_.slice(string_buf);
+ if (dep_name.len == esm.name.len) {
+ if (hash == std.math.maxInt(u64)) {
+ hash = bun.hash(dep_name);
+ }
+
+ if (hash != dependency.name_hash) {
+ continue;
+ }
+
+ std.debug.assert(strings.eql(dep_name, esm.name));
+
+ dependency_version = dependency.version;
+ dependency_behavior = dependency.behavior;
+
+ if (resolve_from_lockfile) {
+ const resolutions = &manager.lockfile.packages.items(.resolutions)[package_json.package_manager_package_id];
+
+ // found it!
+ break :brk resolutions.get(manager.lockfile.buffers.resolutions.items)[dependency_id];
+ }
+
+ break;
+ }
+ }
+ }
+
+ // check if the lockfile already resolved this package somewhere
+ {
+ if (dependency_version.tag == .uninitialized) {
+ const sliced_string = Semver.SlicedString.init(esm.version, esm.version);
+ if (esm_.?.version.len > 0 and dir_info.enclosing_package_json != null and global_cache.allowVersionSpecifier()) {
+ return .{ .failure = error.VersionSpecifierNotAllowedHere };
+ }
+ dependency_version = Dependency.parse(
+ r.allocator,
+ esm.version,
+ &sliced_string,
+ r.log,
+ ) orelse break :load_module_from_cache;
+ }
+
+ // First, check whether the lockfile already resolved any version of this package anywhere.
+ if (manager.lockfile.resolve(esm.name, dependency_version)) |id| {
+ break :brk id;
+ }
+ }
+
+ // If we get here, the lockfile doesn't have this package at all:
+ // we know nothing about it yet.
+ break :brk Install.invalid_package_id;
+ };
+
+ // Now, there are two possible states:
+ // 1) We have resolved the package ID, either from the
+ // lockfile globally OR from the particular package.json
+ // dependencies list
+ //
+ // 2) We parsed the Dependency.Version but there is no
+ // existing resolved package ID
+
+ // If it's an exact version, we can immediately look it up in the global cache and resolve from there.
+ // If the resolved package ID is _not_ invalid, we can just check the lockfile for its resolution.
+
+ // If this returns null, it means we need to *resolve* the package.
+ // Even after resolution, we might still need to *download* it.
+ // There are two distinct steps here: resolve, then download.
+ const resolution: Resolution = brk: {
+ if (resolved_package_id != Install.invalid_package_id) {
+ break :brk manager.lockfile.packages.items(.resolution)[resolved_package_id];
+ }
+
+ // The dependency is unsupported or not yet resolved; we may need to install it into the cache.
+ switch (r.enqueueDependencyToResolve(
+ dir_info.package_json_for_dependencies orelse dir_info.package_json,
+ esm,
+ dependency_behavior,
+ &resolved_package_id,
+ dependency_version,
+ )) {
+ .resolution => |res| break :brk res,
+ .pending => |pending| return .{ .pending = pending },
+ .failure => |err| return .{ .failure = err },
+ // this means we looked it up in the registry and the package doesn't exist or the version doesn't exist
+ .not_found => return .{ .not_found = {} },
+ }
+ };
+
+ const dir_path_for_resolution = manager.pathForResolution(resolved_package_id, resolution, &path_in_global_disk_cache_buf) catch |err| {
+ // if it's missing, we need to install it
+ if (err == error.FileNotFound) {
+ switch (manager.getPreinstallState(resolved_package_id, manager.lockfile)) {
+ .done => {
+ var path = Fs.Path.init(import_path);
+ path.is_disabled = true;
+ // this might mean the package is disabled
+ return .{
+ .success = .{
+ .path_pair = .{
+ .primary = path,
+ },
+ },
+ };
+ },
+ .extract, .extracting => |st| {
+ if (!global_cache.canInstall()) {
+ return .{ .not_found = {} };
+ }
+ var builder = Semver.String.Builder{};
+ esm.count(&builder);
+ builder.allocate(manager.allocator) catch unreachable;
+ const cloned = esm.clone(&builder);
+
+ if (st == .extract)
+ manager.enqueuePackageForDownload(
+ esm.name,
+ resolved_package_id,
+ resolution.value.npm.version,
+ manager.lockfile.str(resolution.value.npm.url),
+ .{
+ .root_request_id = 0,
+ },
+ );
+
+ return .{
+ .pending = .{
+ .esm = cloned,
+ .dependency = dependency_version,
+ .resolution_id = resolved_package_id,
+
+ .string_buf = builder.allocatedSlice(),
+ .tag = .download,
+ },
+ };
+ },
+ else => {},
+ }
+ }
+
+ return .{ .failure = err };
+ };
+
+ if (r.dirInfoForResolution(dir_path_for_resolution, resolved_package_id)) |dir_info_to_use_| {
+ if (dir_info_to_use_) |pkg_dir_info| {
+ const abs_package_path = pkg_dir_info.abs_path;
+
+ if (pkg_dir_info.package_json) |package_json| {
+ if (package_json.exports) |exports_map| {
+ // The condition set is determined by the kind of import
+ const esmodule = ESModule{
+ .conditions = switch (kind) {
+ ast.ImportKind.require,
+ ast.ImportKind.require_resolve,
+ => r.opts.conditions.require,
+ else => r.opts.conditions.import,
+ },
+ .allocator = r.allocator,
+ .debug_logs = if (r.debug_logs) |*debug|
+ debug
+ else
+ null,
+ };
+
+ // Resolve against the path "/", then join it with the absolute
+ // directory path. This is done because ESM package resolution uses
+ // URLs while our path resolution uses file system paths. We don't
+ // want problems due to Windows paths, which are very unlike URL
+ // paths. We also want to avoid any "%" characters in the absolute
+ // directory path accidentally being interpreted as URL escapes.
+ const esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root);
+
+ if (r.handleESMResolution(esm_resolution, abs_package_path, kind, package_json)) |*result| {
+ result.is_node_module = true;
+ return .{ .success = result.* };
+ }
+
+ return .{ .not_found = {} };
+ }
+ }
+
+ var _paths = [_]string{ pkg_dir_info.abs_path, esm.subpath };
+ const abs_path = r.fs.absBuf(&_paths, &node_modules_check_buf);
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Checking for a package in the directory \"{s}\"", .{abs_path});
+ }
+
+ if (r.loadAsFileOrDirectory(abs_path, kind)) |*res| {
+ res.is_node_module = true;
+ return .{ .success = res.* };
+ }
+ }
+ } else |err| {
+ return .{ .failure = err };
+ }
+ }
+ }
+
// Mostly to cut scope, we don't resolve the `NODE_PATH` environment variable.
// But also: https://github.com/nodejs/node/issues/38128#issuecomment-814969356
+ return .{ .not_found = {} };
+ }
+ fn dirInfoForResolution(
+ r: *ThisResolver,
+ dir_path: []const u8,
+ package_id: Install.PackageID,
+ ) !?*DirInfo {
+ std.debug.assert(r.package_manager != null);
- return null;
+ var dir_cache_info_result = r.dir_cache.getOrPut(dir_path) catch unreachable;
+ if (dir_cache_info_result.status == .exists) {
+ // we've already looked up this package before
+ return r.dir_cache.atIndex(dir_cache_info_result.index).?;
+ }
+ var rfs = &r.fs.fs;
+ var cached_dir_entry_result = rfs.entries.getOrPut(dir_path) catch unreachable;
+
+ var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined;
+ var needs_iter: bool = true;
+ var open_dir = std.fs.openDirAbsolute(dir_path, .{ .iterate = true }) catch |err| {
+ switch (err) {
+ error.FileNotFound => unreachable,
+ else => {
+ // TODO: handle this error better
+ r.log.addErrorFmt(null, logger.Loc.Empty, r.allocator, "Unable to open directory: {s}", .{std.mem.span(@errorName(err))}) catch unreachable;
+ return err;
+ },
+ }
+ };
+
+ if (rfs.entries.atIndex(cached_dir_entry_result.index)) |cached_entry| {
+ if (cached_entry.* == .entries) {
+ dir_entries_option = cached_entry;
+ needs_iter = false;
+ }
+ }
+
+ if (needs_iter) {
+ const allocator = r.fs.allocator;
+ dir_entries_option = rfs.entries.put(&cached_dir_entry_result, .{
+ .entries = Fs.FileSystem.DirEntry.init(dir_path),
+ }) catch unreachable;
+
+ if (FeatureFlags.store_file_descriptors) {
+ Fs.FileSystem.setMaxFd(open_dir.fd);
+ dir_entries_option.entries.fd = open_dir.fd;
+ }
+ var dir_iterator = open_dir.iterate();
+ while (dir_iterator.next() catch null) |_value| {
+ dir_entries_option.entries.addEntry(_value, allocator, void, void{}) catch unreachable;
+ }
+ }
+
+ // We must initialize it as empty so that the result index is correct.
+ // This is important so that browser_scope has a valid index.
+ var dir_info_ptr = r.dir_cache.put(&dir_cache_info_result, DirInfo{}) catch unreachable;
+
+ try r.dirInfoUncached(
+ dir_info_ptr,
+ dir_path,
+ dir_entries_option,
+ dir_cache_info_result,
+ cached_dir_entry_result.index,
+ // Packages in the global disk cache are top-level; we shouldn't
+ // try to check for a parent package.json.
+ null,
+ allocators.NotFound,
+ open_dir.fd,
+ package_id,
+ );
+ return dir_info_ptr;
+ }
+
+ const DependencyToResolve = union(enum) {
+ not_found: void,
+ pending: PendingResolution,
+ failure: anyerror,
+ resolution: Resolution,
+ };
+
+ fn enqueueDependencyToResolve(
+ r: *ThisResolver,
+ package_json_: ?*PackageJSON,
+ esm: ESModule.Package,
+ behavior: Dependency.Behavior,
+ input_package_id_: *Install.PackageID,
+ version: Dependency.Version,
+ ) DependencyToResolve {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("Enqueueing pending dependency \"{s}@{s}\"", .{ esm.name, esm.version });
+ }
+
+ const input_package_id = input_package_id_.*;
+ var pm = r.getPackageManager();
+ if (comptime Environment.allow_assert) {
+ // we should never be trying to resolve a dependency that is already resolved
+ std.debug.assert(pm.lockfile.resolve(esm.name, version) == null);
+ }
+
+ // Add the containing package to the lockfile
+
+ var package: Package = .{};
+
+ if (pm.lockfile.packages.len == 0 and input_package_id == Install.invalid_package_id) {
+ if (package_json_) |package_json| {
+ package = Package.fromPackageJSON(
+ pm.allocator,
+ pm.lockfile,
+ r.log,
+ package_json,
+ Install.Features{
+ .dev_dependencies = true,
+ .is_main = true,
+ .dependencies = true,
+ .optional_dependencies = true,
+ },
+ ) catch |err| {
+ return .{ .failure = err };
+ };
+
+ package.resolution = .{
+ .tag = .root,
+ .value = .{ .root = {} },
+ };
+
+ package = pm.lockfile.appendPackage(package) catch |err| {
+ return .{ .failure = err };
+ };
+ package_json.package_manager_package_id = package.meta.id;
+ } else {
+ // we're resolving an unknown package
+ // the unknown package is the root package
+ package = Package{
+ .name = Semver.String.init("", ""),
+ };
+ package.resolution = .{
+ .tag = .root,
+ .value = .{ .root = {} },
+ };
+ package = pm.lockfile.appendPackage(package) catch |err| {
+ return .{ .failure = err };
+ };
+ }
+ }
+
+ if (r.opts.prefer_offline_install) {
+ if (pm.resolveFromDiskCache(esm.name, version)) |package_id| {
+ input_package_id_.* = package_id;
+ return .{ .resolution = pm.lockfile.packages.items(.resolution)[package_id] };
+ }
+ }
+
+ if (input_package_id == Install.invalid_package_id or input_package_id == 0) {
+
+ // All packages are enqueued to the root
+ // because we download all the npm package dependencies
+ switch (pm.enqueueDependencyToRoot(esm.name, esm.version, version, behavior)) {
+ .resolution => |result| {
+ input_package_id_.* = result.package_id;
+ return .{ .resolution = result.resolution };
+ },
+ .pending => |id| {
+ var builder = Semver.String.Builder{};
+ esm.count(&builder);
+ builder.allocate(pm.allocator) catch unreachable;
+ const cloned = esm.clone(&builder);
+
+ return .{
+ .pending = .{
+ .esm = cloned,
+ .dependency = version,
+ .resolution_id = Install.invalid_package_id,
+ .root_dependency_id = id,
+ .string_buf = builder.allocatedSlice(),
+ .tag = .resolve,
+ },
+ };
+ },
+ .not_found => {
+ return .{ .not_found = {} };
+ },
+ .failure => |err| {
+ return .{ .failure = err };
+ },
+ }
+ }
+
+ bun.unreachablePanic("TODO: implement enqueueDependencyToResolve for non-root packages", .{});
}
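
The `.pending` payloads built above encode the two-step machine described earlier in this hunk: `.tag = .resolve` means the registry still has to pick a concrete version (hence `root_dependency_id`), while `.tag = .download` already has a `resolution_id` and only awaits the tarball. A hypothetical consumer sketch (the `waitFor*` helpers are stand-ins, not APIs from this commit):

```zig
if (pending.tag == .resolve) {
    // Step 1: still waiting on the registry to pick a concrete version.
    waitForRegistryResolution(pending.root_dependency_id);
} else if (pending.tag == .download) {
    // Step 2: version known; the tarball is being fetched and extracted.
    waitForExtraction(pending.resolution_id);
}
```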
fn handleESMResolution(r: *ThisResolver, esm_resolution_: ESModule.Resolution, abs_package_path: string, kind: ast.ImportKind, package_json: *PackageJSON) ?MatchResult {
@@ -1409,13 +2007,22 @@ pub const Resolver = struct {
}
}
- pub fn resolveWithoutRemapping(r: *ThisResolver, source_dir_info: *DirInfo, import_path: string, kind: ast.ImportKind) ?MatchResult {
+ pub fn resolveWithoutRemapping(
+ r: *ThisResolver,
+ source_dir_info: *DirInfo,
+ import_path: string,
+ kind: ast.ImportKind,
+ global_cache: GlobalCache,
+ ) MatchResult.Union {
if (isPackagePath(import_path)) {
- return r.loadNodeModules(import_path, kind, source_dir_info, false);
+ return r.loadNodeModules(import_path, kind, source_dir_info, global_cache, false);
} else {
const paths = [_]string{ source_dir_info.abs_path, import_path };
var resolved = r.fs.absBuf(&paths, &resolve_without_remapping_buf);
- return r.loadAsFileOrDirectory(resolved, kind);
+ if (r.loadAsFileOrDirectory(resolved, kind)) |result| {
+ return .{ .success = result };
+ }
+ return .{ .not_found = {} };
}
}
@@ -1469,12 +2076,34 @@ pub const Resolver = struct {
return bin_folders.constSlice();
}
- pub fn parsePackageJSON(r: *ThisResolver, file: string, dirname_fd: StoredFileDescriptorType) !?*PackageJSON {
+ pub fn parsePackageJSON(
+ r: *ThisResolver,
+ file: string,
+ dirname_fd: StoredFileDescriptorType,
+ package_id: ?Install.PackageID,
+ comptime allow_dependencies: bool,
+ ) !?*PackageJSON {
var pkg: PackageJSON = undefined;
if (!r.care_about_scripts) {
- pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, false) orelse return null;
+ pkg = PackageJSON.parse(
+ r,
+ file,
+ dirname_fd,
+ package_id,
+ true,
+ if (allow_dependencies) .local else .none,
+ false,
+ ) orelse return null;
} else {
- pkg = PackageJSON.parse(ThisResolver, r, file, dirname_fd, true, true) orelse return null;
+ pkg = PackageJSON.parse(
+ r,
+ file,
+ dirname_fd,
+ package_id,
+ true,
+ if (allow_dependencies) .local else .none,
+ true,
+ ) orelse return null;
}
var _pkg = try r.allocator.create(PackageJSON);
@@ -1755,6 +2384,7 @@ pub const Resolver = struct {
r.dir_cache.atIndex(top_parent.index),
top_parent.index,
open_dir.fd,
+ null,
);
if (queue_slice.len == 0) {
@@ -1779,7 +2409,7 @@ pub const Resolver = struct {
// official TypeScript compiler
pub fn matchTSConfigPaths(r: *ThisResolver, tsconfig: *const TSConfigJSON, path: string, kind: ast.ImportKind) ?MatchResult {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path }) catch unreachable;
+ debug.addNoteFmt("Matching \"{s}\" against \"paths\" in \"{s}\"", .{ path, tsconfig.abs_path });
}
var abs_base_url = tsconfig.base_url_for_paths;
@@ -1792,7 +2422,7 @@ pub const Resolver = struct {
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url}) catch unreachable;
+ debug.addNoteFmt("Using \"{s}\" as \"baseURL\"", .{abs_base_url});
}
// Check for exact matches first
@@ -1857,7 +2487,7 @@ pub const Resolver = struct {
// prefix. This matches the behavior of the TypeScript compiler.
if (longest_match_prefix_length > -1) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix }) catch unreachable;
+ debug.addNoteFmt("Found a fuzzy match for \"{s}*{s}\" in \"paths\"", .{ longest_match.prefix, longest_match.suffix });
}
for (longest_match.original_paths) |original_path| {
@@ -1920,7 +2550,7 @@ pub const Resolver = struct {
std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. cleaned.len + ext.len], ext);
const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. cleaned.len + ext.len];
// if (r.debug_logs) |*debug| {
- // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
+ // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path});
// }
if (map.get(new_path)) |_remapped| {
this.remapped = _remapped;
@@ -1950,7 +2580,7 @@ pub const Resolver = struct {
std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[index_path.len .. index_path.len + ext.len], ext);
const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. index_path.len + ext.len];
// if (r.debug_logs) |*debug| {
- // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}) catch {};
+ // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path});
// }
if (map.get(new_path)) |_remapped| {
this.remapped = _remapped;
@@ -2032,7 +2662,7 @@ pub const Resolver = struct {
// package and the parent package.
const isInSamePackage = brk: {
const parent = dir_info.getParent() orelse break :brk true;
- break :brk !parent.is_node_modules;
+ break :brk !parent.isNodeModules();
};
if (isInSamePackage) {
@@ -2054,13 +2684,13 @@ pub const Resolver = struct {
var field_rel_path = _field_rel_path;
// Is this a directory?
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path }) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Found main field \"{s}\" with path \"{s}\"", .{ field, field_rel_path });
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -2142,7 +2772,7 @@ pub const Resolver = struct {
};
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file: \"{s}\"", .{out_buf}) catch unreachable;
+ debug.addNoteFmt("Found file: \"{s}\"", .{out_buf});
}
if (dir_info.package_json) |package_json| {
@@ -2165,7 +2795,7 @@ pub const Resolver = struct {
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base }) catch unreachable;
+ debug.addNoteFmt("Failed to find file: \"{s}/{s}\"", .{ path, base });
}
}
@@ -2264,13 +2894,13 @@ pub const Resolver = struct {
// Is this a directory?
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path}) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Attempting to load \"{s}\" as a directory", .{path});
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -2290,13 +2920,13 @@ pub const Resolver = struct {
const auto_main = r.opts.main_fields.ptr == options.Platform.DefaultMainFields.get(r.opts.platform).ptr;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text}) catch {};
+ debug.addNoteFmt("Searching for main fields in \"{s}\"", .{pkg_json.source.path.text});
}
for (main_field_keys) |key| {
const field_rel_path = (main_field_values.get(key)) orelse {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Did not find main field \"{s}\"", .{key}) catch {};
+ debug.addNoteFmt("Did not find main field \"{s}\"", .{key});
}
continue;
};
@@ -2331,9 +2961,9 @@ pub const Resolver = struct {
// same time.
if (kind != ast.ImportKind.require) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text }) catch {};
+ debug.addNoteFmt("Resolved to \"{s}\" using the \"module\" field in \"{s}\"", .{ auto_main_result.path_pair.primary.text, pkg_json.source.key_path.text });
- debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text}) catch {};
+ debug.addNoteFmt("The fallback path in case of \"require\" is {s}", .{auto_main_result.path_pair.primary.text});
}
return MatchResult{
@@ -2352,7 +2982,7 @@ pub const Resolver = struct {
auto_main_result.path_pair.primary.text,
key,
pkg_json.source.key_path.text,
- }) catch {};
+ });
}
var _auto_main_result = auto_main_result;
_auto_main_result.package_json = package_json;
@@ -2380,12 +3010,12 @@ pub const Resolver = struct {
var rfs: *Fs.FileSystem.RealFS = &r.fs.fs;
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path}) catch {};
- debug.increaseIndent() catch {};
+ debug.addNoteFmt("Attempting to load \"{s}\" as a file", .{path});
+ debug.increaseIndent();
}
defer {
if (r.debug_logs) |*debug| {
- debug.decreaseIndent() catch {};
+ debug.decreaseIndent();
}
}
@@ -2420,13 +3050,13 @@ pub const Resolver = struct {
// Try the plain path without any extensions
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for file \"{s}\" ", .{base}) catch {};
+ debug.addNoteFmt("Checking for file \"{s}\" ", .{base});
}
if (entries.get(base)) |query| {
if (query.entry.kind(rfs) == .file) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file \"{s}\" ", .{base}) catch {};
+ debug.addNoteFmt("Found file \"{s}\" ", .{base});
}
const abs_path = brk: {
@@ -2455,13 +3085,13 @@ pub const Resolver = struct {
const file_name = buffer[path.len - base.len .. buffer.len];
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Checking for file \"{s}\" ", .{buffer}) catch {};
+ debug.addNoteFmt("Checking for file \"{s}\" ", .{buffer});
}
if (entries.get(file_name)) |query| {
if (query.entry.kind(rfs) == .file) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Found file \"{s}\" ", .{buffer}) catch {};
+ debug.addNoteFmt("Found file \"{s}\" ", .{buffer});
}
// now that we've found it, we allocate it.
@@ -2513,7 +3143,7 @@ pub const Resolver = struct {
if (entries.get(buffer)) |query| {
if (query.entry.kind(rfs) == .file) {
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer}) catch {};
+ debug.addNoteFmt("Rewrote to \"{s}\" ", .{buffer});
}
return LoadResult{
@@ -2538,14 +3168,14 @@ pub const Resolver = struct {
}
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base}) catch {};
+ debug.addNoteFmt("Failed to rewrite \"{s}\" ", .{base});
}
}
}
}
if (r.debug_logs) |*debug| {
- debug.addNoteFmt("Failed to find \"{s}\" ", .{path}) catch {};
+ debug.addNoteFmt("Failed to find \"{s}\" ", .{path});
}
if (comptime FeatureFlags.watch_directories) {
@@ -2568,6 +3198,7 @@ pub const Resolver = struct {
parent: ?*DirInfo,
parent_index: allocators.IndexType,
fd: FileDescriptorType,
+ package_id: ?Install.PackageID,
) anyerror!void {
var result = _result;
@@ -2587,18 +3218,18 @@ pub const Resolver = struct {
// base must
if (base.len > 1 and base[base.len - 1] == std.fs.path.sep) base = base[0 .. base.len - 1];
- info.is_node_modules = strings.eqlComptime(base, "node_modules");
+ info.flags.setPresent(.is_node_modules, strings.eqlComptime(base, "node_modules"));
// if (entries != null) {
- if (!info.is_node_modules) {
+ if (!info.isNodeModules()) {
if (entries.getComptimeQuery("node_modules")) |entry| {
- info.has_node_modules = (entry.entry.kind(rfs)) == .dir;
+ info.flags.setPresent(.has_node_modules, (entry.entry.kind(rfs)) == .dir);
}
}
if (r.care_about_bin_folder) {
append_bin_dir: {
- if (info.has_node_modules) {
+ if (info.hasNodeModules()) {
if (entries.hasComptimeQuery("node_modules")) {
if (!bin_folders_loaded) {
bin_folders_loaded = true;
@@ -2622,7 +3253,7 @@ pub const Resolver = struct {
}
}
- if (info.is_node_modules) {
+ if (info.isNodeModules()) {
if (entries.getComptimeQuery(".bin")) |q| {
if (q.entry.kind(rfs) == .dir) {
if (!bin_folders_loaded) {
@@ -2663,9 +3294,14 @@ pub const Resolver = struct {
if (parent_package_json.name.len > 0 or r.care_about_bin_folder) {
info.enclosing_package_json = parent_package_json;
}
+
+ if (parent_package_json.dependencies.map.count() > 0 or parent_package_json.package_manager_package_id != Install.invalid_package_id) {
+ info.package_json_for_dependencies = parent_package_json;
+ }
}
info.enclosing_package_json = info.enclosing_package_json orelse parent.?.enclosing_package_json;
+ info.package_json_for_dependencies = info.package_json_for_dependencies orelse parent.?.package_json_for_dependencies;
// Make sure "absRealPath" is the real path of the directory (resolving any symlinks)
if (!r.opts.preserve_symlinks) {
@@ -2677,7 +3313,7 @@ pub const Resolver = struct {
var symlink = entry.symlink(rfs);
if (symlink.len > 0) {
if (r.debug_logs) |*logs| {
- try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+ logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
}
info.abs_real_path = symlink;
} else if (parent.?.abs_real_path.len > 0) {
@@ -2686,7 +3322,7 @@ pub const Resolver = struct {
symlink = r.fs.dirname_store.append(string, r.fs.absBuf(&parts, &dir_info_uncached_filename_buf)) catch unreachable;
if (r.debug_logs) |*logs| {
- try logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
+ logs.addNote(std.fmt.allocPrint(r.allocator, "Resolved symlink \"{s}\" to \"{s}\"", .{ path, symlink }) catch unreachable);
}
lookup.entry.cache.symlink = PathString.init(symlink);
info.abs_real_path = symlink;
@@ -2700,7 +3336,10 @@ pub const Resolver = struct {
if (entries.getComptimeQuery("package.json")) |lookup| {
const entry = lookup.entry;
if (entry.kind(rfs) == .file) {
- info.package_json = r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0) catch null;
+ info.package_json = if (r.usePackageManager() and !info.hasNodeModules() and !info.isNodeModules())
+ r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0, package_id, true) catch null
+ else
+ r.parsePackageJSON(path, if (FeatureFlags.store_file_descriptors) fd else 0, null, false) catch null;
if (info.package_json) |pkg| {
if (pkg.browser_map.count() > 0) {
@@ -2711,10 +3350,13 @@ pub const Resolver = struct {
if (pkg.name.len > 0 or r.care_about_bin_folder)
info.enclosing_package_json = pkg;
+ if (pkg.dependencies.map.count() > 0 or pkg.package_manager_package_id != Install.invalid_package_id)
+ info.package_json_for_dependencies = pkg;
+
if (r.debug_logs) |*logs| {
logs.addNoteFmt("Resolved package.json in \"{s}\"", .{
path,
- }) catch unreachable;
+ });
}
}
}
@@ -2836,3 +3478,51 @@ pub const RootPathPair = struct {
base_path: string,
package_json: *const PackageJSON,
};
+
+pub const GlobalCache = enum {
+ allow_install,
+ read_only,
+ auto,
+ force,
+ fallback,
+ disable,
+
+ pub const Map = bun.ComptimeStringMap(GlobalCache, .{
+ .{ "auto", GlobalCache.auto },
+ .{ "force", GlobalCache.force },
+ .{ "disable", GlobalCache.disable },
+ .{ "fallback", GlobalCache.fallback },
+ });
+
+ pub fn allowVersionSpecifier(this: GlobalCache) bool {
+ return this == .force;
+ }
+
+ pub fn canUse(this: GlobalCache, has_a_node_modules_folder: bool) bool {
+ // When there is a node_modules folder, we default to false
+ // When there is NOT a node_modules folder, we default to true
+ // That is the difference between these two branches.
+ if (has_a_node_modules_folder) {
+ return switch (this) {
+ .fallback, .allow_install, .force => true,
+ .read_only, .disable, .auto => false,
+ };
+ } else {
+ return switch (this) {
+ .fallback, .allow_install, .auto, .force => true,
+ .read_only, .disable => false,
+ };
+ }
+ }
+
+ pub fn isEnabled(this: GlobalCache) bool {
+ return this != .disable;
+ }
+
+ pub fn canInstall(this: GlobalCache) bool {
+ return switch (this) {
+ .auto, .allow_install, .force, .fallback => true,
+ else => false,
+ };
+ }
+};
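
The `canUse` switch is effectively a truth table; a test-style sketch of the behavior defined above (assuming `std` is in scope, as elsewhere in this file):

```zig
test "GlobalCache.canUse" {
    // With a node_modules folder present, only the explicitly
    // install-oriented modes reach the global cache.
    try std.testing.expect(GlobalCache.fallback.canUse(true));
    try std.testing.expect(GlobalCache.force.canUse(true));
    try std.testing.expect(!GlobalCache.auto.canUse(true));
    // Without one, `auto` opts in as well.
    try std.testing.expect(GlobalCache.auto.canUse(false));
    try std.testing.expect(!GlobalCache.read_only.canUse(false));
    try std.testing.expect(!GlobalCache.disable.canUse(false));
}
```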
diff --git a/src/string_builder.zig b/src/string_builder.zig
index d46b014e2..adb9e2ae0 100644
--- a/src/string_builder.zig
+++ b/src/string_builder.zig
@@ -77,3 +77,10 @@ pub fn fmt(this: *StringBuilder, comptime str: string, args: anytype) string {
pub fn fmtCount(this: *StringBuilder, comptime str: string, args: anytype) void {
this.cap += std.fmt.count(str, args);
}
+
+pub fn allocatedSlice(this: *StringBuilder) []u8 {
+ var ptr = this.ptr orelse return &[_]u8{};
+ std.debug.assert(this.cap > 0);
+ std.debug.assert(this.len > 0);
+ return ptr[0..this.cap];
+}
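
`allocatedSlice` hands back the builder's whole backing buffer so strings cloned into it can outlive the current frame; the resolver hunks above rely on the same pattern via `Semver.String.Builder` when parking a `PendingResolution`. A condensed sketch of that count/allocate/clone dance (`esm` and `allocator` as in those hunks):

```zig
var builder = Semver.String.Builder{};
esm.count(&builder); // first pass: tally how many bytes the clone will need
builder.allocate(allocator) catch unreachable; // one allocation for all strings
const cloned = esm.clone(&builder); // second pass: copy names into the buffer
const string_buf = builder.allocatedSlice(); // retained alongside the pending entry
```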