diff options
author | 2023-08-29 15:30:07 -0700 | |
---|---|---|
committer | 2023-08-29 15:30:07 -0700 | |
commit | a77ed151afd9ae946b500c3bac6cf1fdb18550ad (patch) | |
tree | d99cda4c5598ce2f9645bbae22cf0c3a735ee535 | |
parent | cf151a256cf4c29a1f2a544c2748c7693e6499f8 (diff) | |
download | bun-a77ed151afd9ae946b500c3bac6cf1fdb18550ad.tar.gz bun-a77ed151afd9ae946b500c3bac6cf1fdb18550ad.tar.zst bun-a77ed151afd9ae946b500c3bac6cf1fdb18550ad.zip |
[git] Normalize line endings
-rw-r--r-- | packages/bun-polyfills/src/global/importmeta.ts | 68 | ||||
-rw-r--r-- | packages/bun-polyfills/src/modules/bun.ts | 1000 | ||||
-rw-r--r-- | packages/bun-polyfills/src/modules/bun/arraybuffersink.ts | 134 | ||||
-rw-r--r-- | packages/bun-polyfills/src/modules/bun/dns.ts | 42 | ||||
-rw-r--r-- | packages/bun-polyfills/src/modules/bun/fileblob.ts | 390 | ||||
-rw-r--r-- | packages/bun-polyfills/src/modules/bun/filesink.ts | 174 | ||||
-rw-r--r-- | packages/bun-polyfills/src/modules/bun/hashes.ts | 370 | ||||
-rw-r--r-- | packages/bun-polyfills/src/modules/bun/transpiler.ts | 192 | ||||
-rw-r--r-- | packages/bun-polyfills/src/types/md4.d.ts | 144 | ||||
-rw-r--r-- | packages/bun-polyfills/src/utils/errors.ts | 460 | ||||
-rw-r--r-- | packages/bun-polyfills/src/utils/misc.ts | 72 |
11 files changed, 1523 insertions, 1523 deletions
diff --git a/packages/bun-polyfills/src/global/importmeta.ts b/packages/bun-polyfills/src/global/importmeta.ts index ea8acad80..6776fb668 100644 --- a/packages/bun-polyfills/src/global/importmeta.ts +++ b/packages/bun-polyfills/src/global/importmeta.ts @@ -1,34 +1,34 @@ -import path from 'node:path';
-import { fileURLToPath } from 'node:url';
-import { createRequire } from 'node:module';
-
-// Without an ESM loader, this polyfill is impossible to apply automatically,
-// due to the per-module nature of import.meta. In order to use this polyfill,
-// you must import it in every module that uses import.meta, and call it with
-// the import.meta object as the argument. When the polyfills are integrated
-// with bun build, this could be done automatically by the build process at
-// the top of every module file bundled.
-
-export default function polyfillImportMeta(metaIn: ImportMeta) {
- const require2 = createRequire(metaIn.url);
- const metapath = fileURLToPath(metaIn.url);
- const meta: ImportMeta = {
- url: metaIn.url,
- main: metapath === process.argv[1],
- path: metapath,
- dir: path.dirname(metapath),
- file: path.basename(metapath),
- require: require2,
- async resolve(id: string, parent?: string) {
- return this.resolveSync(id, parent);
- },
- resolveSync(id: string, parent?: string) {
- return require2.resolve(id, {
- paths: typeof parent === 'string' ? [
- path.resolve(parent.startsWith('file://') ? fileURLToPath(parent) : parent, '..')
- ] : undefined,
- });
- },
- };
- Object.assign(metaIn, meta);
-}
+import path from 'node:path'; +import { fileURLToPath } from 'node:url'; +import { createRequire } from 'node:module'; + +// Without an ESM loader, this polyfill is impossible to apply automatically, +// due to the per-module nature of import.meta. In order to use this polyfill, +// you must import it in every module that uses import.meta, and call it with +// the import.meta object as the argument. When the polyfills are integrated +// with bun build, this could be done automatically by the build process at +// the top of every module file bundled. + +export default function polyfillImportMeta(metaIn: ImportMeta) { + const require2 = createRequire(metaIn.url); + const metapath = fileURLToPath(metaIn.url); + const meta: ImportMeta = { + url: metaIn.url, + main: metapath === process.argv[1], + path: metapath, + dir: path.dirname(metapath), + file: path.basename(metapath), + require: require2, + async resolve(id: string, parent?: string) { + return this.resolveSync(id, parent); + }, + resolveSync(id: string, parent?: string) { + return require2.resolve(id, { + paths: typeof parent === 'string' ? [ + path.resolve(parent.startsWith('file://') ? fileURLToPath(parent) : parent, '..') + ] : undefined, + }); + }, + }; + Object.assign(metaIn, meta); +} diff --git a/packages/bun-polyfills/src/modules/bun.ts b/packages/bun-polyfills/src/modules/bun.ts index caddd2c02..6d7fd1a07 100644 --- a/packages/bun-polyfills/src/modules/bun.ts +++ b/packages/bun-polyfills/src/modules/bun.ts @@ -1,500 +1,500 @@ -import type {
- BunPlugin, PluginConstraints, PluginBuilder, OnLoadCallback, OnResolveCallback, HeapSnapshot,
- EditorOptions, SpawnOptions, Subprocess, SyncSubprocess, FileBlob as BunFileBlob, ArrayBufferView, Hash
-} from 'bun';
-import { TextDecoderStream } from 'node:stream/web';
-import { NotImplementedError, type SystemError } from '../utils/errors.js';
-import { streamToBuffer, isArrayBufferView, isFileBlob, isOptions } from '../utils/misc.js';
-import dnsPolyfill from './bun/dns.js';
-import { FileSink } from './bun/filesink.js';
-import {
- bunHash, bunHashProto,
- MD4 as MD4Polyfill, MD5 as MD5Polyfill,
- SHA1 as SHA1Polyfill, SHA224 as SHA224Polyfill,
- SHA256 as SHA256Polyfill, SHA384 as SHA384Polyfill,
- SHA512 as SHA512Polyfill, SHA512_256 as SHA512_256Polyfill
-} from './bun/hashes.js';
-import { ArrayBufferSink as ArrayBufferSinkPolyfill } from './bun/arraybuffersink.js';
-import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js';
-import TranspilerImpl from './bun/transpiler.js';
-import fs from 'node:fs';
-import v8 from 'node:v8';
-import path from 'node:path';
-import util from 'node:util';
-import zlib from 'node:zlib';
-import streams from 'node:stream';
-import workers from 'node:worker_threads';
-import chp, { type ChildProcess, type StdioOptions, type SpawnSyncReturns } from 'node:child_process';
-import { fileURLToPath as fileURLToPathNode, pathToFileURL as pathToFileURLNode } from 'node:url';
-import npm_which from 'which';
-import openEditor from 'open-editor';
-
-export const main = path.resolve(process.cwd(), process.argv[1] ?? 'repl') satisfies typeof Bun.main;
-
-//? These are automatically updated on build by tools/updateversions.ts, do not edit manually.
-export const version = '0.7.4' satisfies typeof Bun.version;
-export const revision = '56816a3ec845a4b9fc40ade34dbe5c0033433d51' satisfies typeof Bun.revision;
-
-export const gc = (globalThis.gc ? (() => (globalThis.gc!(), process.memoryUsage().heapUsed)) : (() => {
- const err = new Error('[bun-polyfills] Garbage collection polyfills are only available when Node.js is ran with the --expose-gc flag.');
- Error.captureStackTrace(err, gc);
- throw err;
-})) satisfies typeof Bun.gc;
-
-//getter(bun, 'cwd', proc.cwd); //! Can't named export a getter
-export const origin = '' satisfies typeof Bun.origin;
-// @ts-expect-error ---
-export const stdin = new NodeJSStreamFileBlob(process.stdin) satisfies typeof Bun.stdin;
-// @ts-expect-error ---
-export const stdout = new NodeJSStreamFileBlob(process.stdout) satisfies typeof Bun.stdout;
-// @ts-expect-error ---
-export const stderr = new NodeJSStreamFileBlob(process.stderr) satisfies typeof Bun.stderr;
-export const argv = [process.argv0, ...process.execArgv, ...process.argv.slice(1)] satisfies typeof Bun.argv;
-export const env = process.env satisfies typeof Bun.env;
-Object.setPrototypeOf(env, {
- toJSON(this: typeof env) { return { ...this }; }
-});
-// @ts-expect-error supports-color types are unbelievably bad
-export const enableANSIColors = (await import('supports-color')).createSupportsColor().hasBasic satisfies typeof Bun.enableANSIColors;
-
-export const hash = bunHash satisfies typeof Bun.hash;
-Object.setPrototypeOf(hash, bunHashProto satisfies Hash);
-
-export const unsafe = {
- gcAggressionLevel: () => 0, //! no-op
- arrayBufferToString: (buf) => new TextDecoder().decode(buf),
- segfault: () => {
- const segfault = new Error();
- segfault.name = 'SegfaultTest';
- segfault.message = '';
- console.error(segfault);
- process.exit(1);
- }
-} satisfies typeof Bun['unsafe'];
-
-export const Transpiler = TranspilerImpl satisfies typeof Bun.Transpiler;
-
-export const SHA1 = SHA1Polyfill satisfies typeof Bun.SHA1;
-export const MD5 = MD5Polyfill satisfies typeof Bun.MD5;
-export const MD4 = MD4Polyfill satisfies typeof Bun.MD4;
-export const SHA224 = SHA224Polyfill satisfies typeof Bun.SHA224;
-export const SHA512 = SHA512Polyfill satisfies typeof Bun.SHA512;
-export const SHA384 = SHA384Polyfill satisfies typeof Bun.SHA384;
-export const SHA256 = SHA256Polyfill satisfies typeof Bun.SHA256;
-export const SHA512_256 = SHA512_256Polyfill satisfies typeof Bun.SHA512_256;
-
-export const indexOfLine = ((data, offset) => {
- if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) data = new Uint8Array(data);
- if (data instanceof DataView || !(data instanceof Uint8Array)) data = new Uint8Array(data.buffer);
- return data.indexOf(10, offset);
-}) satisfies typeof Bun.indexOfLine;
-
-const peek_ = function peek(promise: Parameters<typeof Bun.peek>[0]) {
- throw new NotImplementedError('Bun.peek', peek);
-};
-peek_.status = (promise => {
- return util.inspect(promise).includes('<pending>') ? 'pending'
- : util.inspect(promise).includes('<rejected>') ? 'rejected' : 'fulfilled';
-}) satisfies typeof Bun.peek.status;
-export const peek = peek_ satisfies typeof Bun.peek;
-
-export const sleep = (ms => {
- return new Promise(r => setTimeout(r, ms instanceof Date ? ms.valueOf() - Date.now() : ms));
-}) satisfies typeof Bun.sleep;
-export const sleepSync = (ms => {
- if (ms < 0) throw new TypeError('argument to sleepSync must not be negative');
- Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
-}) satisfies typeof Bun.sleepSync;
-
-//? This is not 1:1 matching, but no one should be relying on the exact output of this function anyway.
-//? To quote Node's inspect itself: "The output of util.inspect() may change at any time and should not be depended upon programmatically."
-//? Of course in Node's case some didn't listen and relied on the output of util.inspect() anyway, but hopefully this won't happen with this one.
-export const inspect = ((arg: any): string => util.inspect(arg, {
- breakLength: Infinity,
- colors: false,
- compact: true,
- customInspect: false,
- depth: Infinity,
- getters: true,
- maxArrayLength: Infinity,
- maxStringLength: Infinity,
- showHidden: false,
- showProxy: false,
- sorted: false
-})) satisfies typeof Bun.inspect;
-
-export const resolveSync = ((id: string, parent: string) => import.meta.resolveSync(id, parent)) satisfies typeof Bun.resolveSync;
-export const resolve = (async (id: string, parent: string) => import.meta.resolve!(id, parent)) satisfies typeof Bun.resolve;
-
-//? Yes, this is faster than new Uint8Array(Buffer.allocUnsafe(size).buffer) by about 2.5x in Node.js
-export const allocUnsafe = ((size: number) => new Uint8Array(size)) satisfies typeof Bun.allocUnsafe;
-
-export const generateHeapSnapshot = (async (): Promise<HeapSnapshot> => {
- process.emitWarning('The polyfill for Bun.generateHeapShot is asynchronous, unlike the original which is synchronous.', {
- type: 'BunPolyfillWarning',
- code: 'BUN_POLYFILLS_ASYNC_GENERATE_HEAP_SNAPSHOT',
- detail: 'This is due to v8.getHeapSnapshot() returning a stream in Node.js. This is not a bug, but a limitation of the polyfill.'
- });
- const raw = (await streamToBuffer(v8.getHeapSnapshot())).toString('utf8');
- const json = JSON.parse(raw) as V8HeapSnapshot;
- return {
- version: 2,
- type: 'Inspector',
- nodes: json.nodes,
- edges: json.edges,
- edgeTypes: json.snapshot.meta.edge_types.flat(),
- edgeNames: json.snapshot.meta.edge_fields.flat(),
- nodeClassNames: json.snapshot.meta.node_types.flat(),
- };
- // @ts-expect-error Refer to the above emitWarning call
-}) satisfies typeof Bun.generateHeapSnapshot;
-
-//! This is a no-op in Node.js, as there is no way to shrink the V8 heap from JS as far as I know.
-export const shrink = (() => void 0) satisfies typeof Bun.shrink;
-
-export const openInEditor = ((file: string, opts?: EditorOptions) => {
- const target = [{ file: path.resolve(process.cwd(), file), line: opts?.line, column: opts?.column }] as const;
- if (opts?.editor) openEditor(target, opts);
- else openEditor(target, { editor: process.env.TERM_PROGRAM ?? process.env.VISUAL ?? process.env.EDITOR ?? 'vscode' });
-}) satisfies typeof Bun.openInEditor;
-
-export const serve = (() => { throw new NotImplementedError('Bun.serve', serve); }) satisfies typeof Bun.serve;
-
-export const file = ((path: string | URL | Uint8Array | ArrayBufferLike | number, options?: BlobPropertyBag): BunFileBlob => {
- if (typeof path === 'object') throw new NotImplementedError('Bun.file with typed array', file);
- return new FileBlob(path, options);
-}) satisfies typeof Bun.file;
-
-export const write = (async (dest: BunFileBlob | PathLike, input: string | Blob | TypedArray | ArrayBufferLike | BlobPart[] | Response | BunFileBlob): ReturnType<typeof Bun.write> => {
- if (!isFileBlob(dest)) {
- let fd: number;
- if (dest instanceof ArrayBuffer || dest instanceof SharedArrayBuffer) fd = fs.openSync(Buffer.from(dest), 'w');
- // bun-types thought it'd be funny to make their own URL definition which doesnt match with the correct URL definition...
- else if (typeof dest === 'string' || dest instanceof URL) fd = fs.openSync(dest as import('url').URL, 'w');
- else fd = fs.openSync(Buffer.from(dest.buffer), 'w');
-
- if (input instanceof Response || input instanceof Blob) {
- const data = await input.text();
- return new Promise((resolve, reject) => {
- fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
- });
- }
- if (Array.isArray(input)) {
- const data = await new Blob(input).text();
- return new Promise((resolve, reject) => {
- fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
- });
- }
- return new Promise((resolve, reject) => {
- if (typeof input === 'string') return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
- if (input instanceof Uint8Array) return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
- if (input instanceof ArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
- if (input instanceof SharedArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
- return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
- });
- } else {
- const writer = dest.writer();
- if (Array.isArray(input)) input = new Blob(input);
- if (input instanceof Blob || input instanceof Response) return writer.write(await input.arrayBuffer());
- if (input instanceof ArrayBuffer || input instanceof SharedArrayBuffer || ArrayBuffer.isView(input)) return writer.write(input);
- if (typeof input === 'string') return writer.write(input);
- else return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
- }
-}) satisfies typeof Bun.write;
-
-export const sha = SHA512_256.hash satisfies typeof Bun.sha;
-
-export const nanoseconds = (() => Math.trunc(performance.now() * 1000000)) satisfies typeof Bun.nanoseconds;
-
-//? This just prints out some debug stuff in console, and as the name implies no one should be using it.
-//? But, just in case someone does, we'll make it a no-op function so at least the program doesn't crash trying to run the function.
-export const DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump = (() => {
- console.warn('DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump called.');
-}) satisfies unknown; /* undocumented */
-
-export const gzipSync = zlib.gzipSync satisfies typeof Bun.gzipSync;
-export const deflateSync = zlib.deflateSync satisfies typeof Bun.deflateSync;
-export const gunzipSync = zlib.gunzipSync satisfies typeof Bun.gunzipSync;
-export const inflateSync = zlib.inflateSync satisfies typeof Bun.inflateSync;
-
-export const which = ((cmd: string, options) => {
- const opts: npm_which.Options = { all: false, nothrow: true };
- if (options?.PATH) opts.path = options.PATH;
- const result = npm_which.sync(cmd, opts) as string | null;
- if (!result || !options?.cwd) return result;
- if (path.normalize(result).includes(path.normalize(options.cwd))) return result;
- else return null;
-}) satisfies typeof Bun.which;
-
-export const spawn = ((...args) => {
- let cmd: string;
- let argv: string[];
- let opts: SpawnOptions.OptionsObject;
-
- if (args[0] instanceof Array) {
- cmd = args[0][0];
- argv = args[0].slice(1);
- opts = isOptions(args[1]) ? args[1] : {};
- } else {
- cmd = args[0].cmd[0];
- argv = args[0].cmd.slice(1);
- opts = args[0];
- Reflect.deleteProperty(opts, 'cmd');
- }
-
- let stdio: StdioOptions = [];
- opts.stdio ??= [undefined, undefined, undefined];
- if (opts.stdin) opts.stdio[0] = opts.stdin;
- if (opts.stdout) opts.stdio[1] = opts.stdout;
- if (opts.stderr) opts.stdio[2] = opts.stderr;
- for (let i = 1; i < 3; i++) { // this intentionally skips stdin
- let std = opts.stdio[i];
- if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
- else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
- else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
- else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
- else stdio[i] = std;
- }
- let stdinSrc: typeof opts.stdio[0] = null;
- if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
- stdinSrc = opts.stdio[0];
- stdio[0] = 'pipe';
- }
-
- const subp = chp.spawn(cmd, argv, {
- cwd: opts.cwd ?? process.cwd(),
- // why is this set to (string | number) on env values...
- env: { ...(opts.env as Record<string, string> ?? process.env) },
- stdio
- }) as unknown as Subprocess;
- const subpAsNode = subp as unknown as ChildProcess;
- const stdstreams = [subpAsNode.stdin, subpAsNode.stdout, subpAsNode.stderr] as const;
- if (subpAsNode.stdout) {
- const rstream = streams.Readable.toWeb(subpAsNode.stdout) as ReadableStream;
- Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
- void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails its already closed */ });
- return this;
- });
- (<Mutable<Subprocess>>subp).stdout = rstream;
- }
- if (subpAsNode.stderr) {
- const rstream = streams.Readable.toWeb(subpAsNode.stderr) as ReadableStream;
- Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
- void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails its already closed */ });
- return this;
- });
- (<Mutable<Subprocess>>subp).stderr = rstream;
- }
- let internalStdinStream: streams.Writable;
- if (subpAsNode.stdin) {
- const wstream = subpAsNode.stdin;
- Reflect.set(wstream, 'destroy', function (this: NodeJS.WritableStream, err?: Error) {
- void this.end(); /* if it fails its already closed */
- return this;
- });
- internalStdinStream = wstream;
- (<Mutable<Subprocess>>subp).stdin = new FileSink(wstream);
-
- }
- Object.defineProperty(subp, 'readable', { get(this: Subprocess) { return this.stdout; } });
- Object.defineProperty(subp, 'exited', {
- value: new Promise((resolve, reject) => {
- subpAsNode.once('exit', (code) => {
- stdstreams[0]?.destroy();
- stdstreams[1]?.destroy();
- stdstreams[2]?.destroy();
- subp.kill();
- subp.unref();
- subpAsNode.disconnect?.();
- subpAsNode.removeAllListeners();
- resolve(code);
- });
- })
- });
- if (stdinSrc) subpAsNode.once('spawn', () => {
- const stdinWeb = streams.Writable.toWeb(internalStdinStream);
- if (isArrayBufferView(stdinSrc)) stdinSrc = new Blob([stdinSrc]);
- if (stdinSrc instanceof Blob) void stdinSrc.stream().pipeTo(stdinWeb);
- else if (stdinSrc instanceof Response || stdinSrc instanceof Request) void stdinSrc.body!.pipeTo(stdinWeb);
- else if (typeof stdinSrc === 'number') void fs.createReadStream('', { fd: stdinSrc }).pipe(internalStdinStream);
- else void stdinSrc;
- });
- // change the error stack to point to the spawn() call instead of internal Node.js callback stuff
- const here = new Error('§__PLACEHOLDER__§');
- Error.captureStackTrace(here, spawn);
- if (!subpAsNode.pid) return subpAsNode.once('error', (err: SystemError) => {
- err.message = (err.syscall ?? `spawn ${err.path ?? ''}`) + ' ' + (err.code ?? String(err.errno ?? ''));
- err.stack = here.stack!.replace('§__PLACEHOLDER__§', err.message);
- throw err;
- }) as unknown as Subprocess;
- return subp;
-}) satisfies typeof Bun.spawn;
-export const spawnSync = ((...args): SyncSubprocess => {
- let cmd: string;
- let argv: string[];
- let opts: SpawnOptions.OptionsObject;
- if (args[0] instanceof Array) {
- cmd = args[0][0];
- argv = args[0].slice(1);
- opts = isOptions(args[1]) ? args[1] : {};
- } else {
- cmd = args[0].cmd[0];
- argv = args[0].cmd.slice(1);
- opts = args[0];
- Reflect.deleteProperty(opts, 'cmd');
- }
-
- let stdio: StdioOptions = [];
- opts.stdio ??= [undefined, undefined, undefined];
- if (opts.stdin) opts.stdio[0] = opts.stdin;
- if (opts.stdout) opts.stdio[1] = opts.stdout;
- if (opts.stderr) opts.stdio[2] = opts.stderr;
- for (let i = 1; i < 3; i++) { // this intentionally skips stdin
- let std = opts.stdio[i];
- if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
- else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
- else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
- else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
- else stdio[i] = std;
- }
- let input: ArrayBufferView | string | undefined;
- if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
- stdio[0] = null; // will be overriden by chp.spawnSync "input" option
- //! Due to the fully async nature of Blobs, Responses and Requests,
- //! we can't synchronously get the data out of them here in userland.
- if (opts.stdio[0] instanceof Blob) throw new NotImplementedError('Bun.spawnSync({ stdin: <Blob> })', spawnSync);
- else if (opts.stdio[0] instanceof Response || opts.stdio[0] instanceof Request) throw new NotImplementedError('Bun.spawnSync({ stdin: <Response|Request> })', spawnSync);
- else if (typeof opts.stdio[0] === 'number') input = fs.readFileSync(opts.stdio[0]);
- else input = opts.stdio[0] as ArrayBufferView;
- }
-
- const subp = chp.spawnSync(cmd, argv, {
- cwd: opts.cwd ?? process.cwd(),
- env: { ...(opts.env as Record<string, string> ?? process.env) },
- stdio, input
- }) as unknown as SyncSubprocess;
- const subpAsNode = subp as unknown as SpawnSyncReturns<Buffer>;
- if (subpAsNode.error) throw subpAsNode.error;
-
- subp.exitCode = subpAsNode.status ?? NaN; //! not sure what Bun would return here (child killed by signal)
- subp.success = subp.exitCode === 0;
- return subp;
-}) satisfies typeof Bun.spawnSync;
-
-export const escapeHTML = ((input) => {
- const str = String(input);
- let out = '';
- for (let i = 0; i < str.length; i++) {
- const char = str[i];
- switch (char) {
- case '"': out += '"'; break;
- case "'": out += '''; break;
- case '&': out += '&'; break;
- case '<': out += '<'; break;
- case '>': out += '>'; break;
- default: out += char;
- }
- }
- return out;
-}) satisfies typeof Bun.escapeHTML;
-
-export const readableStreamToArrayBuffer = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): ArrayBuffer | Promise<ArrayBuffer> => {
- return (async () => {
- const sink = new ArrayBufferSink();
- const reader = stream.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done) break;
- sink.write(value);
- }
- return sink.end() as ArrayBuffer;
- })();
-}) satisfies typeof Bun.readableStreamToArrayBuffer;
-export const readableStreamToText = (async (stream: ReadableStream<ArrayBufferView | ArrayBuffer>) => {
- let result = '';
- const reader = stream.pipeThrough(new TextDecoderStream()).getReader(); ReadableStreamDefaultReader
- while (true) {
- const { done, value } = await reader.read();
- //! for some reason "done" isnt being set to true so this is just infinitely looping at the moment... sigh
- if (done || !value || !value?.length) break;
- result += value;
- }
- return result;
-}) satisfies typeof Bun.readableStreamToText;
-export const readableStreamToBlob = (async (stream: ReadableStream<any>) => {
- const parts = await readableStreamToArray(stream);
- return new Blob(parts as BlobPart[]);
-}) satisfies typeof Bun.readableStreamToBlob;
-export const readableStreamToArray = (async <T = unknown>(stream: ReadableStream<T>) => {
- const array = new Array<T>();
- const reader = stream.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done || !value || !(<any>value)?.length) break;
- array.push(value as unknown as T);
- }
- return array;
-}) satisfies typeof Bun.readableStreamToArray;
-export const readableStreamToJSON = (async <T = unknown>(stream: ReadableStream<Uint8Array>) => {
- const text = await readableStreamToText(stream);
- try {
- return JSON.parse(text) as T;
- } catch (err) {
- Error.captureStackTrace(err as Error, readableStreamToJSON);
- throw err;
- }
-}) satisfies typeof Bun.readableStreamToJSON;
-
-export const concatArrayBuffers = ((buffers) => {
- let size = 0;
- for (const chunk of buffers) size += chunk.byteLength;
- const buffer = new ArrayBuffer(size);
- const view = new Uint8Array(buffer);
- let offset = 0;
- for (const chunk of buffers) {
- view.set(new Uint8Array(chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer ? chunk : chunk.buffer), offset);
- offset += chunk.byteLength;
- }
- return buffer;
-}) satisfies typeof Bun.concatArrayBuffers;
-
-export const ArrayBufferSink = ArrayBufferSinkPolyfill satisfies typeof Bun.ArrayBufferSink;
-
-export const pathToFileURL = pathToFileURLNode satisfies typeof Bun.pathToFileURL;
-export const fileURLToPath = fileURLToPathNode satisfies typeof Bun.fileURLToPath;
-
-export const dns = dnsPolyfill satisfies typeof Bun.dns;
-
-export const isMainThread = workers.isMainThread satisfies typeof Bun.isMainThread;
-
-//! It may be possible to implement plugins with Node ESM loaders, but it would take some effort and have some caveats.
-//! For now, we'll simply make all calls to Bun.plugin no-op, such that manual implementation of an external ESM loader is possible,
-//! but without needing to strip out all Bun.plugin calls from the source code for running on Node.
-const dummyPluginBuilder: PluginBuilder = ({
- onLoad(constraints: PluginConstraints, callback: OnLoadCallback): void {
- return; // stubbed
- },
- onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void {
- return; // stubbed
- },
- config: { plugins: [], entrypoints: [] },
-}) satisfies PluginBuilder;
-const bunPlugin = <T extends BunPlugin>(options: T) => options?.setup?.(dummyPluginBuilder) as ReturnType<T['setup']>;
-bunPlugin.clearAll = () => void 0;
-export const plugin = bunPlugin satisfies typeof Bun.plugin;
-/*void plugin({
- name: 'test',
- target: 'bun',
- setup(builder) {
- if (builder.target !== 'bun') return;
- builder.onResolve({ namespace: 'sample', filter: /.+/ }, args => {
- args.importer;
- if (args.path === 'foo') return { namespace: 'redirect', path: 'bar' };
- else return;
- });
- builder.onLoad({ namespace: 'sample', filter: /.+/ }, args => {
- args.path;
- return { loader: 'object', exports: { foo: 'bar' }, contents: 'void 0;' };
- });
- }
-});*/
+import type { + BunPlugin, PluginConstraints, PluginBuilder, OnLoadCallback, OnResolveCallback, HeapSnapshot, + EditorOptions, SpawnOptions, Subprocess, SyncSubprocess, FileBlob as BunFileBlob, ArrayBufferView, Hash +} from 'bun'; +import { TextDecoderStream } from 'node:stream/web'; +import { NotImplementedError, type SystemError } from '../utils/errors.js'; +import { streamToBuffer, isArrayBufferView, isFileBlob, isOptions } from '../utils/misc.js'; +import dnsPolyfill from './bun/dns.js'; +import { FileSink } from './bun/filesink.js'; +import { + bunHash, bunHashProto, + MD4 as MD4Polyfill, MD5 as MD5Polyfill, + SHA1 as SHA1Polyfill, SHA224 as SHA224Polyfill, + SHA256 as SHA256Polyfill, SHA384 as SHA384Polyfill, + SHA512 as SHA512Polyfill, SHA512_256 as SHA512_256Polyfill +} from './bun/hashes.js'; +import { ArrayBufferSink as ArrayBufferSinkPolyfill } from './bun/arraybuffersink.js'; +import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js'; +import TranspilerImpl from './bun/transpiler.js'; +import fs from 'node:fs'; +import v8 from 'node:v8'; +import path from 'node:path'; +import util from 'node:util'; +import zlib from 'node:zlib'; +import streams from 'node:stream'; +import workers from 'node:worker_threads'; +import chp, { type ChildProcess, type StdioOptions, type SpawnSyncReturns } from 'node:child_process'; +import { fileURLToPath as fileURLToPathNode, pathToFileURL as pathToFileURLNode } from 'node:url'; +import npm_which from 'which'; +import openEditor from 'open-editor'; + +export const main = path.resolve(process.cwd(), process.argv[1] ?? 'repl') satisfies typeof Bun.main; + +//? These are automatically updated on build by tools/updateversions.ts, do not edit manually. +export const version = '0.7.4' satisfies typeof Bun.version; +export const revision = '56816a3ec845a4b9fc40ade34dbe5c0033433d51' satisfies typeof Bun.revision; + +export const gc = (globalThis.gc ? 
(() => (globalThis.gc!(), process.memoryUsage().heapUsed)) : (() => { + const err = new Error('[bun-polyfills] Garbage collection polyfills are only available when Node.js is ran with the --expose-gc flag.'); + Error.captureStackTrace(err, gc); + throw err; +})) satisfies typeof Bun.gc; + +//getter(bun, 'cwd', proc.cwd); //! Can't named export a getter +export const origin = '' satisfies typeof Bun.origin; +// @ts-expect-error --- +export const stdin = new NodeJSStreamFileBlob(process.stdin) satisfies typeof Bun.stdin; +// @ts-expect-error --- +export const stdout = new NodeJSStreamFileBlob(process.stdout) satisfies typeof Bun.stdout; +// @ts-expect-error --- +export const stderr = new NodeJSStreamFileBlob(process.stderr) satisfies typeof Bun.stderr; +export const argv = [process.argv0, ...process.execArgv, ...process.argv.slice(1)] satisfies typeof Bun.argv; +export const env = process.env satisfies typeof Bun.env; +Object.setPrototypeOf(env, { + toJSON(this: typeof env) { return { ...this }; } +}); +// @ts-expect-error supports-color types are unbelievably bad +export const enableANSIColors = (await import('supports-color')).createSupportsColor().hasBasic satisfies typeof Bun.enableANSIColors; + +export const hash = bunHash satisfies typeof Bun.hash; +Object.setPrototypeOf(hash, bunHashProto satisfies Hash); + +export const unsafe = { + gcAggressionLevel: () => 0, //! 
no-op + arrayBufferToString: (buf) => new TextDecoder().decode(buf), + segfault: () => { + const segfault = new Error(); + segfault.name = 'SegfaultTest'; + segfault.message = ''; + console.error(segfault); + process.exit(1); + } +} satisfies typeof Bun['unsafe']; + +export const Transpiler = TranspilerImpl satisfies typeof Bun.Transpiler; + +export const SHA1 = SHA1Polyfill satisfies typeof Bun.SHA1; +export const MD5 = MD5Polyfill satisfies typeof Bun.MD5; +export const MD4 = MD4Polyfill satisfies typeof Bun.MD4; +export const SHA224 = SHA224Polyfill satisfies typeof Bun.SHA224; +export const SHA512 = SHA512Polyfill satisfies typeof Bun.SHA512; +export const SHA384 = SHA384Polyfill satisfies typeof Bun.SHA384; +export const SHA256 = SHA256Polyfill satisfies typeof Bun.SHA256; +export const SHA512_256 = SHA512_256Polyfill satisfies typeof Bun.SHA512_256; + +export const indexOfLine = ((data, offset) => { + if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) data = new Uint8Array(data); + if (data instanceof DataView || !(data instanceof Uint8Array)) data = new Uint8Array(data.buffer); + return data.indexOf(10, offset); +}) satisfies typeof Bun.indexOfLine; + +const peek_ = function peek(promise: Parameters<typeof Bun.peek>[0]) { + throw new NotImplementedError('Bun.peek', peek); +}; +peek_.status = (promise => { + return util.inspect(promise).includes('<pending>') ? 'pending' + : util.inspect(promise).includes('<rejected>') ? 'rejected' : 'fulfilled'; +}) satisfies typeof Bun.peek.status; +export const peek = peek_ satisfies typeof Bun.peek; + +export const sleep = (ms => { + return new Promise(r => setTimeout(r, ms instanceof Date ? ms.valueOf() - Date.now() : ms)); +}) satisfies typeof Bun.sleep; +export const sleepSync = (ms => { + if (ms < 0) throw new TypeError('argument to sleepSync must not be negative'); + Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms); +}) satisfies typeof Bun.sleepSync; + +//? 
This is not 1:1 matching, but no one should be relying on the exact output of this function anyway. +//? To quote Node's inspect itself: "The output of util.inspect() may change at any time and should not be depended upon programmatically." +//? Of course in Node's case some didn't listen and relied on the output of util.inspect() anyway, but hopefully this won't happen with this one. +export const inspect = ((arg: any): string => util.inspect(arg, { + breakLength: Infinity, + colors: false, + compact: true, + customInspect: false, + depth: Infinity, + getters: true, + maxArrayLength: Infinity, + maxStringLength: Infinity, + showHidden: false, + showProxy: false, + sorted: false +})) satisfies typeof Bun.inspect; + +export const resolveSync = ((id: string, parent: string) => import.meta.resolveSync(id, parent)) satisfies typeof Bun.resolveSync; +export const resolve = (async (id: string, parent: string) => import.meta.resolve!(id, parent)) satisfies typeof Bun.resolve; + +//? Yes, this is faster than new Uint8Array(Buffer.allocUnsafe(size).buffer) by about 2.5x in Node.js +export const allocUnsafe = ((size: number) => new Uint8Array(size)) satisfies typeof Bun.allocUnsafe; + +export const generateHeapSnapshot = (async (): Promise<HeapSnapshot> => { + process.emitWarning('The polyfill for Bun.generateHeapShot is asynchronous, unlike the original which is synchronous.', { + type: 'BunPolyfillWarning', + code: 'BUN_POLYFILLS_ASYNC_GENERATE_HEAP_SNAPSHOT', + detail: 'This is due to v8.getHeapSnapshot() returning a stream in Node.js. This is not a bug, but a limitation of the polyfill.' 
+ }); + const raw = (await streamToBuffer(v8.getHeapSnapshot())).toString('utf8'); + const json = JSON.parse(raw) as V8HeapSnapshot; + return { + version: 2, + type: 'Inspector', + nodes: json.nodes, + edges: json.edges, + edgeTypes: json.snapshot.meta.edge_types.flat(), + edgeNames: json.snapshot.meta.edge_fields.flat(), + nodeClassNames: json.snapshot.meta.node_types.flat(), + }; + // @ts-expect-error Refer to the above emitWarning call +}) satisfies typeof Bun.generateHeapSnapshot; + +//! This is a no-op in Node.js, as there is no way to shrink the V8 heap from JS as far as I know. +export const shrink = (() => void 0) satisfies typeof Bun.shrink; + +export const openInEditor = ((file: string, opts?: EditorOptions) => { + const target = [{ file: path.resolve(process.cwd(), file), line: opts?.line, column: opts?.column }] as const; + if (opts?.editor) openEditor(target, opts); + else openEditor(target, { editor: process.env.TERM_PROGRAM ?? process.env.VISUAL ?? process.env.EDITOR ?? 'vscode' }); +}) satisfies typeof Bun.openInEditor; + +export const serve = (() => { throw new NotImplementedError('Bun.serve', serve); }) satisfies typeof Bun.serve; + +export const file = ((path: string | URL | Uint8Array | ArrayBufferLike | number, options?: BlobPropertyBag): BunFileBlob => { + if (typeof path === 'object') throw new NotImplementedError('Bun.file with typed array', file); + return new FileBlob(path, options); +}) satisfies typeof Bun.file; + +export const write = (async (dest: BunFileBlob | PathLike, input: string | Blob | TypedArray | ArrayBufferLike | BlobPart[] | Response | BunFileBlob): ReturnType<typeof Bun.write> => { + if (!isFileBlob(dest)) { + let fd: number; + if (dest instanceof ArrayBuffer || dest instanceof SharedArrayBuffer) fd = fs.openSync(Buffer.from(dest), 'w'); + // bun-types thought it'd be funny to make their own URL definition which doesnt match with the correct URL definition... 
+ else if (typeof dest === 'string' || dest instanceof URL) fd = fs.openSync(dest as import('url').URL, 'w'); + else fd = fs.openSync(Buffer.from(dest.buffer), 'w'); + + if (input instanceof Response || input instanceof Blob) { + const data = await input.text(); + return new Promise((resolve, reject) => { + fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written)); + }); + } + if (Array.isArray(input)) { + const data = await new Blob(input).text(); + return new Promise((resolve, reject) => { + fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written)); + }); + } + return new Promise((resolve, reject) => { + if (typeof input === 'string') return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written)); + if (input instanceof Uint8Array) return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written)); + if (input instanceof ArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written)); + if (input instanceof SharedArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written)); + return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that. + }); + } else { + const writer = dest.writer(); + if (Array.isArray(input)) input = new Blob(input); + if (input instanceof Blob || input instanceof Response) return writer.write(await input.arrayBuffer()); + if (input instanceof ArrayBuffer || input instanceof SharedArrayBuffer || ArrayBuffer.isView(input)) return writer.write(input); + if (typeof input === 'string') return writer.write(input); + else return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that. 
+ } +}) satisfies typeof Bun.write; + +export const sha = SHA512_256.hash satisfies typeof Bun.sha; + +export const nanoseconds = (() => Math.trunc(performance.now() * 1000000)) satisfies typeof Bun.nanoseconds; + +//? This just prints out some debug stuff in console, and as the name implies no one should be using it. +//? But, just in case someone does, we'll make it a no-op function so at least the program doesn't crash trying to run the function. +export const DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump = (() => { + console.warn('DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump called.'); +}) satisfies unknown; /* undocumented */ + +export const gzipSync = zlib.gzipSync satisfies typeof Bun.gzipSync; +export const deflateSync = zlib.deflateSync satisfies typeof Bun.deflateSync; +export const gunzipSync = zlib.gunzipSync satisfies typeof Bun.gunzipSync; +export const inflateSync = zlib.inflateSync satisfies typeof Bun.inflateSync; + +export const which = ((cmd: string, options) => { + const opts: npm_which.Options = { all: false, nothrow: true }; + if (options?.PATH) opts.path = options.PATH; + const result = npm_which.sync(cmd, opts) as string | null; + if (!result || !options?.cwd) return result; + if (path.normalize(result).includes(path.normalize(options.cwd))) return result; + else return null; +}) satisfies typeof Bun.which; + +export const spawn = ((...args) => { + let cmd: string; + let argv: string[]; + let opts: SpawnOptions.OptionsObject; + + if (args[0] instanceof Array) { + cmd = args[0][0]; + argv = args[0].slice(1); + opts = isOptions(args[1]) ? 
args[1] : {}; + } else { + cmd = args[0].cmd[0]; + argv = args[0].cmd.slice(1); + opts = args[0]; + Reflect.deleteProperty(opts, 'cmd'); + } + + let stdio: StdioOptions = []; + opts.stdio ??= [undefined, undefined, undefined]; + if (opts.stdin) opts.stdio[0] = opts.stdin; + if (opts.stdout) opts.stdio[1] = opts.stdout; + if (opts.stderr) opts.stdio[2] = opts.stderr; + for (let i = 1; i < 3; i++) { // this intentionally skips stdin + let std = opts.stdio[i]; + if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream()); + else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream()); + else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std); + else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!); + else stdio[i] = std; + } + let stdinSrc: typeof opts.stdio[0] = null; + if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') { + stdinSrc = opts.stdio[0]; + stdio[0] = 'pipe'; + } + + const subp = chp.spawn(cmd, argv, { + cwd: opts.cwd ?? process.cwd(), + // why is this set to (string | number) on env values... + env: { ...(opts.env as Record<string, string> ?? process.env) }, + stdio + }) as unknown as Subprocess; + const subpAsNode = subp as unknown as ChildProcess; + const stdstreams = [subpAsNode.stdin, subpAsNode.stdout, subpAsNode.stderr] as const; + if (subpAsNode.stdout) { + const rstream = streams.Readable.toWeb(subpAsNode.stdout) as ReadableStream; + Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) { + void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails its already closed */ }); + return this; + }); + (<Mutable<Subprocess>>subp).stdout = rstream; + } + if (subpAsNode.stderr) { + const rstream = streams.Readable.toWeb(subpAsNode.stderr) as ReadableStream; + Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) { + void (err ? 
this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails its already closed */ }); + return this; + }); + (<Mutable<Subprocess>>subp).stderr = rstream; + } + let internalStdinStream: streams.Writable; + if (subpAsNode.stdin) { + const wstream = subpAsNode.stdin; + Reflect.set(wstream, 'destroy', function (this: NodeJS.WritableStream, err?: Error) { + void this.end(); /* if it fails its already closed */ + return this; + }); + internalStdinStream = wstream; + (<Mutable<Subprocess>>subp).stdin = new FileSink(wstream); + + } + Object.defineProperty(subp, 'readable', { get(this: Subprocess) { return this.stdout; } }); + Object.defineProperty(subp, 'exited', { + value: new Promise((resolve, reject) => { + subpAsNode.once('exit', (code) => { + stdstreams[0]?.destroy(); + stdstreams[1]?.destroy(); + stdstreams[2]?.destroy(); + subp.kill(); + subp.unref(); + subpAsNode.disconnect?.(); + subpAsNode.removeAllListeners(); + resolve(code); + }); + }) + }); + if (stdinSrc) subpAsNode.once('spawn', () => { + const stdinWeb = streams.Writable.toWeb(internalStdinStream); + if (isArrayBufferView(stdinSrc)) stdinSrc = new Blob([stdinSrc]); + if (stdinSrc instanceof Blob) void stdinSrc.stream().pipeTo(stdinWeb); + else if (stdinSrc instanceof Response || stdinSrc instanceof Request) void stdinSrc.body!.pipeTo(stdinWeb); + else if (typeof stdinSrc === 'number') void fs.createReadStream('', { fd: stdinSrc }).pipe(internalStdinStream); + else void stdinSrc; + }); + // change the error stack to point to the spawn() call instead of internal Node.js callback stuff + const here = new Error('§__PLACEHOLDER__§'); + Error.captureStackTrace(here, spawn); + if (!subpAsNode.pid) return subpAsNode.once('error', (err: SystemError) => { + err.message = (err.syscall ?? `spawn ${err.path ?? ''}`) + ' ' + (err.code ?? String(err.errno ?? 
'')); + err.stack = here.stack!.replace('§__PLACEHOLDER__§', err.message); + throw err; + }) as unknown as Subprocess; + return subp; +}) satisfies typeof Bun.spawn; +export const spawnSync = ((...args): SyncSubprocess => { + let cmd: string; + let argv: string[]; + let opts: SpawnOptions.OptionsObject; + if (args[0] instanceof Array) { + cmd = args[0][0]; + argv = args[0].slice(1); + opts = isOptions(args[1]) ? args[1] : {}; + } else { + cmd = args[0].cmd[0]; + argv = args[0].cmd.slice(1); + opts = args[0]; + Reflect.deleteProperty(opts, 'cmd'); + } + + let stdio: StdioOptions = []; + opts.stdio ??= [undefined, undefined, undefined]; + if (opts.stdin) opts.stdio[0] = opts.stdin; + if (opts.stdout) opts.stdio[1] = opts.stdout; + if (opts.stderr) opts.stdio[2] = opts.stderr; + for (let i = 1; i < 3; i++) { // this intentionally skips stdin + let std = opts.stdio[i]; + if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream()); + else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream()); + else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std); + else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!); + else stdio[i] = std; + } + let input: ArrayBufferView | string | undefined; + if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') { + stdio[0] = null; // will be overriden by chp.spawnSync "input" option + //! Due to the fully async nature of Blobs, Responses and Requests, + //! we can't synchronously get the data out of them here in userland. 
+ if (opts.stdio[0] instanceof Blob) throw new NotImplementedError('Bun.spawnSync({ stdin: <Blob> })', spawnSync); + else if (opts.stdio[0] instanceof Response || opts.stdio[0] instanceof Request) throw new NotImplementedError('Bun.spawnSync({ stdin: <Response|Request> })', spawnSync); + else if (typeof opts.stdio[0] === 'number') input = fs.readFileSync(opts.stdio[0]); + else input = opts.stdio[0] as ArrayBufferView; + } + + const subp = chp.spawnSync(cmd, argv, { + cwd: opts.cwd ?? process.cwd(), + env: { ...(opts.env as Record<string, string> ?? process.env) }, + stdio, input + }) as unknown as SyncSubprocess; + const subpAsNode = subp as unknown as SpawnSyncReturns<Buffer>; + if (subpAsNode.error) throw subpAsNode.error; + + subp.exitCode = subpAsNode.status ?? NaN; //! not sure what Bun would return here (child killed by signal) + subp.success = subp.exitCode === 0; + return subp; +}) satisfies typeof Bun.spawnSync; + +export const escapeHTML = ((input) => { + const str = String(input); + let out = ''; + for (let i = 0; i < str.length; i++) { + const char = str[i]; + switch (char) { + case '"': out += '"'; break; + case "'": out += '''; break; + case '&': out += '&'; break; + case '<': out += '<'; break; + case '>': out += '>'; break; + default: out += char; + } + } + return out; +}) satisfies typeof Bun.escapeHTML; + +export const readableStreamToArrayBuffer = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): ArrayBuffer | Promise<ArrayBuffer> => { + return (async () => { + const sink = new ArrayBufferSink(); + const reader = stream.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done) break; + sink.write(value); + } + return sink.end() as ArrayBuffer; + })(); +}) satisfies typeof Bun.readableStreamToArrayBuffer; +export const readableStreamToText = (async (stream: ReadableStream<ArrayBufferView | ArrayBuffer>) => { + let result = ''; + const reader = stream.pipeThrough(new TextDecoderStream()).getReader(); 
ReadableStreamDefaultReader + while (true) { + const { done, value } = await reader.read(); + //! for some reason "done" isnt being set to true so this is just infinitely looping at the moment... sigh + if (done || !value || !value?.length) break; + result += value; + } + return result; +}) satisfies typeof Bun.readableStreamToText; +export const readableStreamToBlob = (async (stream: ReadableStream<any>) => { + const parts = await readableStreamToArray(stream); + return new Blob(parts as BlobPart[]); +}) satisfies typeof Bun.readableStreamToBlob; +export const readableStreamToArray = (async <T = unknown>(stream: ReadableStream<T>) => { + const array = new Array<T>(); + const reader = stream.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done || !value || !(<any>value)?.length) break; + array.push(value as unknown as T); + } + return array; +}) satisfies typeof Bun.readableStreamToArray; +export const readableStreamToJSON = (async <T = unknown>(stream: ReadableStream<Uint8Array>) => { + const text = await readableStreamToText(stream); + try { + return JSON.parse(text) as T; + } catch (err) { + Error.captureStackTrace(err as Error, readableStreamToJSON); + throw err; + } +}) satisfies typeof Bun.readableStreamToJSON; + +export const concatArrayBuffers = ((buffers) => { + let size = 0; + for (const chunk of buffers) size += chunk.byteLength; + const buffer = new ArrayBuffer(size); + const view = new Uint8Array(buffer); + let offset = 0; + for (const chunk of buffers) { + view.set(new Uint8Array(chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer ? 
chunk : chunk.buffer), offset); + offset += chunk.byteLength; + } + return buffer; +}) satisfies typeof Bun.concatArrayBuffers; + +export const ArrayBufferSink = ArrayBufferSinkPolyfill satisfies typeof Bun.ArrayBufferSink; + +export const pathToFileURL = pathToFileURLNode satisfies typeof Bun.pathToFileURL; +export const fileURLToPath = fileURLToPathNode satisfies typeof Bun.fileURLToPath; + +export const dns = dnsPolyfill satisfies typeof Bun.dns; + +export const isMainThread = workers.isMainThread satisfies typeof Bun.isMainThread; + +//! It may be possible to implement plugins with Node ESM loaders, but it would take some effort and have some caveats. +//! For now, we'll simply make all calls to Bun.plugin no-op, such that manual implementation of an external ESM loader is possible, +//! but without needing to strip out all Bun.plugin calls from the source code for running on Node. +const dummyPluginBuilder: PluginBuilder = ({ + onLoad(constraints: PluginConstraints, callback: OnLoadCallback): void { + return; // stubbed + }, + onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void { + return; // stubbed + }, + config: { plugins: [], entrypoints: [] }, +}) satisfies PluginBuilder; +const bunPlugin = <T extends BunPlugin>(options: T) => options?.setup?.(dummyPluginBuilder) as ReturnType<T['setup']>; +bunPlugin.clearAll = () => void 0; +export const plugin = bunPlugin satisfies typeof Bun.plugin; +/*void plugin({ + name: 'test', + target: 'bun', + setup(builder) { + if (builder.target !== 'bun') return; + builder.onResolve({ namespace: 'sample', filter: /.+/ }, args => { + args.importer; + if (args.path === 'foo') return { namespace: 'redirect', path: 'bar' }; + else return; + }); + builder.onLoad({ namespace: 'sample', filter: /.+/ }, args => { + args.path; + return { loader: 'object', exports: { foo: 'bar' }, contents: 'void 0;' }; + }); + } +});*/ diff --git a/packages/bun-polyfills/src/modules/bun/arraybuffersink.ts 
b/packages/bun-polyfills/src/modules/bun/arraybuffersink.ts index 5144bafb9..566dbc4b3 100644 --- a/packages/bun-polyfills/src/modules/bun/arraybuffersink.ts +++ b/packages/bun-polyfills/src/modules/bun/arraybuffersink.ts @@ -1,67 +1,67 @@ -type BunArrayBufferSink = InstanceType<typeof Bun.ArrayBufferSink>;
-
-export class ArrayBufferSink implements BunArrayBufferSink {
- #started: boolean = true;
- #closed: boolean = false;
- #offset: number = 0;
- #stream: boolean = false;
- #asUint8: boolean = false;
- #buffer: Buffer = Buffer.allocUnsafe(8192);
-
- get sinkId(): number { return 0; } //? undocumented, seems to always return 0
-
- #ASSERT_NOT_CLOSED(caller: AnyFunction): void {
- if (!this.#closed) return;
- const err = new TypeError('Expected Sink');
- Error.captureStackTrace(err, caller);
- throw err;
- }
-
- start({ asUint8Array = false, highWaterMark = 8192, stream = false }: Parameters<BunArrayBufferSink['start']>[0] = {}): void {
- this.#ASSERT_NOT_CLOSED(this.start);
- this.#started = true;
- this.#offset = 0;
- this.#stream = stream;
- this.#asUint8 = asUint8Array;
- if (highWaterMark !== this.#buffer.byteLength) this.#buffer = Buffer.allocUnsafe(highWaterMark);
- }
-
- write(data: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
- this.#ASSERT_NOT_CLOSED(this.write);
- if (typeof data === 'string') data = new TextEncoder().encode(data);
- const writedata = (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) ? new Uint8Array(data) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
- // this is very bad API design to not throw an error here, but it's what Bun does
- if (!this.#started) return writedata.byteLength;
-
- if (this.#offset + writedata.byteLength > this.#buffer.byteLength) {
- const newLength = Math.ceil((this.#offset + writedata.byteLength) / 1024) * 1024;
- const newBuffer = Buffer.allocUnsafe(newLength);
- newBuffer.set(this.#buffer);
- this.#buffer = newBuffer;
- }
- this.#buffer.set(writedata, this.#offset);
- this.#offset += writedata.byteLength;
- return writedata.byteLength;
- }
-
- flush(): number | Uint8Array | ArrayBuffer {
- this.#ASSERT_NOT_CLOSED(this.flush);
- if (!this.#stream) return 0; //! brokenly seems to always return 0 and do nothing
- const flushed = new Uint8Array(this.#offset);
- flushed.set(this.#buffer.subarray(0, this.#offset)); // faster than Buffer.copy or Uint8Array.slice
- this.#offset = 0;
- return this.#asUint8 ? flushed : flushed.buffer as ArrayBuffer;
- }
-
- end(): Uint8Array | ArrayBuffer {
- this.#ASSERT_NOT_CLOSED(this.end);
- const stream = this.#stream;
- this.#stream = true; // force flush() to return the data
- const buffer = this.flush() as Uint8Array | ArrayBuffer;
- this.#stream = stream;
- this.#started = false;
- return buffer;
- }
-
- close(): void { this.#closed = true; } //? undocumented
-}
+type BunArrayBufferSink = InstanceType<typeof Bun.ArrayBufferSink>; + +export class ArrayBufferSink implements BunArrayBufferSink { + #started: boolean = true; + #closed: boolean = false; + #offset: number = 0; + #stream: boolean = false; + #asUint8: boolean = false; + #buffer: Buffer = Buffer.allocUnsafe(8192); + + get sinkId(): number { return 0; } //? undocumented, seems to always return 0 + + #ASSERT_NOT_CLOSED(caller: AnyFunction): void { + if (!this.#closed) return; + const err = new TypeError('Expected Sink'); + Error.captureStackTrace(err, caller); + throw err; + } + + start({ asUint8Array = false, highWaterMark = 8192, stream = false }: Parameters<BunArrayBufferSink['start']>[0] = {}): void { + this.#ASSERT_NOT_CLOSED(this.start); + this.#started = true; + this.#offset = 0; + this.#stream = stream; + this.#asUint8 = asUint8Array; + if (highWaterMark !== this.#buffer.byteLength) this.#buffer = Buffer.allocUnsafe(highWaterMark); + } + + write(data: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number { + this.#ASSERT_NOT_CLOSED(this.write); + if (typeof data === 'string') data = new TextEncoder().encode(data); + const writedata = (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) ? new Uint8Array(data) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + // this is very bad API design to not throw an error here, but it's what Bun does + if (!this.#started) return writedata.byteLength; + + if (this.#offset + writedata.byteLength > this.#buffer.byteLength) { + const newLength = Math.ceil((this.#offset + writedata.byteLength) / 1024) * 1024; + const newBuffer = Buffer.allocUnsafe(newLength); + newBuffer.set(this.#buffer); + this.#buffer = newBuffer; + } + this.#buffer.set(writedata, this.#offset); + this.#offset += writedata.byteLength; + return writedata.byteLength; + } + + flush(): number | Uint8Array | ArrayBuffer { + this.#ASSERT_NOT_CLOSED(this.flush); + if (!this.#stream) return 0; //! 
brokenly seems to always return 0 and do nothing + const flushed = new Uint8Array(this.#offset); + flushed.set(this.#buffer.subarray(0, this.#offset)); // faster than Buffer.copy or Uint8Array.slice + this.#offset = 0; + return this.#asUint8 ? flushed : flushed.buffer as ArrayBuffer; + } + + end(): Uint8Array | ArrayBuffer { + this.#ASSERT_NOT_CLOSED(this.end); + const stream = this.#stream; + this.#stream = true; // force flush() to return the data + const buffer = this.flush() as Uint8Array | ArrayBuffer; + this.#stream = stream; + this.#started = false; + return buffer; + } + + close(): void { this.#closed = true; } //? undocumented +} diff --git a/packages/bun-polyfills/src/modules/bun/dns.ts b/packages/bun-polyfills/src/modules/bun/dns.ts index b87470f9c..7f6be8588 100644 --- a/packages/bun-polyfills/src/modules/bun/dns.ts +++ b/packages/bun-polyfills/src/modules/bun/dns.ts @@ -1,21 +1,21 @@ -import dns from 'node:dns';
-
-const dnsObj: typeof Bun.dns = {
- async lookup(hostname, options) {
- const opts = { verbatim: true, all: true } as dns.LookupOptions;
- if (options?.family) {
- if (options.family === 'IPv4') opts.family = 4;
- else if (options.family === 'IPv6') opts.family = 6;
- else if (options.family === 'any') opts.family = 0;
- else opts.family = options.family;
- }
- if (options?.flags) opts.hints = options.flags;
- const records = ((await dns.promises.resolveAny(hostname))
- .filter(r => r.type === 'A' || r.type === 'AAAA') as (dns.AnyARecord | dns.AnyAaaaRecord)[])
- .map(r => ({ address: r.address, family: r.type === 'A' ? 4 as const : 6 as const, ttl: r.ttl }));
- return records;
- },
- // This has more properties but they're not documented on bun-types yet, oh well.
-};
-
-export default dnsObj;
+import dns from 'node:dns'; + +const dnsObj: typeof Bun.dns = { + async lookup(hostname, options) { + const opts = { verbatim: true, all: true } as dns.LookupOptions; + if (options?.family) { + if (options.family === 'IPv4') opts.family = 4; + else if (options.family === 'IPv6') opts.family = 6; + else if (options.family === 'any') opts.family = 0; + else opts.family = options.family; + } + if (options?.flags) opts.hints = options.flags; + const records = ((await dns.promises.resolveAny(hostname)) + .filter(r => r.type === 'A' || r.type === 'AAAA') as (dns.AnyARecord | dns.AnyAaaaRecord)[]) + .map(r => ({ address: r.address, family: r.type === 'A' ? 4 as const : 6 as const, ttl: r.ttl })); + return records; + }, + // This has more properties but they're not documented on bun-types yet, oh well. +}; + +export default dnsObj; diff --git a/packages/bun-polyfills/src/modules/bun/fileblob.ts b/packages/bun-polyfills/src/modules/bun/fileblob.ts index abe2294da..bc96df345 100644 --- a/packages/bun-polyfills/src/modules/bun/fileblob.ts +++ b/packages/bun-polyfills/src/modules/bun/fileblob.ts @@ -1,195 +1,195 @@ -import fs from 'node:fs';
-import tty from 'node:tty';
-import streams from 'node:stream';
-import { ReadableStream as NodeWebReadableStream } from 'node:stream/web';
-import { FileSink } from './filesink.js';
-import { SystemError } from '../../utils/errors.js';
-import type { FileBlob as BunFileBlob, FileSink as BunFileSink } from 'bun';
-
-type NodeJSStream = streams.Readable | streams.Writable;
-
-function NodeJSReadableStreamToBlob(stream: NodeJS.ReadableStream | NodeJS.ReadWriteStream, iostream: boolean = false, type?: string): Promise<Blob> {
- if (stream.isPaused()) stream.resume();
- return new Promise((resolve, reject) => {
- const chunks: any[] = [];
- const dataHandler = (chunk: any) => { chunks.push(chunk); if (iostream) end(); };
- const end = () => {
- resolve(new Blob(chunks, type != null ? { type } : undefined));
- stream.off('data', dataHandler);
- stream.off('end', end);
- stream.pause();
- };
- stream.once('data', dataHandler).once('end', end);
- //.once('error', reject); Bun waits to error on actual operations on the stream, therefore so will we.
- });
-}
-
-export const NodeJSStreamFileBlob = class FileBlob extends Blob {
- constructor(source: NodeJSStream, slice: [number?, number?] = [undefined, undefined], type = 'application/octet-stream') {
- super(undefined, { type });
- Reflect.deleteProperty(this, 'size');
- if (source === process.stdout || source === process.stdin || source === process.stderr) {
- this.#iostream = true;
- }
- this.#readable = source instanceof streams.Readable && !(source instanceof tty.WriteStream);
- this.#source = source;
- this.#slice = slice;
- this.#size = Infinity;
- }
- readonly #iostream: boolean = false;
- readonly #readable: boolean;
- readonly #source: NodeJSStream;
- readonly #slice: [number?, number?];
- #size: number;
-
- slice(begin?: number, end?: number, contentType?: string): Blob;
- slice(begin?: number, contentType?: string): Blob;
- slice(contentType?: string): Blob;
- slice(beginOrType?: number | string, endOrType?: number | string, contentType: string = this.type): Blob {
- if (typeof beginOrType === 'string') return new FileBlob(this.#source, this.#slice, beginOrType);
- if (typeof endOrType === 'string') return new FileBlob(this.#source, [beginOrType, undefined], endOrType);
- return new FileBlob(this.#source, [beginOrType, endOrType], contentType);
- }
-
- override stream(): ReadableStream<Uint8Array> {
- // This makes no sense but Bun does it so we will too
- if (!this.#readable) return new ReadableStream();
- return streams.Readable.toWeb(this.#source as streams.Readable);
- }
-
- #blobStackFn: AnyFunction = this.#getBlob;
-
- async #getBlob(): Promise<Blob> {
- if (!this.#readable) {
- const err = new SystemError(-1, 'read');
- Error.captureStackTrace(err, this.#blobStackFn);
- throw err;
- }
- const blob = (await NodeJSReadableStreamToBlob(this.#source as streams.Readable, this.#iostream)).slice(...this.#slice);
- this.#size = blob.size;
- return blob;
- }
-
- override async text(): Promise<string> {
- if (this.#blobStackFn !== this.json) this.#blobStackFn = this.text;
- return (await this.#getBlob()).text();
- }
- override async arrayBuffer(): Promise<ArrayBuffer> {
- this.#blobStackFn = this.arrayBuffer;
- return (await this.#getBlob()).arrayBuffer();
- }
- override async json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> {
- this.#blobStackFn = this.json;
- return JSON.parse(await this.text()) as Promise<TJSONReturnType>;
- }
-
- override get size(): number { return this.#size; }
- override set size(_) { return; }
-};
-
-export class FileBlob extends Blob implements BunFileBlob {
- constructor(fdOrPath: number | string, opts: BlobPropertyBag = {}) {
- opts.type ??= 'application/octet-stream'; // TODO: Get MIME type from file extension
- super(undefined, opts);
- Reflect.deleteProperty(this, 'size');
- if (Reflect.get(opts, '__data')) this.#data = Reflect.get(opts, '__data') as Blob;
- const slice = Reflect.get(opts, '__slice') as [number?, number?] | undefined;
- if (slice) {
- slice[0] &&= slice[0] | 0; // int cast
- slice[1] &&= slice[1] | 0; // int cast
- this.#slice = slice;
- slice[0] ??= 0;
- if (typeof slice[1] === 'undefined') {
- if (slice[0] < 0) this.#sliceSize = -slice[0];
- }
- else if (slice[0] < 0 && slice[1] < 0) this.#sliceSize = -(slice[0] - slice[1]);
- else if (slice[0] >= 0 && slice[1] >= 0) this.#sliceSize = slice[1] - slice[0];
- }
- if (typeof fdOrPath === 'string') try {
- this.#fd = fs.openSync(fdOrPath, 'r+');
- } catch (err) {
- this.#error = err as SystemError;
- }
- else {
- this.#fd = fdOrPath;
- this.#error = Reflect.get(opts, '__error') as SystemError | undefined;
- }
- if (!this.#error) {
- const rstream = fs.createReadStream('', { fd: this.#fd, start: this.#slice[0], end: this.#slice[1] });
- this.#readable = streams.Readable.toWeb(rstream);
- }
- }
- readonly #readable?: NodeWebReadableStream;
- readonly #error?: SystemError;
- readonly #slice: [number?, number?] = [];
- readonly #sliceSize: number = 0;
- readonly #fd: number = NaN;
- #data?: Blob;
-
- #read() {
- if (this.#error) throw this.#error;
- const read = fs.readFileSync(this.#fd);
- this.#data = new Blob([read.subarray(...this.#slice)], { type: this.type });
- }
-
- //! Bun 0.2 seems to return undefined for this, this might not be accurate or it's broken on Bun's side
- get readable(): ReadableStream<any> {
- if (this.#error) throw this.#error;
- return this.#readable! as ReadableStream;
- }
-
- get lastModified(): number {
- if (this.#error) throw this.#error;
- return fs.fstatSync(this.#fd).mtimeMs;
- }
-
- async exists(): Promise<boolean> {
- return !this.#error;
- }
-
- writer(): BunFileSink {
- if (this.#error) throw this.#error;
- return new FileSink(this.#fd);
- }
-
- // TODO: what's contentType?
- override slice(begin?: number | string, end?: number | string, contentType?: string): FileBlob {
- if (typeof begin === 'string') {
- contentType = begin;
- begin = undefined;
- }
- if (typeof end === 'string') {
- contentType = end;
- end = undefined;
- }
- return new FileBlob(this.#fd, {
- __error: this.#error,
- __slice: [begin, end],
- __data: this.#data?.slice(begin, end),
- } as BlobPropertyBag);
- }
- override arrayBuffer(): Promise<ArrayBuffer> {
- if (!this.#data) this.#read();
- return new Blob([this.#data ?? '']).arrayBuffer();
- }
- override text(): Promise<string> {
- if (!this.#data) this.#read();
- return new Blob([this.#data ?? '']).text();
- }
    override json(): Promise<any>;
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>;
    // Reads (and caches) the file contents, then parses them as JSON.
    // NOTE(review): relies on Blob#json(), a Bun extension — presumably the
    // global Blob is patched elsewhere in this polyfill; verify.
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> | Promise<any> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).json();
    }
    override stream(): NodeJS.ReadableStream;
    override stream(): ReadableStream<Uint8Array>;
    // Reads (and caches) the file contents, then returns a web stream over a
    // fresh Blob copy of the cached data (not over the live file).
    override stream(): ReadableStream<Uint8Array> | NodeJS.ReadableStream {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).stream();
    }
-
- override get size(): number {
- return this.#data?.size ?? (this.#sliceSize || 0);
- }
-}
+import fs from 'node:fs'; +import tty from 'node:tty'; +import streams from 'node:stream'; +import { ReadableStream as NodeWebReadableStream } from 'node:stream/web'; +import { FileSink } from './filesink.js'; +import { SystemError } from '../../utils/errors.js'; +import type { FileBlob as BunFileBlob, FileSink as BunFileSink } from 'bun'; + +type NodeJSStream = streams.Readable | streams.Writable; + +function NodeJSReadableStreamToBlob(stream: NodeJS.ReadableStream | NodeJS.ReadWriteStream, iostream: boolean = false, type?: string): Promise<Blob> { + if (stream.isPaused()) stream.resume(); + return new Promise((resolve, reject) => { + const chunks: any[] = []; + const dataHandler = (chunk: any) => { chunks.push(chunk); if (iostream) end(); }; + const end = () => { + resolve(new Blob(chunks, type != null ? { type } : undefined)); + stream.off('data', dataHandler); + stream.off('end', end); + stream.pause(); + }; + stream.once('data', dataHandler).once('end', end); + //.once('error', reject); Bun waits to error on actual operations on the stream, therefore so will we. + }); +} + +export const NodeJSStreamFileBlob = class FileBlob extends Blob { + constructor(source: NodeJSStream, slice: [number?, number?] 
= [undefined, undefined], type = 'application/octet-stream') { + super(undefined, { type }); + Reflect.deleteProperty(this, 'size'); + if (source === process.stdout || source === process.stdin || source === process.stderr) { + this.#iostream = true; + } + this.#readable = source instanceof streams.Readable && !(source instanceof tty.WriteStream); + this.#source = source; + this.#slice = slice; + this.#size = Infinity; + } + readonly #iostream: boolean = false; + readonly #readable: boolean; + readonly #source: NodeJSStream; + readonly #slice: [number?, number?]; + #size: number; + + slice(begin?: number, end?: number, contentType?: string): Blob; + slice(begin?: number, contentType?: string): Blob; + slice(contentType?: string): Blob; + slice(beginOrType?: number | string, endOrType?: number | string, contentType: string = this.type): Blob { + if (typeof beginOrType === 'string') return new FileBlob(this.#source, this.#slice, beginOrType); + if (typeof endOrType === 'string') return new FileBlob(this.#source, [beginOrType, undefined], endOrType); + return new FileBlob(this.#source, [beginOrType, endOrType], contentType); + } + + override stream(): ReadableStream<Uint8Array> { + // This makes no sense but Bun does it so we will too + if (!this.#readable) return new ReadableStream(); + return streams.Readable.toWeb(this.#source as streams.Readable); + } + + #blobStackFn: AnyFunction = this.#getBlob; + + async #getBlob(): Promise<Blob> { + if (!this.#readable) { + const err = new SystemError(-1, 'read'); + Error.captureStackTrace(err, this.#blobStackFn); + throw err; + } + const blob = (await NodeJSReadableStreamToBlob(this.#source as streams.Readable, this.#iostream)).slice(...this.#slice); + this.#size = blob.size; + return blob; + } + + override async text(): Promise<string> { + if (this.#blobStackFn !== this.json) this.#blobStackFn = this.text; + return (await this.#getBlob()).text(); + } + override async arrayBuffer(): Promise<ArrayBuffer> { + this.#blobStackFn = 
this.arrayBuffer; + return (await this.#getBlob()).arrayBuffer(); + } + override async json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> { + this.#blobStackFn = this.json; + return JSON.parse(await this.text()) as Promise<TJSONReturnType>; + } + + override get size(): number { return this.#size; } + override set size(_) { return; } +}; + +export class FileBlob extends Blob implements BunFileBlob { + constructor(fdOrPath: number | string, opts: BlobPropertyBag = {}) { + opts.type ??= 'application/octet-stream'; // TODO: Get MIME type from file extension + super(undefined, opts); + Reflect.deleteProperty(this, 'size'); + if (Reflect.get(opts, '__data')) this.#data = Reflect.get(opts, '__data') as Blob; + const slice = Reflect.get(opts, '__slice') as [number?, number?] | undefined; + if (slice) { + slice[0] &&= slice[0] | 0; // int cast + slice[1] &&= slice[1] | 0; // int cast + this.#slice = slice; + slice[0] ??= 0; + if (typeof slice[1] === 'undefined') { + if (slice[0] < 0) this.#sliceSize = -slice[0]; + } + else if (slice[0] < 0 && slice[1] < 0) this.#sliceSize = -(slice[0] - slice[1]); + else if (slice[0] >= 0 && slice[1] >= 0) this.#sliceSize = slice[1] - slice[0]; + } + if (typeof fdOrPath === 'string') try { + this.#fd = fs.openSync(fdOrPath, 'r+'); + } catch (err) { + this.#error = err as SystemError; + } + else { + this.#fd = fdOrPath; + this.#error = Reflect.get(opts, '__error') as SystemError | undefined; + } + if (!this.#error) { + const rstream = fs.createReadStream('', { fd: this.#fd, start: this.#slice[0], end: this.#slice[1] }); + this.#readable = streams.Readable.toWeb(rstream); + } + } + readonly #readable?: NodeWebReadableStream; + readonly #error?: SystemError; + readonly #slice: [number?, number?] 
= []; + readonly #sliceSize: number = 0; + readonly #fd: number = NaN; + #data?: Blob; + + #read() { + if (this.#error) throw this.#error; + const read = fs.readFileSync(this.#fd); + this.#data = new Blob([read.subarray(...this.#slice)], { type: this.type }); + } + + //! Bun 0.2 seems to return undefined for this, this might not be accurate or it's broken on Bun's side + get readable(): ReadableStream<any> { + if (this.#error) throw this.#error; + return this.#readable! as ReadableStream; + } + + get lastModified(): number { + if (this.#error) throw this.#error; + return fs.fstatSync(this.#fd).mtimeMs; + } + + async exists(): Promise<boolean> { + return !this.#error; + } + + writer(): BunFileSink { + if (this.#error) throw this.#error; + return new FileSink(this.#fd); + } + + // TODO: what's contentType? + override slice(begin?: number | string, end?: number | string, contentType?: string): FileBlob { + if (typeof begin === 'string') { + contentType = begin; + begin = undefined; + } + if (typeof end === 'string') { + contentType = end; + end = undefined; + } + return new FileBlob(this.#fd, { + __error: this.#error, + __slice: [begin, end], + __data: this.#data?.slice(begin, end), + } as BlobPropertyBag); + } + override arrayBuffer(): Promise<ArrayBuffer> { + if (!this.#data) this.#read(); + return new Blob([this.#data ?? '']).arrayBuffer(); + } + override text(): Promise<string> { + if (!this.#data) this.#read(); + return new Blob([this.#data ?? '']).text(); + } + override json(): Promise<any>; + override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>; + override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> | Promise<any> { + if (!this.#data) this.#read(); + return new Blob([this.#data ?? '']).json(); + } + override stream(): NodeJS.ReadableStream; + override stream(): ReadableStream<Uint8Array>; + override stream(): ReadableStream<Uint8Array> | NodeJS.ReadableStream { + if (!this.#data) this.#read(); + return new Blob([this.#data ?? 
'']).stream(); + } + + override get size(): number { + return this.#data?.size ?? (this.#sliceSize || 0); + } +} diff --git a/packages/bun-polyfills/src/modules/bun/filesink.ts b/packages/bun-polyfills/src/modules/bun/filesink.ts index 4767814fd..c3370ff68 100644 --- a/packages/bun-polyfills/src/modules/bun/filesink.ts +++ b/packages/bun-polyfills/src/modules/bun/filesink.ts @@ -1,87 +1,87 @@ -import fs from 'node:fs';
-import { SystemError } from '../../utils/errors.js';
-import type { FileSink as BunFileSink } from 'bun';
-
-export class FileSink implements BunFileSink {
- constructor(fdOrPathOrStream: number | string | NodeJS.WritableStream) {
- if (typeof fdOrPathOrStream === 'string') try {
- this.#fd = fs.openSync(fdOrPathOrStream, 'a+');
- fs.ftruncateSync(this.#fd, 0);
- } catch (err) {
- throw err as SystemError;
- }
- else if (typeof fdOrPathOrStream === 'number') {
- this.#fd = fdOrPathOrStream; // hope this fd is writable
- fs.ftruncateSync(this.#fd, 0);
- }
- else {
- this.#stream = fdOrPathOrStream;
- }
- }
- #fd: number = NaN;
- #stream: NodeJS.WritableStream | undefined;
- #closed: boolean = false;
- #writtenSinceFlush: number = 0;
- #totalWritten: number = 0;
-
- start(options?: { highWaterMark?: number | undefined; } | undefined): void {
- return; // TODO
- }
-
- ref(): void {
- return; // TODO
- }
-
- unref(): void {
- return; // TODO
- }
-
- write(chunk: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
- if (this.#closed) {
- return typeof chunk === 'string' ? chunk.length : chunk.byteLength;
- }
- if (this.#stream) {
- let data;
- if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) data = new Uint8Array(chunk);
- else if (!(chunk instanceof Uint8Array) && typeof chunk !== 'string') data = new Uint8Array(chunk.buffer);
- else data = chunk;
- this.#stream.write(data);
- const written = typeof data === 'string' ? data.length : data.byteLength;
- this.#totalWritten += written;
- return written;
- }
- if (typeof chunk === 'string') {
- fs.appendFileSync(this.#fd, chunk, 'utf8');
- this.#writtenSinceFlush += chunk.length;
- return chunk.length;
- }
- if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) fs.appendFileSync(this.#fd, new Uint8Array(chunk));
- else fs.appendFileSync(this.#fd, new Uint8Array(chunk.buffer));
- this.#writtenSinceFlush += chunk.byteLength;
- return chunk.byteLength;
- }
-
- //! flushing after writing to a closed FileSink segfaults in Bun but I don't see the need to implement that behavior
- flush(): number | Promise<number> {
- if (this.#closed) return 0;
- // no-op because this is a synchronous implementation
- const written = this.#writtenSinceFlush;
- this.#writtenSinceFlush = 0;
- return written;
- }
-
- //! not sure what to do with this error
- end(error?: Error): number | Promise<number> {
- if (this.#closed) return this.#totalWritten;
- const flushed = this.flush();
- if (this.#stream) {
- this.#stream.end();
- this.#closed = true;
- return flushed;
- }
- this.#totalWritten = fs.fstatSync(this.#fd).size;
- fs.closeSync(this.#fd);
- this.#closed = true;
- return flushed;
- }
-}
+import fs from 'node:fs'; +import { SystemError } from '../../utils/errors.js'; +import type { FileSink as BunFileSink } from 'bun'; + +export class FileSink implements BunFileSink { + constructor(fdOrPathOrStream: number | string | NodeJS.WritableStream) { + if (typeof fdOrPathOrStream === 'string') try { + this.#fd = fs.openSync(fdOrPathOrStream, 'a+'); + fs.ftruncateSync(this.#fd, 0); + } catch (err) { + throw err as SystemError; + } + else if (typeof fdOrPathOrStream === 'number') { + this.#fd = fdOrPathOrStream; // hope this fd is writable + fs.ftruncateSync(this.#fd, 0); + } + else { + this.#stream = fdOrPathOrStream; + } + } + #fd: number = NaN; + #stream: NodeJS.WritableStream | undefined; + #closed: boolean = false; + #writtenSinceFlush: number = 0; + #totalWritten: number = 0; + + start(options?: { highWaterMark?: number | undefined; } | undefined): void { + return; // TODO + } + + ref(): void { + return; // TODO + } + + unref(): void { + return; // TODO + } + + write(chunk: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number { + if (this.#closed) { + return typeof chunk === 'string' ? chunk.length : chunk.byteLength; + } + if (this.#stream) { + let data; + if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) data = new Uint8Array(chunk); + else if (!(chunk instanceof Uint8Array) && typeof chunk !== 'string') data = new Uint8Array(chunk.buffer); + else data = chunk; + this.#stream.write(data); + const written = typeof data === 'string' ? 
data.length : data.byteLength; + this.#totalWritten += written; + return written; + } + if (typeof chunk === 'string') { + fs.appendFileSync(this.#fd, chunk, 'utf8'); + this.#writtenSinceFlush += chunk.length; + return chunk.length; + } + if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) fs.appendFileSync(this.#fd, new Uint8Array(chunk)); + else fs.appendFileSync(this.#fd, new Uint8Array(chunk.buffer)); + this.#writtenSinceFlush += chunk.byteLength; + return chunk.byteLength; + } + + //! flushing after writing to a closed FileSink segfaults in Bun but I don't see the need to implement that behavior + flush(): number | Promise<number> { + if (this.#closed) return 0; + // no-op because this is a synchronous implementation + const written = this.#writtenSinceFlush; + this.#writtenSinceFlush = 0; + return written; + } + + //! not sure what to do with this error + end(error?: Error): number | Promise<number> { + if (this.#closed) return this.#totalWritten; + const flushed = this.flush(); + if (this.#stream) { + this.#stream.end(); + this.#closed = true; + return flushed; + } + this.#totalWritten = fs.fstatSync(this.#fd).size; + fs.closeSync(this.#fd); + this.#closed = true; + return flushed; + } +} diff --git a/packages/bun-polyfills/src/modules/bun/hashes.ts b/packages/bun-polyfills/src/modules/bun/hashes.ts index ec254c03a..3b4a8abdf 100644 --- a/packages/bun-polyfills/src/modules/bun/hashes.ts +++ b/packages/bun-polyfills/src/modules/bun/hashes.ts @@ -1,185 +1,185 @@ -import type { CryptoHashInterface, DigestEncoding, Hash } from 'bun';
-import nodecrypto from 'node:crypto';
-import os from 'node:os';
-import md4, { Md4 } from 'js-md4';
-import { wyhash, adler32, crc32, cityhash32, cityhash64, murmur32v3, murmur64v2, murmur32v2 } from '../../../lib/zighash/index.mjs';
-
// Bun.hash polyfill: wyhash of the input with an optional numeric seed.
export const bunHash = ((data, seed = 0): bigint => wyhash(data, BigInt(seed))) as typeof Bun.hash;
// Non-cryptographic hash family attached to Bun.hash; each member delegates
// to the bundled zighash implementation.
export const bunHashProto: Hash = {
    wyhash(data, seed = 0n) { return wyhash(data, seed); },
    adler32(data) { return adler32(data); },
    crc32(data) { return crc32(data); },
    cityHash32(data) { return cityhash32(data); },
    cityHash64(data, seed = 0n) { return cityhash64(data, seed); },
    murmur32v3(data, seed = 0) { return murmur32v3(data, seed); },
    murmur32v2(data, seed = 0) { return murmur32v2(data, seed); },
    murmur64v2(data, seed = 0n) { return murmur64v2(data, seed); },
};
-
// Minimal node:crypto Hash-compatible surface required by BaseHash; lets
// algorithms node:crypto lacks (e.g. MD4) plug in a hand-rolled implementation.
type HashImpl = {
    digest(): Buffer;
    digest(encoding: nodecrypto.BinaryToTextEncoding): string;
    update(data: nodecrypto.BinaryLike): HashImpl;
    update(data: string, inputEncoding: nodecrypto.Encoding): HashImpl;
};
// Shared implementation of Bun's CryptoHashInterface on top of node:crypto
// (or a custom HashImpl for algorithms node does not provide, e.g. MD4).
abstract class BaseHash<T> implements CryptoHashInterface<T> {
    readonly #hash: HashImpl | null;
    constructor(algorithm: string | HashImpl) {
        if (typeof algorithm === 'string') this.#hash = nodecrypto.createHash(algorithm);
        // If no preset algorithm is given, expect the subclass to fully implement its own.
        else this.#hash = algorithm;
    }
    // Feeds data into the hash; raw ArrayBuffers are wrapped in a view first
    // since node's update() expects BinaryLike (string or ArrayBufferView).
    update(data: StringOrBuffer) {
        if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) this.#hash!.update(new Uint8Array(data));
        else this.#hash!.update(data);
        return this as unknown as T; // is there any good way to do this without asserting?
    }
    digest(encoding: DigestEncoding): string;
    digest(hashInto?: TypedArray): TypedArray;
    // Finalizes the hash. With a string encoding, returns the encoded digest;
    // with no argument, returns a fresh Uint8Array over the digest bytes; with
    // a TypedArray, writes the digest into it and returns the same array.
    digest(encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        if (typeof encodingOrHashInto === 'string') {
            const encoded = this.#hash!.digest(encodingOrHashInto);
            // you'd think node would throw an error if the encoding is invalid, but nope!
            // instead it silently returns as if you passed no encoding and gives a Buffer...
            if (Buffer.isBuffer(encoded)) throw new TypeError(`Unknown encoding: "${encodingOrHashInto}"`);
            else return encoded;
        }
        const digested = this.#hash!.digest();
        if (encodingOrHashInto === undefined) return new Uint8Array(digested.buffer, digested.byteOffset, digested.byteLength);
        if (encodingOrHashInto.byteLength < this.byteLength) throw new TypeError(`TypedArray must be at least ${this.byteLength} bytes`);
        if (encodingOrHashInto instanceof BigInt64Array || encodingOrHashInto instanceof BigUint64Array) {
            // avoid checking endianness for every loop iteration
            // NOTE(review): on LE hosts bytes are inserted reversed within each
            // 8-byte group so the BigInt value mirrors the native memory layout;
            // confirm the BE branch against Bun's output on a BE host.
            const endianAwareInsert = os.endianness() === 'LE'
                ? (arr: string[], j: number, num: string) => arr[7 - j] = num
                : (arr: string[], j: number, num: string) => arr[j] = num;

            // Pack each 8-byte group of the digest into one BigInt element.
            for (let i = 0; i < digested.byteLength; i += 8) {
                const bigintStrArr = ['', '', '', '', '', '', '', ''];
                for (let j = 0; j < 8; j++) {
                    const byte = digested[i + j];
                    if (byte === undefined) break;
                    endianAwareInsert(bigintStrArr, j, byte.toString(16).padStart(2, '0'));
                }
                encodingOrHashInto[i / 8] = BigInt(`0x${bigintStrArr.join('')}`);
            }
        } else {
            const HashIntoTypedArray = encodingOrHashInto.constructor as TypedArrayConstructor;
            // this will work as long as all hash classes have a byteLength that is a multiple of 4 bytes
            encodingOrHashInto.set(new HashIntoTypedArray(digested.buffer, digested.byteOffset, digested.byteLength / HashIntoTypedArray.BYTES_PER_ELEMENT));
        }
        return encodingOrHashInto;
    }
    // Static stub satisfying the interface; every concrete subclass overrides
    // this with a real one-shot hash.
    static hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    static hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { return '' };
    static readonly byteLength: number;
    abstract readonly byteLength: number;
}
-
-export class SHA1 extends BaseHash<SHA1> {
- constructor() { super('sha1'); }
- static override readonly byteLength = 20;
- override readonly byteLength = 20;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
-export class MD4 extends BaseHash<MD4> {
- constructor() { //! Not supported by nodecrypto
- const hash = md4.create() as unknown as Omit<Md4, 'toString'> & { _update: Md4['update'] };
- function digest(): Buffer;
- function digest(encoding: nodecrypto.BinaryToTextEncoding): string;
- function digest(encoding?: nodecrypto.BinaryToTextEncoding) {
- const buf = Buffer.from(hash.arrayBuffer());
- if (encoding) return buf.toString(encoding);
- else return buf;
- }
- function update(data: nodecrypto.BinaryLike) {
- if (typeof data === 'string') hash._update(data);
- else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) hash._update(new Uint8Array(data));
- else hash._update(new Uint8Array(data.buffer));
- return hash as unknown as MD4HashImpl;
- }
- type MD4HashImpl = Omit<Md4, 'toString'> & { digest: typeof digest, update: typeof update };
- // @ts-expect-error patches to reuse the BaseHash methods
- hash.digest = digest; hash._update = hash.update; hash.update = update;
- super(hash as unknown as MD4HashImpl);
- }
- static override readonly byteLength = 16;
- override readonly byteLength = 16;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
-export class MD5 extends BaseHash<MD5> {
- constructor() { super('md5'); }
- static override readonly byteLength = 16;
- override readonly byteLength = 16;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
-export class SHA224 extends BaseHash<SHA224> {
- constructor() { super('sha224'); }
- static override readonly byteLength = 28;
- override readonly byteLength = 28;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
-export class SHA512 extends BaseHash<SHA512> {
- constructor() { super('sha512'); }
- static override readonly byteLength = 64;
- override readonly byteLength = 64;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
-export class SHA384 extends BaseHash<SHA384> {
- constructor() { super('sha384'); }
- static override readonly byteLength = 48;
- override readonly byteLength = 48;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
-export class SHA256 extends BaseHash<SHA256> {
- constructor() { super('sha256'); }
- static override readonly byteLength = 32;
- override readonly byteLength = 32;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
-export class SHA512_256 extends BaseHash<SHA512_256> {
- constructor() { super('sha512-256'); }
- static override readonly byteLength = 32;
- override readonly byteLength = 32;
- static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
- static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
- static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
- const instance = new this(); instance.update(data);
- return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
- }
-}
+import type { CryptoHashInterface, DigestEncoding, Hash } from 'bun'; +import nodecrypto from 'node:crypto'; +import os from 'node:os'; +import md4, { Md4 } from 'js-md4'; +import { wyhash, adler32, crc32, cityhash32, cityhash64, murmur32v3, murmur64v2, murmur32v2 } from '../../../lib/zighash/index.mjs'; + +export const bunHash = ((data, seed = 0): bigint => wyhash(data, BigInt(seed))) as typeof Bun.hash; +export const bunHashProto: Hash = { + wyhash(data, seed = 0n) { return wyhash(data, seed); }, + adler32(data) { return adler32(data); }, + crc32(data) { return crc32(data); }, + cityHash32(data) { return cityhash32(data); }, + cityHash64(data, seed = 0n) { return cityhash64(data, seed); }, + murmur32v3(data, seed = 0) { return murmur32v3(data, seed); }, + murmur32v2(data, seed = 0) { return murmur32v2(data, seed); }, + murmur64v2(data, seed = 0n) { return murmur64v2(data, seed); }, +}; + +type HashImpl = { + digest(): Buffer; + digest(encoding: nodecrypto.BinaryToTextEncoding): string; + update(data: nodecrypto.BinaryLike): HashImpl; + update(data: string, inputEncoding: nodecrypto.Encoding): HashImpl; +}; +abstract class BaseHash<T> implements CryptoHashInterface<T> { + readonly #hash: HashImpl | null; + constructor(algorithm: string | HashImpl) { + if (typeof algorithm === 'string') this.#hash = nodecrypto.createHash(algorithm); + // If no preset algorithm is given, expect the subclass to fully implement its own. + else this.#hash = algorithm; + } + update(data: StringOrBuffer) { + if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) this.#hash!.update(new Uint8Array(data)); + else this.#hash!.update(data); + return this as unknown as T; // is there any good way to do this without asserting? 
+ } + digest(encoding: DigestEncoding): string; + digest(hashInto?: TypedArray): TypedArray; + digest(encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + if (typeof encodingOrHashInto === 'string') { + const encoded = this.#hash!.digest(encodingOrHashInto); + // you'd think node would throw an error if the encoding is invalid, but nope! + // instead it silently returns as if you passed no encoding and gives a Buffer... + if (Buffer.isBuffer(encoded)) throw new TypeError(`Unknown encoding: "${encodingOrHashInto}"`); + else return encoded; + } + const digested = this.#hash!.digest(); + if (encodingOrHashInto === undefined) return new Uint8Array(digested.buffer, digested.byteOffset, digested.byteLength); + if (encodingOrHashInto.byteLength < this.byteLength) throw new TypeError(`TypedArray must be at least ${this.byteLength} bytes`); + if (encodingOrHashInto instanceof BigInt64Array || encodingOrHashInto instanceof BigUint64Array) { + // avoid checking endianness for every loop iteration + const endianAwareInsert = os.endianness() === 'LE' + ? 
(arr: string[], j: number, num: string) => arr[7 - j] = num + : (arr: string[], j: number, num: string) => arr[j] = num; + + for (let i = 0; i < digested.byteLength; i += 8) { + const bigintStrArr = ['', '', '', '', '', '', '', '']; + for (let j = 0; j < 8; j++) { + const byte = digested[i + j]; + if (byte === undefined) break; + endianAwareInsert(bigintStrArr, j, byte.toString(16).padStart(2, '0')); + } + encodingOrHashInto[i / 8] = BigInt(`0x${bigintStrArr.join('')}`); + } + } else { + const HashIntoTypedArray = encodingOrHashInto.constructor as TypedArrayConstructor; + // this will work as long as all hash classes have a byteLength that is a multiple of 4 bytes + encodingOrHashInto.set(new HashIntoTypedArray(digested.buffer, digested.byteOffset, digested.byteLength / HashIntoTypedArray.BYTES_PER_ELEMENT)); + } + return encodingOrHashInto; + } + static hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { return '' }; + static readonly byteLength: number; + abstract readonly byteLength: number; +} + +export class SHA1 extends BaseHash<SHA1> { + constructor() { super('sha1'); } + static override readonly byteLength = 20; + override readonly byteLength = 20; + static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} +export class MD4 extends BaseHash<MD4> { + constructor() { //! 
Not supported by nodecrypto + const hash = md4.create() as unknown as Omit<Md4, 'toString'> & { _update: Md4['update'] }; + function digest(): Buffer; + function digest(encoding: nodecrypto.BinaryToTextEncoding): string; + function digest(encoding?: nodecrypto.BinaryToTextEncoding) { + const buf = Buffer.from(hash.arrayBuffer()); + if (encoding) return buf.toString(encoding); + else return buf; + } + function update(data: nodecrypto.BinaryLike) { + if (typeof data === 'string') hash._update(data); + else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) hash._update(new Uint8Array(data)); + else hash._update(new Uint8Array(data.buffer)); + return hash as unknown as MD4HashImpl; + } + type MD4HashImpl = Omit<Md4, 'toString'> & { digest: typeof digest, update: typeof update }; + // @ts-expect-error patches to reuse the BaseHash methods + hash.digest = digest; hash._update = hash.update; hash.update = update; + super(hash as unknown as MD4HashImpl); + } + static override readonly byteLength = 16; + override readonly byteLength = 16; + static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} +export class MD5 extends BaseHash<MD5> { + constructor() { super('md5'); } + static override readonly byteLength = 16; + override readonly byteLength = 16; + static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return 
instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} +export class SHA224 extends BaseHash<SHA224> { + constructor() { super('sha224'); } + static override readonly byteLength = 28; + override readonly byteLength = 28; + static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} +export class SHA512 extends BaseHash<SHA512> { + constructor() { super('sha512'); } + static override readonly byteLength = 64; + override readonly byteLength = 64; + static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} +export class SHA384 extends BaseHash<SHA384> { + constructor() { super('sha384'); } + static override readonly byteLength = 48; + override readonly byteLength = 48; + static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} +export class SHA256 extends BaseHash<SHA256> { + constructor() { super('sha256'); } + static override readonly byteLength = 32; + override readonly byteLength = 32; + 
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} +export class SHA512_256 extends BaseHash<SHA512_256> { + constructor() { super('sha512-256'); } + static override readonly byteLength = 32; + override readonly byteLength = 32; + static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string; + static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray; + static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { + const instance = new this(); instance.update(data); + return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray); + } +} diff --git a/packages/bun-polyfills/src/modules/bun/transpiler.ts b/packages/bun-polyfills/src/modules/bun/transpiler.ts index ba33e0653..670201e2c 100644 --- a/packages/bun-polyfills/src/modules/bun/transpiler.ts +++ b/packages/bun-polyfills/src/modules/bun/transpiler.ts @@ -1,96 +1,96 @@ -import type { JavaScriptLoader, TranspilerOptions, Transpiler as BunTranspiler, Import } from 'bun';
-import { transformSync, scan, init } from 'bun-wasm';
-import { Message } from 'bun-wasm/schema';
-import $ from 'chalk';
-
-await init();
-
-enum InternalImportKind {
- 'entry-point' = 1, // entry_point
- 'import-statement' = 2, // stmt
- 'require-call' = 3, // require
- 'dynamic-import' = 4, // dynamic
- 'require-resolve' = 5, // require_resolve
- 'import-rule' = 6, // at
- 'url-token' = 7, // url
- 'internal' = 8, // internal
-}
-
-export type ScanImportsEntry = {
- kind: 'import-statement' | 'dynamic-import';
- path: string;
-};
-
-export default class Transpiler implements BunTranspiler {
- constructor(options?: TranspilerOptions) {
- this.#options = options ?? {};
- this.#rootFile = 'input.tsx'; // + (this.#options.loader ?? 'tsx');
- //? ^ NOTE: with current bun-wasm builds, the loader option is ignored and hardcoded to tsx
- }
- #options: TranspilerOptions;
- #rootFile: string;
- #decoder?: TextDecoder;
- #internallyCalled: boolean = false;
-
- async transform(code: StringOrBuffer, loader: JavaScriptLoader): Promise<string> {
- this.#internallyCalled = true;
- return this.transformSync(code, loader);
- }
-
- transformSync(code: StringOrBuffer, ctx: object): string;
- transformSync(code: StringOrBuffer, loader: JavaScriptLoader, ctx: object): string;
- transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | undefined): string;
- transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | object, ctx: object = {}): string {
- if (!code) return ''; // wasm dies with empty string input
- if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array');
- if (typeof loader !== 'string') loader = this.#options.loader;
- const result = transformSync(code, this.#rootFile, loader);
- // status 1 = success, status 2 = error
- if (result.status === 2) throw formatBuildErrors(result.errors, this.#internallyCalled ? this.transform : this.transformSync);
- this.#internallyCalled = false;
- this.#decoder ??= new TextDecoder();
- return this.#decoder.decode(result.files[0].data);
- }
-
- scan(code: StringOrBuffer): { exports: string[]; imports: Import[]; } {
- if (!code) return { exports: [], imports: [] }; // wasm dies with empty string input
- if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array');
-
- const result = scan(code, this.#rootFile, this.#options.loader);
- if (result.errors.length) throw formatBuildErrors(result.errors, this.#internallyCalled ? this.scanImports : this.scan);
- this.#internallyCalled = false;
-
- result.imports.forEach(imp => (imp.kind as unknown) = InternalImportKind[imp.kind]);
- return {
- exports: result.exports,
- imports: result.imports as unknown as Import[],
- };
- }
-
- scanImports(code: StringOrBuffer): ScanImportsEntry[] {
- this.#internallyCalled = true;
- return this.scan(code).imports.filter(imp => imp.kind === 'import-statement' || imp.kind === 'dynamic-import') as ScanImportsEntry[];
- }
-}
-
-function formatBuildErrors(buildErrors: Message[], caller: Transpiler[keyof Transpiler]): AggregateError {
- const formatted = buildErrors.map(err => {
- const loc = err.data.location;
- const str = `${$.redBright('error')}${$.gray(':')} ${$.bold(err.data.text)}\n` +
- (loc
- ? `${highlightErrorChar(loc.line_text, loc.offset)}\n` +
- $.redBright.bold('^'.padStart(loc.column)) + '\n' +
- `${$.bold(loc.file)}${$.gray(':')}${$.yellowBright(loc.line)}${$.gray(':')}${$.yellowBright(loc.column)} ${$.gray(loc.offset)}`
- : ''
- );
- return { __proto__: Error.prototype, stack: str };
- });
- const aggregate = new AggregateError(formatted, `Input code has ${formatted.length} error${formatted.length === 1 ? '' : 's'}`);
- Error.captureStackTrace(aggregate, caller);
- aggregate.name = 'BuildError';
- return aggregate;
-}
-
-function highlightErrorChar(str: string, at: number): string {
- return str.slice(0, at) + $.red(str[at]) + str.slice(at + 1);
-}
+import type { JavaScriptLoader, TranspilerOptions, Transpiler as BunTranspiler, Import } from 'bun'; +import { transformSync, scan, init } from 'bun-wasm'; +import { Message } from 'bun-wasm/schema'; +import $ from 'chalk'; + +await init(); + +enum InternalImportKind { + 'entry-point' = 1, // entry_point + 'import-statement' = 2, // stmt + 'require-call' = 3, // require + 'dynamic-import' = 4, // dynamic + 'require-resolve' = 5, // require_resolve + 'import-rule' = 6, // at + 'url-token' = 7, // url + 'internal' = 8, // internal +} + +export type ScanImportsEntry = { + kind: 'import-statement' | 'dynamic-import'; + path: string; +}; + +export default class Transpiler implements BunTranspiler { + constructor(options?: TranspilerOptions) { + this.#options = options ?? {}; + this.#rootFile = 'input.tsx'; // + (this.#options.loader ?? 'tsx'); + //? ^ NOTE: with current bun-wasm builds, the loader option is ignored and hardcoded to tsx + } + #options: TranspilerOptions; + #rootFile: string; + #decoder?: TextDecoder; + #internallyCalled: boolean = false; + + async transform(code: StringOrBuffer, loader: JavaScriptLoader): Promise<string> { + this.#internallyCalled = true; + return this.transformSync(code, loader); + } + + transformSync(code: StringOrBuffer, ctx: object): string; + transformSync(code: StringOrBuffer, loader: JavaScriptLoader, ctx: object): string; + transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | undefined): string; + transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | object, ctx: object = {}): string { + if (!code) return ''; // wasm dies with empty string input + if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array'); + if (typeof loader !== 'string') loader = this.#options.loader; + const result = transformSync(code, this.#rootFile, loader); + // status 1 = success, status 2 = error + if (result.status === 2) throw formatBuildErrors(result.errors, 
this.#internallyCalled ? this.transform : this.transformSync); + this.#internallyCalled = false; + this.#decoder ??= new TextDecoder(); + return this.#decoder.decode(result.files[0].data); + } + + scan(code: StringOrBuffer): { exports: string[]; imports: Import[]; } { + if (!code) return { exports: [], imports: [] }; // wasm dies with empty string input + if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array'); + + const result = scan(code, this.#rootFile, this.#options.loader); + if (result.errors.length) throw formatBuildErrors(result.errors, this.#internallyCalled ? this.scanImports : this.scan); + this.#internallyCalled = false; + + result.imports.forEach(imp => (imp.kind as unknown) = InternalImportKind[imp.kind]); + return { + exports: result.exports, + imports: result.imports as unknown as Import[], + }; + } + + scanImports(code: StringOrBuffer): ScanImportsEntry[] { + this.#internallyCalled = true; + return this.scan(code).imports.filter(imp => imp.kind === 'import-statement' || imp.kind === 'dynamic-import') as ScanImportsEntry[]; + } +} + +function formatBuildErrors(buildErrors: Message[], caller: Transpiler[keyof Transpiler]): AggregateError { + const formatted = buildErrors.map(err => { + const loc = err.data.location; + const str = `${$.redBright('error')}${$.gray(':')} ${$.bold(err.data.text)}\n` + + (loc + ? `${highlightErrorChar(loc.line_text, loc.offset)}\n` + + $.redBright.bold('^'.padStart(loc.column)) + '\n' + + `${$.bold(loc.file)}${$.gray(':')}${$.yellowBright(loc.line)}${$.gray(':')}${$.yellowBright(loc.column)} ${$.gray(loc.offset)}` + : '' + ); + return { __proto__: Error.prototype, stack: str }; + }); + const aggregate = new AggregateError(formatted, `Input code has ${formatted.length} error${formatted.length === 1 ? 
'' : 's'}`); + Error.captureStackTrace(aggregate, caller); + aggregate.name = 'BuildError'; + return aggregate; +} + +function highlightErrorChar(str: string, at: number): string { + return str.slice(0, at) + $.red(str[at]) + str.slice(at + 1); +} diff --git a/packages/bun-polyfills/src/types/md4.d.ts b/packages/bun-polyfills/src/types/md4.d.ts index 30413396e..68d01923c 100644 --- a/packages/bun-polyfills/src/types/md4.d.ts +++ b/packages/bun-polyfills/src/types/md4.d.ts @@ -1,72 +1,72 @@ -declare module 'js-md4' {
- export type MD4Input = string | ArrayBuffer | Uint8Array | number[];
-
- interface md4 {
- /**
- * # Broken, will throw an error.
- * @deprecated Use {@link md4.hex} instead.
- */
- (input: MD4Input): never;
- /** Creates an `Md4` hasher instance. */
- create(): Md4;
- /** Shortcut for `md4.create().update(...)` */
- update(message: MD4Input): Md4;
- /** Hash `message` into a hex string. */
- hex(message: MD4Input): string;
- /** Hash `message` into an Array. */
- array(message: MD4Input): number[];
- /** Identical to {@link md4.array}. */
- digest(message: MD4Input): number[];
- /**
- * Identical to {@link md4.arrayBuffer}.
- * @deprecated Use {@link md4.arrayBuffer} instead.
- */
- buffer(message: MD4Input): ArrayBuffer;
- /** Hash `message` into an ArrayBuffer. */
- arrayBuffer(message: MD4Input): ArrayBuffer;
- }
-
- export type Md4 = Md4;
- declare class Md4 {
- private constructor();
-
- private toString(): string;
- private finalize(): void;
- private hash(): void;
- /**
- * Append `message` to the internal hash source data.
- * @returns A reference to `this` for chaining, or nothing if the instance has been finalized.
- */
- update(message: MD4Input): this | void;
- /** Hash into a hex string. Finalizes the hash. */
- hex(): string;
- /** Hash into an Array. Finalizes the hash. */
- array(): number[];
- /** Identical to {@link Md4.array}. */
- digest(): number[];
- /**
- * Identical to {@link Md4.arrayBuffer}.
- * @deprecated Use {@link Md4.arrayBuffer} instead.
- */
- buffer(): ArrayBuffer;
- /** Hash into an ArrayBuffer. Finalizes the hash. */
- arrayBuffer(): ArrayBuffer;
-
- private buffer8: Uint8Array;
- private blocks: Uint32Array;
- private bytes: number;
- private start: number;
- private h3: number;
- private h2: number;
- private h1: number;
- private h0: number;
- readonly hashed: boolean;
- /** If true, `update()` operations will silently fail. */
- readonly finalized: boolean;
- readonly first: boolean;
- private lastByteIndex?: number;
- }
-
- const md4: md4;
- export default md4;
-}
+declare module 'js-md4' { + export type MD4Input = string | ArrayBuffer | Uint8Array | number[]; + + interface md4 { + /** + * # Broken, will throw an error. + * @deprecated Use {@link md4.hex} instead. + */ + (input: MD4Input): never; + /** Creates an `Md4` hasher instance. */ + create(): Md4; + /** Shortcut for `md4.create().update(...)` */ + update(message: MD4Input): Md4; + /** Hash `message` into a hex string. */ + hex(message: MD4Input): string; + /** Hash `message` into an Array. */ + array(message: MD4Input): number[]; + /** Identical to {@link md4.array}. */ + digest(message: MD4Input): number[]; + /** + * Identical to {@link md4.arrayBuffer}. + * @deprecated Use {@link md4.arrayBuffer} instead. + */ + buffer(message: MD4Input): ArrayBuffer; + /** Hash `message` into an ArrayBuffer. */ + arrayBuffer(message: MD4Input): ArrayBuffer; + } + + export type Md4 = Md4; + declare class Md4 { + private constructor(); + + private toString(): string; + private finalize(): void; + private hash(): void; + /** + * Append `message` to the internal hash source data. + * @returns A reference to `this` for chaining, or nothing if the instance has been finalized. + */ + update(message: MD4Input): this | void; + /** Hash into a hex string. Finalizes the hash. */ + hex(): string; + /** Hash into an Array. Finalizes the hash. */ + array(): number[]; + /** Identical to {@link Md4.array}. */ + digest(): number[]; + /** + * Identical to {@link Md4.arrayBuffer}. + * @deprecated Use {@link Md4.arrayBuffer} instead. + */ + buffer(): ArrayBuffer; + /** Hash into an ArrayBuffer. Finalizes the hash. */ + arrayBuffer(): ArrayBuffer; + + private buffer8: Uint8Array; + private blocks: Uint32Array; + private bytes: number; + private start: number; + private h3: number; + private h2: number; + private h1: number; + private h0: number; + readonly hashed: boolean; + /** If true, `update()` operations will silently fail. 
*/ + readonly finalized: boolean; + readonly first: boolean; + private lastByteIndex?: number; + } + + const md4: md4; + export default md4; +} diff --git a/packages/bun-polyfills/src/utils/errors.ts b/packages/bun-polyfills/src/utils/errors.ts index 35b967436..160229065 100644 --- a/packages/bun-polyfills/src/utils/errors.ts +++ b/packages/bun-polyfills/src/utils/errors.ts @@ -1,230 +1,230 @@ -type PosixErrNo = MapKeysType<ReturnType<typeof getPosixSystemErrorMap>>;
-type Win32ErrNo = MapKeysType<ReturnType<typeof getWin32SystemErrorMap>>;
-
-export function getCallSites(sliceOff = 1) {
- const originalPST = Error.prepareStackTrace;
- Error.prepareStackTrace = (error, stack) => stack;
- const { stack } = new Error();
- if (stack?.constructor.name !== 'Array') throw new Error('Failed to acquire structured JS stack trace');
- Error.prepareStackTrace = originalPST;
- return (stack as unknown as NodeJS.CallSite[]).slice(sliceOff);
-}
-
-export function getPosixSystemErrorMap() {
- return new Map([
- [ -7, [ 'E2BIG', 'argument list too long' ] ],
- [ -13, [ 'EACCES', 'permission denied' ] ],
- [ -98, [ 'EADDRINUSE', 'address already in use' ] ],
- [ -99, [ 'EADDRNOTAVAIL', 'address not available' ] ],
- [ -97, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
- [ -11, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
- [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
- [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
- [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
- [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
- [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
- [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
- [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
- [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
- [ -3007, [ 'EAI_NODATA', 'no address' ] ],
- [ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
- [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
- [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
- [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
- [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
- [ -114, [ 'EALREADY', 'connection already in progress' ] ],
- [ -9, [ 'EBADF', 'bad file descriptor' ] ],
- [ -16, [ 'EBUSY', 'resource busy or locked' ] ],
- [ -125, [ 'ECANCELED', 'operation canceled' ] ],
- [ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
- [ -103, [ 'ECONNABORTED', 'software caused connection abort' ] ],
- [ -111, [ 'ECONNREFUSED', 'connection refused' ] ],
- [ -104, [ 'ECONNRESET', 'connection reset by peer' ] ],
- [ -89, [ 'EDESTADDRREQ', 'destination address required' ] ],
- [ -17, [ 'EEXIST', 'file already exists' ] ],
- [ -14, [ 'EFAULT', 'bad address in system call argument' ] ],
- [ -27, [ 'EFBIG', 'file too large' ] ],
- [ -113, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
- [ -4, [ 'EINTR', 'interrupted system call' ] ],
- [ -22, [ 'EINVAL', 'invalid argument' ] ],
- [ -5, [ 'EIO', 'i/o error' ] ],
- [ -106, [ 'EISCONN', 'socket is already connected' ] ],
- [ -21, [ 'EISDIR', 'illegal operation on a directory' ] ],
- [ -40, [ 'ELOOP', 'too many symbolic links encountered' ] ],
- [ -24, [ 'EMFILE', 'too many open files' ] ],
- [ -90, [ 'EMSGSIZE', 'message too long' ] ],
- [ -36, [ 'ENAMETOOLONG', 'name too long' ] ],
- [ -100, [ 'ENETDOWN', 'network is down' ] ],
- [ -101, [ 'ENETUNREACH', 'network is unreachable' ] ],
- [ -23, [ 'ENFILE', 'file table overflow' ] ],
- [ -105, [ 'ENOBUFS', 'no buffer space available' ] ],
- [ -19, [ 'ENODEV', 'no such device' ] ],
- [ -2, [ 'ENOENT', 'no such file or directory' ] ],
- [ -12, [ 'ENOMEM', 'not enough memory' ] ],
- [ -64, [ 'ENONET', 'machine is not on the network' ] ],
- [ -92, [ 'ENOPROTOOPT', 'protocol not available' ] ],
- [ -28, [ 'ENOSPC', 'no space left on device' ] ],
- [ -38, [ 'ENOSYS', 'function not implemented' ] ],
- [ -107, [ 'ENOTCONN', 'socket is not connected' ] ],
- [ -20, [ 'ENOTDIR', 'not a directory' ] ],
- [ -39, [ 'ENOTEMPTY', 'directory not empty' ] ],
- [ -88, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
- [ -95, [ 'ENOTSUP', 'operation not supported on socket' ] ],
- [ -75, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
- [ -1, [ 'EPERM', 'operation not permitted' ] ],
- [ -32, [ 'EPIPE', 'broken pipe' ] ],
- [ -71, [ 'EPROTO', 'protocol error' ] ],
- [ -93, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
- [ -91, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
- [ -34, [ 'ERANGE', 'result too large' ] ],
- [ -30, [ 'EROFS', 'read-only file system' ] ],
- [ -108, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
- [ -29, [ 'ESPIPE', 'invalid seek' ] ],
- [ -3, [ 'ESRCH', 'no such process' ] ],
- [ -110, [ 'ETIMEDOUT', 'connection timed out' ] ],
- [ -26, [ 'ETXTBSY', 'text file is busy' ] ],
- [ -18, [ 'EXDEV', 'cross-device link not permitted' ] ],
- [ -4094, [ 'UNKNOWN', 'unknown error' ] ],
- [ -4095, [ 'EOF', 'end of file' ] ],
- [ -6, [ 'ENXIO', 'no such device or address' ] ],
- [ -31, [ 'EMLINK', 'too many links' ] ],
- [ -112, [ 'EHOSTDOWN', 'host is down' ] ],
- [ -121, [ 'EREMOTEIO', 'remote I/O error' ] ],
- [ -25, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
- [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
- [ -84, [ 'EILSEQ', 'illegal byte sequence' ] ],
- [ -94, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
- ] as const);
-}
-
-export function getWin32SystemErrorMap() {
- return new Map([
- [ -4093, [ 'E2BIG', 'argument list too long' ] ],
- [ -4092, [ 'EACCES', 'permission denied' ] ],
- [ -4091, [ 'EADDRINUSE', 'address already in use' ] ],
- [ -4090, [ 'EADDRNOTAVAIL', 'address not available' ] ],
- [ -4089, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
- [ -4088, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
- [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
- [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
- [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
- [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
- [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
- [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
- [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
- [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
- [ -3007, [ 'EAI_NODATA', 'no address' ] ],
- [ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
- [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
- [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
- [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
- [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
- [ -4084, [ 'EALREADY', 'connection already in progress' ] ],
- [ -4083, [ 'EBADF', 'bad file descriptor' ] ],
- [ -4082, [ 'EBUSY', 'resource busy or locked' ] ],
- [ -4081, [ 'ECANCELED', 'operation canceled' ] ],
- [ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
- [ -4079, [ 'ECONNABORTED', 'software caused connection abort' ] ],
- [ -4078, [ 'ECONNREFUSED', 'connection refused' ] ],
- [ -4077, [ 'ECONNRESET', 'connection reset by peer' ] ],
- [ -4076, [ 'EDESTADDRREQ', 'destination address required' ] ],
- [ -4075, [ 'EEXIST', 'file already exists' ] ],
- [ -4074, [ 'EFAULT', 'bad address in system call argument' ] ],
- [ -4036, [ 'EFBIG', 'file too large' ] ],
- [ -4073, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
- [ -4072, [ 'EINTR', 'interrupted system call' ] ],
- [ -4071, [ 'EINVAL', 'invalid argument' ] ],
- [ -4070, [ 'EIO', 'i/o error' ] ],
- [ -4069, [ 'EISCONN', 'socket is already connected' ] ],
- [ -4068, [ 'EISDIR', 'illegal operation on a directory' ] ],
- [ -4067, [ 'ELOOP', 'too many symbolic links encountered' ] ],
- [ -4066, [ 'EMFILE', 'too many open files' ] ],
- [ -4065, [ 'EMSGSIZE', 'message too long' ] ],
- [ -4064, [ 'ENAMETOOLONG', 'name too long' ] ],
- [ -4063, [ 'ENETDOWN', 'network is down' ] ],
- [ -4062, [ 'ENETUNREACH', 'network is unreachable' ] ],
- [ -4061, [ 'ENFILE', 'file table overflow' ] ],
- [ -4060, [ 'ENOBUFS', 'no buffer space available' ] ],
- [ -4059, [ 'ENODEV', 'no such device' ] ],
- [ -4058, [ 'ENOENT', 'no such file or directory' ] ],
- [ -4057, [ 'ENOMEM', 'not enough memory' ] ],
- [ -4056, [ 'ENONET', 'machine is not on the network' ] ],
- [ -4035, [ 'ENOPROTOOPT', 'protocol not available' ] ],
- [ -4055, [ 'ENOSPC', 'no space left on device' ] ],
- [ -4054, [ 'ENOSYS', 'function not implemented' ] ],
- [ -4053, [ 'ENOTCONN', 'socket is not connected' ] ],
- [ -4052, [ 'ENOTDIR', 'not a directory' ] ],
- [ -4051, [ 'ENOTEMPTY', 'directory not empty' ] ],
- [ -4050, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
- [ -4049, [ 'ENOTSUP', 'operation not supported on socket' ] ],
- [ -4026, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
- [ -4048, [ 'EPERM', 'operation not permitted' ] ],
- [ -4047, [ 'EPIPE', 'broken pipe' ] ],
- [ -4046, [ 'EPROTO', 'protocol error' ] ],
- [ -4045, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
- [ -4044, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
- [ -4034, [ 'ERANGE', 'result too large' ] ],
- [ -4043, [ 'EROFS', 'read-only file system' ] ],
- [ -4042, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
- [ -4041, [ 'ESPIPE', 'invalid seek' ] ],
- [ -4040, [ 'ESRCH', 'no such process' ] ],
- [ -4039, [ 'ETIMEDOUT', 'connection timed out' ] ],
- [ -4038, [ 'ETXTBSY', 'text file is busy' ] ],
- [ -4037, [ 'EXDEV', 'cross-device link not permitted' ] ],
- [ -4094, [ 'UNKNOWN', 'unknown error' ] ],
- [ -4095, [ 'EOF', 'end of file' ] ],
- [ -4033, [ 'ENXIO', 'no such device or address' ] ],
- [ -4032, [ 'EMLINK', 'too many links' ] ],
- [ -4031, [ 'EHOSTDOWN', 'host is down' ] ],
- [ -4030, [ 'EREMOTEIO', 'remote I/O error' ] ],
- [ -4029, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
- [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
- [ -4027, [ 'EILSEQ', 'illegal byte sequence' ] ],
- [ -4025, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
- ] as const);
-}
-
-export function getPosixToWin32SystemErrorMap() {
- const posixEntries = [...getPosixSystemErrorMap().entries()];
- const win32Entries = [...getWin32SystemErrorMap().entries()];
- const map: Map<PosixErrNo, Win32ErrNo> = new Map();
- posixEntries.forEach(([code, val]) => {
- const found = win32Entries.find(([_, v]) => v[0] === val[0]);
- if (!found) console.error(val[0]);
- else map.set(code, found[0]);
- });
- return map;
-}
-
-export function getPlatformSystemErrorFromPosix(posixErrNo: PosixErrNo) {
- if (process.platform === 'win32') {
- const win32errno = getPosixToWin32SystemErrorMap().get(posixErrNo)!;
- return getWin32SystemErrorMap().get(win32errno);
- } else {
- return getPosixSystemErrorMap().get(posixErrNo);
- }
-}
-
-export class SystemError extends Error {
- constructor(errno: PosixErrNo, syscall?: string, errpath?: string) {
- const [errname, errmsg] = getPlatformSystemErrorFromPosix(errno) ?? ['SystemError', 'Unknown system error'];
- super(errmsg);
- this.name = errname;
- this.code = errname;
- this.errno = errno;
- if (syscall) this.syscall = syscall;
- if (errpath) this.path = errpath;
- }
- errno?: number | undefined;
- code?: string | undefined;
- path?: string | undefined;
- syscall?: string | undefined;
-}
-
-export class NotImplementedError extends Error {
- constructor(thing: string, func: AnyCallable = NotImplementedError, overrideMsg: boolean = false) {
- super(overrideMsg ? thing : `A polyfill for ${thing} is not yet implemented by bun-polyfills.`);
- this.name = 'NotImplementedError';
- Error.captureStackTrace(this, func);
- }
-}
+type PosixErrNo = MapKeysType<ReturnType<typeof getPosixSystemErrorMap>>; +type Win32ErrNo = MapKeysType<ReturnType<typeof getWin32SystemErrorMap>>; + +export function getCallSites(sliceOff = 1) { + const originalPST = Error.prepareStackTrace; + Error.prepareStackTrace = (error, stack) => stack; + const { stack } = new Error(); + if (stack?.constructor.name !== 'Array') throw new Error('Failed to acquire structured JS stack trace'); + Error.prepareStackTrace = originalPST; + return (stack as unknown as NodeJS.CallSite[]).slice(sliceOff); +} + +export function getPosixSystemErrorMap() { + return new Map([ + [ -7, [ 'E2BIG', 'argument list too long' ] ], + [ -13, [ 'EACCES', 'permission denied' ] ], + [ -98, [ 'EADDRINUSE', 'address already in use' ] ], + [ -99, [ 'EADDRNOTAVAIL', 'address not available' ] ], + [ -97, [ 'EAFNOSUPPORT', 'address family not supported' ] ], + [ -11, [ 'EAGAIN', 'resource temporarily unavailable' ] ], + [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ], + [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ], + [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ], + [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ], + [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ], + [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ], + [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ], + [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ], + [ -3007, [ 'EAI_NODATA', 'no address' ] ], + [ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ], + [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ], + [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ], + [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ], + [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ], + [ -114, [ 'EALREADY', 'connection already in progress' ] ], + [ -9, [ 'EBADF', 'bad file descriptor' ] ], + [ -16, [ 'EBUSY', 'resource busy or locked' ] ], + [ -125, [ 'ECANCELED', 'operation canceled' ] ], + [ -4080, [ 'ECHARSET', 'invalid 
Unicode character' ] ], + [ -103, [ 'ECONNABORTED', 'software caused connection abort' ] ], + [ -111, [ 'ECONNREFUSED', 'connection refused' ] ], + [ -104, [ 'ECONNRESET', 'connection reset by peer' ] ], + [ -89, [ 'EDESTADDRREQ', 'destination address required' ] ], + [ -17, [ 'EEXIST', 'file already exists' ] ], + [ -14, [ 'EFAULT', 'bad address in system call argument' ] ], + [ -27, [ 'EFBIG', 'file too large' ] ], + [ -113, [ 'EHOSTUNREACH', 'host is unreachable' ] ], + [ -4, [ 'EINTR', 'interrupted system call' ] ], + [ -22, [ 'EINVAL', 'invalid argument' ] ], + [ -5, [ 'EIO', 'i/o error' ] ], + [ -106, [ 'EISCONN', 'socket is already connected' ] ], + [ -21, [ 'EISDIR', 'illegal operation on a directory' ] ], + [ -40, [ 'ELOOP', 'too many symbolic links encountered' ] ], + [ -24, [ 'EMFILE', 'too many open files' ] ], + [ -90, [ 'EMSGSIZE', 'message too long' ] ], + [ -36, [ 'ENAMETOOLONG', 'name too long' ] ], + [ -100, [ 'ENETDOWN', 'network is down' ] ], + [ -101, [ 'ENETUNREACH', 'network is unreachable' ] ], + [ -23, [ 'ENFILE', 'file table overflow' ] ], + [ -105, [ 'ENOBUFS', 'no buffer space available' ] ], + [ -19, [ 'ENODEV', 'no such device' ] ], + [ -2, [ 'ENOENT', 'no such file or directory' ] ], + [ -12, [ 'ENOMEM', 'not enough memory' ] ], + [ -64, [ 'ENONET', 'machine is not on the network' ] ], + [ -92, [ 'ENOPROTOOPT', 'protocol not available' ] ], + [ -28, [ 'ENOSPC', 'no space left on device' ] ], + [ -38, [ 'ENOSYS', 'function not implemented' ] ], + [ -107, [ 'ENOTCONN', 'socket is not connected' ] ], + [ -20, [ 'ENOTDIR', 'not a directory' ] ], + [ -39, [ 'ENOTEMPTY', 'directory not empty' ] ], + [ -88, [ 'ENOTSOCK', 'socket operation on non-socket' ] ], + [ -95, [ 'ENOTSUP', 'operation not supported on socket' ] ], + [ -75, [ 'EOVERFLOW', 'value too large for defined data type' ] ], + [ -1, [ 'EPERM', 'operation not permitted' ] ], + [ -32, [ 'EPIPE', 'broken pipe' ] ], + [ -71, [ 'EPROTO', 'protocol error' ] ], + [ -93, [ 
'EPROTONOSUPPORT', 'protocol not supported' ] ], + [ -91, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ], + [ -34, [ 'ERANGE', 'result too large' ] ], + [ -30, [ 'EROFS', 'read-only file system' ] ], + [ -108, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ], + [ -29, [ 'ESPIPE', 'invalid seek' ] ], + [ -3, [ 'ESRCH', 'no such process' ] ], + [ -110, [ 'ETIMEDOUT', 'connection timed out' ] ], + [ -26, [ 'ETXTBSY', 'text file is busy' ] ], + [ -18, [ 'EXDEV', 'cross-device link not permitted' ] ], + [ -4094, [ 'UNKNOWN', 'unknown error' ] ], + [ -4095, [ 'EOF', 'end of file' ] ], + [ -6, [ 'ENXIO', 'no such device or address' ] ], + [ -31, [ 'EMLINK', 'too many links' ] ], + [ -112, [ 'EHOSTDOWN', 'host is down' ] ], + [ -121, [ 'EREMOTEIO', 'remote I/O error' ] ], + [ -25, [ 'ENOTTY', 'inappropriate ioctl for device' ] ], + [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ], + [ -84, [ 'EILSEQ', 'illegal byte sequence' ] ], + [ -94, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ] + ] as const); +} + +export function getWin32SystemErrorMap() { + return new Map([ + [ -4093, [ 'E2BIG', 'argument list too long' ] ], + [ -4092, [ 'EACCES', 'permission denied' ] ], + [ -4091, [ 'EADDRINUSE', 'address already in use' ] ], + [ -4090, [ 'EADDRNOTAVAIL', 'address not available' ] ], + [ -4089, [ 'EAFNOSUPPORT', 'address family not supported' ] ], + [ -4088, [ 'EAGAIN', 'resource temporarily unavailable' ] ], + [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ], + [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ], + [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ], + [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ], + [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ], + [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ], + [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ], + [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ], + [ -3007, [ 'EAI_NODATA', 'no address' ] ], + [ -3008, [ 'EAI_NONAME', 'unknown node or 
service' ] ], + [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ], + [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ], + [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ], + [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ], + [ -4084, [ 'EALREADY', 'connection already in progress' ] ], + [ -4083, [ 'EBADF', 'bad file descriptor' ] ], + [ -4082, [ 'EBUSY', 'resource busy or locked' ] ], + [ -4081, [ 'ECANCELED', 'operation canceled' ] ], + [ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ], + [ -4079, [ 'ECONNABORTED', 'software caused connection abort' ] ], + [ -4078, [ 'ECONNREFUSED', 'connection refused' ] ], + [ -4077, [ 'ECONNRESET', 'connection reset by peer' ] ], + [ -4076, [ 'EDESTADDRREQ', 'destination address required' ] ], + [ -4075, [ 'EEXIST', 'file already exists' ] ], + [ -4074, [ 'EFAULT', 'bad address in system call argument' ] ], + [ -4036, [ 'EFBIG', 'file too large' ] ], + [ -4073, [ 'EHOSTUNREACH', 'host is unreachable' ] ], + [ -4072, [ 'EINTR', 'interrupted system call' ] ], + [ -4071, [ 'EINVAL', 'invalid argument' ] ], + [ -4070, [ 'EIO', 'i/o error' ] ], + [ -4069, [ 'EISCONN', 'socket is already connected' ] ], + [ -4068, [ 'EISDIR', 'illegal operation on a directory' ] ], + [ -4067, [ 'ELOOP', 'too many symbolic links encountered' ] ], + [ -4066, [ 'EMFILE', 'too many open files' ] ], + [ -4065, [ 'EMSGSIZE', 'message too long' ] ], + [ -4064, [ 'ENAMETOOLONG', 'name too long' ] ], + [ -4063, [ 'ENETDOWN', 'network is down' ] ], + [ -4062, [ 'ENETUNREACH', 'network is unreachable' ] ], + [ -4061, [ 'ENFILE', 'file table overflow' ] ], + [ -4060, [ 'ENOBUFS', 'no buffer space available' ] ], + [ -4059, [ 'ENODEV', 'no such device' ] ], + [ -4058, [ 'ENOENT', 'no such file or directory' ] ], + [ -4057, [ 'ENOMEM', 'not enough memory' ] ], + [ -4056, [ 'ENONET', 'machine is not on the network' ] ], + [ -4035, [ 'ENOPROTOOPT', 'protocol not available' ] ], + [ -4055, [ 'ENOSPC', 'no space 
left on device' ] ], + [ -4054, [ 'ENOSYS', 'function not implemented' ] ], + [ -4053, [ 'ENOTCONN', 'socket is not connected' ] ], + [ -4052, [ 'ENOTDIR', 'not a directory' ] ], + [ -4051, [ 'ENOTEMPTY', 'directory not empty' ] ], + [ -4050, [ 'ENOTSOCK', 'socket operation on non-socket' ] ], + [ -4049, [ 'ENOTSUP', 'operation not supported on socket' ] ], + [ -4026, [ 'EOVERFLOW', 'value too large for defined data type' ] ], + [ -4048, [ 'EPERM', 'operation not permitted' ] ], + [ -4047, [ 'EPIPE', 'broken pipe' ] ], + [ -4046, [ 'EPROTO', 'protocol error' ] ], + [ -4045, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ], + [ -4044, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ], + [ -4034, [ 'ERANGE', 'result too large' ] ], + [ -4043, [ 'EROFS', 'read-only file system' ] ], + [ -4042, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ], + [ -4041, [ 'ESPIPE', 'invalid seek' ] ], + [ -4040, [ 'ESRCH', 'no such process' ] ], + [ -4039, [ 'ETIMEDOUT', 'connection timed out' ] ], + [ -4038, [ 'ETXTBSY', 'text file is busy' ] ], + [ -4037, [ 'EXDEV', 'cross-device link not permitted' ] ], + [ -4094, [ 'UNKNOWN', 'unknown error' ] ], + [ -4095, [ 'EOF', 'end of file' ] ], + [ -4033, [ 'ENXIO', 'no such device or address' ] ], + [ -4032, [ 'EMLINK', 'too many links' ] ], + [ -4031, [ 'EHOSTDOWN', 'host is down' ] ], + [ -4030, [ 'EREMOTEIO', 'remote I/O error' ] ], + [ -4029, [ 'ENOTTY', 'inappropriate ioctl for device' ] ], + [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ], + [ -4027, [ 'EILSEQ', 'illegal byte sequence' ] ], + [ -4025, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ] + ] as const); +} + +export function getPosixToWin32SystemErrorMap() { + const posixEntries = [...getPosixSystemErrorMap().entries()]; + const win32Entries = [...getWin32SystemErrorMap().entries()]; + const map: Map<PosixErrNo, Win32ErrNo> = new Map(); + posixEntries.forEach(([code, val]) => { + const found = win32Entries.find(([_, v]) => v[0] === 
val[0]); + if (!found) console.error(val[0]); + else map.set(code, found[0]); + }); + return map; +} + +export function getPlatformSystemErrorFromPosix(posixErrNo: PosixErrNo) { + if (process.platform === 'win32') { + const win32errno = getPosixToWin32SystemErrorMap().get(posixErrNo)!; + return getWin32SystemErrorMap().get(win32errno); + } else { + return getPosixSystemErrorMap().get(posixErrNo); + } +} + +export class SystemError extends Error { + constructor(errno: PosixErrNo, syscall?: string, errpath?: string) { + const [errname, errmsg] = getPlatformSystemErrorFromPosix(errno) ?? ['SystemError', 'Unknown system error']; + super(errmsg); + this.name = errname; + this.code = errname; + this.errno = errno; + if (syscall) this.syscall = syscall; + if (errpath) this.path = errpath; + } + errno?: number | undefined; + code?: string | undefined; + path?: string | undefined; + syscall?: string | undefined; +} + +export class NotImplementedError extends Error { + constructor(thing: string, func: AnyCallable = NotImplementedError, overrideMsg: boolean = false) { + super(overrideMsg ? thing : `A polyfill for ${thing} is not yet implemented by bun-polyfills.`); + this.name = 'NotImplementedError'; + Error.captureStackTrace(this, func); + } +} diff --git a/packages/bun-polyfills/src/utils/misc.ts b/packages/bun-polyfills/src/utils/misc.ts index 74bb9aa01..c940535fc 100644 --- a/packages/bun-polyfills/src/utils/misc.ts +++ b/packages/bun-polyfills/src/utils/misc.ts @@ -1,36 +1,36 @@ -import streams from 'node:stream';
-import type { SpawnOptions, FileBlob } from 'bun';
-
-export const getter = <T>(obj: T, key: string | symbol, get: () => any, enumerable = false, configurable = true): void => {
- Object.defineProperty(obj, key, { get, configurable, enumerable });
-};
-
-export const setter = <T>(obj: T, key: string | symbol, set: () => any, enumerable = false, configurable = true): void => {
- Object.defineProperty(obj, key, { set, configurable, enumerable });
-};
-
-export const readonly = <T>(obj: T, key: string | symbol, value: unknown, enumerable = false, configurable = true): void => {
- Object.defineProperty(obj, key, { value, configurable, enumerable });
-};
-
-export function streamToBuffer(stream: streams.Readable | streams.Duplex): Promise<Buffer> {
- return new Promise((resolve, reject) => {
- const buffers: Uint8Array[] = [];
- stream.on("data", (chunk: Uint8Array) => buffers.push(chunk));
- stream.on("end", () => resolve(Buffer.concat(buffers)));
- stream.on("error", (err: Error) => reject(err));
- });
-}
-
-export function isArrayBufferView(value: any): value is ArrayBufferView {
- return value !== null && typeof value === 'object' &&
- value.buffer instanceof ArrayBuffer && typeof value.byteLength === 'number' && typeof value.byteOffset === 'number';
-}
-
-export function isOptions(options: any): options is SpawnOptions.OptionsObject {
- return options !== null && typeof options === 'object';
-}
-
-export function isFileBlob(blob: any): blob is FileBlob {
- return blob instanceof Blob && Reflect.get(blob, 'readable') instanceof ReadableStream && typeof Reflect.get(blob, 'writer') === 'function';
-}
+import streams from 'node:stream'; +import type { SpawnOptions, FileBlob } from 'bun'; + +export const getter = <T>(obj: T, key: string | symbol, get: () => any, enumerable = false, configurable = true): void => { + Object.defineProperty(obj, key, { get, configurable, enumerable }); +}; + +export const setter = <T>(obj: T, key: string | symbol, set: () => any, enumerable = false, configurable = true): void => { + Object.defineProperty(obj, key, { set, configurable, enumerable }); +}; + +export const readonly = <T>(obj: T, key: string | symbol, value: unknown, enumerable = false, configurable = true): void => { + Object.defineProperty(obj, key, { value, configurable, enumerable }); +}; + +export function streamToBuffer(stream: streams.Readable | streams.Duplex): Promise<Buffer> { + return new Promise((resolve, reject) => { + const buffers: Uint8Array[] = []; + stream.on("data", (chunk: Uint8Array) => buffers.push(chunk)); + stream.on("end", () => resolve(Buffer.concat(buffers))); + stream.on("error", (err: Error) => reject(err)); + }); +} + +export function isArrayBufferView(value: any): value is ArrayBufferView { + return value !== null && typeof value === 'object' && + value.buffer instanceof ArrayBuffer && typeof value.byteLength === 'number' && typeof value.byteOffset === 'number'; +} + +export function isOptions(options: any): options is SpawnOptions.OptionsObject { + return options !== null && typeof options === 'object'; +} + +export function isFileBlob(blob: any): blob is FileBlob { + return blob instanceof Blob && Reflect.get(blob, 'readable') instanceof ReadableStream && typeof Reflect.get(blob, 'writer') === 'function'; +} |