aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Makefile2
-rw-r--r--README.md52
-rw-r--r--examples/blank/package.json8
-rw-r--r--examples/blank/src/index.js1
-rw-r--r--examples/blank/tsconfig.json9
-rw-r--r--examples/discord-interactions/README.md6
-rw-r--r--examples/discord-interactions/gitignore88
-rw-r--r--examples/discord-interactions/package.json2
-rw-r--r--examples/next/README.md16
-rw-r--r--examples/next/package.json2
-rw-r--r--examples/next/pages/index.tsx6
-rw-r--r--examples/next/pages/second.tsx22
-rw-r--r--examples/react/README.md74
-rw-r--r--examples/react/package.json2
-rw-r--r--examples/react/src/images.d.ts4
-rw-r--r--packages/bun-types/package.json2
-rw-r--r--packages/bun-types/types.d.ts577
-rw-r--r--src/napi/napi.zig6
-rw-r--r--types/bun/bun.d.ts41
-rw-r--r--types/bun/globals.d.ts305
-rw-r--r--types/bun/jsc.d.ts2
-rw-r--r--types/bun/sqlite.d.ts12
22 files changed, 1095 insertions, 144 deletions
diff --git a/Makefile b/Makefile
index e0009eb38..abcf5b0d5 100644
--- a/Makefile
+++ b/Makefile
@@ -425,6 +425,8 @@ vendor-without-check: api analytics node-fallbacks runtime_js fallback_decoder b
prepare-types:
BUN_VERSION=$(PACKAGE_JSON_VERSION) $(BUN_RELEASE_BIN) types/bun/bundle.ts packages/bun-types
echo "Generated types for $(PACKAGE_JSON_VERSION) in packages/bun-types"
+ cp packages/bun-types/types.d.ts /tmp/bun-types.d.ts
+ cd /tmp && tsc /tmp/bun-types.d.ts
release-types:
cd packages/bun-types && npm publish
diff --git a/README.md b/README.md
index e504cc3db..6ac5a32ba 100644
--- a/README.md
+++ b/README.md
@@ -63,9 +63,15 @@ If using Linux, kernel version 5.6 or higher is strongly recommended, but the mi
- [CSS runtime](#css-runtime)
- [Frameworks](#frameworks)
- [Troubleshooting](#troubleshooting)
+ - [Illegal Instruction (Core Dumped)](#illegal-instruction-core-dumped)
- [bun not running on an M1 (or Apple Silicon)](#bun-not-running-on-an-m1-or-apple-silicon)
- [error: Unexpected](#error-unexpected)
- [bun install is stuck](#bun-install-is-stuck)
+ - [Unzip is required](#unzip-is-required)
+ - [Debian / Ubuntu / Mint](#debian--ubuntu--mint)
+ - [RedHat / CentOS / Fedora](#redhat--centos--fedora)
+ - [Arch / Manjaro](#arch--manjaro)
+ - [OpenSUSE](#opensuse)
- [Reference](#reference)
- [`bun install`](#bun-install)
- [Configuring bun install with `bunfig.toml`](#configuring-bun-install-with-bunfigtoml)
@@ -453,7 +459,7 @@ To create new a React app:
```bash
bun create react ./app
cd app
-bun
+bun dev # start dev server
```
To use an existing React app:
@@ -466,7 +472,7 @@ npm install -D react-refresh
bun bun ./src/index.js # jsx, tsx, ts also work. can be multiple files
# Start the dev server
-bun
+bun dev
```
From there, bun relies on the filesystem for mapping dev server paths to source files. All URL paths are relative to the project root (where `package.json` is located).
@@ -896,7 +902,24 @@ For developing frameworks, you can also do `bun bun --use ./relative-path-to-fra
If you’re interested in adding a framework integration, please reach out. There’s a lot here and it’s not entirely documented yet.
## Troubleshooting
-
+### Illegal Instruction (Core Dumped)
+If you get this error while bun is initializing, you probably need to wrap the bun executable with intel-sde.
+ 1. Install intel-sde
+ - Arch Linux: `yay -S intel-sde`
+ - Other Distros:
+```
+# wget https://downloadmirror.intel.com/732268/sde-external-9.7.0-2022-05-09-lin.tar.xz -O /tmp/intel-sde.tar.xz
+# cd /tmp
+# tar -xf intel-sde.tar.xz
+# cd sde-external*
+# mkdir /usr/local/bin -p
+# cp sde64 /usr/local/bin/sde
+```
+ 2. Add alias to bashrc
+```
+$ echo "alias bun='sde -chip-check-disable -- bun'" >> ~/.bashrc
+```
+You can replace `.bashrc` with `.zshrc` if you use zsh instead of bash
### bun not running on an M1 (or Apple Silicon)
If you see a message like this
@@ -923,6 +946,29 @@ To fix this issue:
2. Try again, and if the error still occurs, try setting `ulimit -n` to an absurdly high number, such as `ulimit -n 2147483646`
3. Try again, and if that still doesn’t fix it, open an issue
+### Unzip is required
+Unzip is required to install bun on Linux. You can use one of the following commands to install `unzip`:
+
+#### Debian / Ubuntu / Mint
+```sh
+sudo apt install unzip
+```
+
+#### RedHat / CentOS / Fedora
+```sh
+sudo dnf install unzip
+```
+
+#### Arch / Manjaro
+```sh
+sudo pacman -S unzip
+```
+
+#### OpenSUSE
+```sh
+sudo zypper install unzip
+```
+
### bun install is stuck
Please run `bun install --verbose 2> logs.txt` and send them to me in bun's discord. If you're on Linux, it would also be helpful if you run `sudo perf trace bun install --silent` and attach the logs.
diff --git a/examples/blank/package.json b/examples/blank/package.json
new file mode 100644
index 000000000..02cfd3104
--- /dev/null
+++ b/examples/blank/package.json
@@ -0,0 +1,8 @@
+{
+ "version": "1.0.0",
+ "name": "blank",
+ "main": "src/index.js",
+ "devDependencies": {
+ "bun-types": "^0.0.83"
+ }
+} \ No newline at end of file
diff --git a/examples/blank/src/index.js b/examples/blank/src/index.js
new file mode 100644
index 000000000..eb065f93f
--- /dev/null
+++ b/examples/blank/src/index.js
@@ -0,0 +1 @@
+console.log('Hello, World!');
diff --git a/examples/blank/tsconfig.json b/examples/blank/tsconfig.json
new file mode 100644
index 000000000..ebdb7d2f6
--- /dev/null
+++ b/examples/blank/tsconfig.json
@@ -0,0 +1,9 @@
+{
+ "compilerOptions": {
+ "lib": ["ESNext"],
+ "module": "esnext",
+ "target": "esnext",
+ // "bun-types" is the important part
+ "types": ["bun-types"]
+ }
+}
diff --git a/examples/discord-interactions/README.md b/examples/discord-interactions/README.md
index d620cacd1..84e69d9b2 100644
--- a/examples/discord-interactions/README.md
+++ b/examples/discord-interactions/README.md
@@ -10,12 +10,6 @@ A [slash-create](https://npm.im/slash-create) template, using [Bun runtime](http
bun create discord-interactions interactions-bot
```
-After that, make sure to install dependencies using bun or any other npm compatible package manager:
-
-```sh
-bun install
-```
-
### Development
To run this locally, rename `.env.example` to `.env` and fill in the variables, then you can run `bun run.js` to start a local dev environment and use something like ngrok/cloudflare to tunnel it to a URL.
diff --git a/examples/discord-interactions/gitignore b/examples/discord-interactions/gitignore
new file mode 100644
index 000000000..1c22d540a
--- /dev/null
+++ b/examples/discord-interactions/gitignore
@@ -0,0 +1,88 @@
+/data
+
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+
+#config file
+config.json
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env
+.env.test
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# TernJS port file
+.tern-port
+
+# pm2
+.pm2
+.pm2.bak
diff --git a/examples/discord-interactions/package.json b/examples/discord-interactions/package.json
index d31709aa2..5f5213622 100644
--- a/examples/discord-interactions/package.json
+++ b/examples/discord-interactions/package.json
@@ -1,5 +1,5 @@
{
- "version": "0.0.3",
+ "version": "0.0.5",
"name": "@bun-examples/discord-interactions",
"dependencies": {
"slash-create": "^5.7.0"
diff --git a/examples/next/README.md b/examples/next/README.md
index 95725a34b..cd0b17310 100644
--- a/examples/next/README.md
+++ b/examples/next/README.md
@@ -1,7 +1,15 @@
-This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).
+# Next.js with Bun runtime
+
+This is a [Next.js](https://nextjs.org/) project bootstrapped with Bun.
## Getting Started
+### Cloning the repo
+
+```sh
+bun create next ./app
+```
+
First, run the development server:
```bash
@@ -10,11 +18,7 @@ bun dev
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
-You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file.
-
-[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`.
-
-The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages.
+You can start editing the page by modifying `pages/index.tsx`. The page auto-updates as you edit the file.
## Learn More
diff --git a/examples/next/package.json b/examples/next/package.json
index 1fbf8e85b..870dc2c34 100644
--- a/examples/next/package.json
+++ b/examples/next/package.json
@@ -1,6 +1,6 @@
{
"name": "@bun-examples/next",
- "version": "0.0.52",
+ "version": "0.0.53",
"main": "index.js",
"dependencies": {
"next": "^12.1.0",
diff --git a/examples/next/pages/index.tsx b/examples/next/pages/index.tsx
index 90b988011..fc078404d 100644
--- a/examples/next/pages/index.tsx
+++ b/examples/next/pages/index.tsx
@@ -1,13 +1,7 @@
import Head from "next/head";
-import Link from "next/link";
import React from "react";
import styles from "../styles/Home.module.css";
import nextPackage from "next/package.json";
-export async function getStaticProps(ctx) {
- return {
- props: {},
- };
-}
export default function Home({}) {
return (
diff --git a/examples/next/pages/second.tsx b/examples/next/pages/second.tsx
deleted file mode 100644
index 0e7b5a5cf..000000000
--- a/examples/next/pages/second.tsx
+++ /dev/null
@@ -1,22 +0,0 @@
-import Link from "next/link";
-
-export default function Second({}) {
- return (
- <div style={{ padding: 16 }}>
- <h1>Second</h1>
-
- <ul>
- <li>
- <Link href="/">
- <a>Root page</a>
- </Link>
- </li>
- <li>
- <Link href="/foo/bar/third">
- <a>Third! page</a>
- </Link>
- </li>
- </ul>
- </div>
- );
-}
diff --git a/examples/react/README.md b/examples/react/README.md
index 02aac3f6e..a857d896c 100644
--- a/examples/react/README.md
+++ b/examples/react/README.md
@@ -1,70 +1,24 @@
-# Getting Started with Create React App
+# React with Bun runtime
-This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
+This is a React project bootstrapped with bun.
-## Available Scripts
+## Getting Started
-In the project directory, you can run:
+### Cloning the repo
-### `yarn start`
+```sh
+bun create react ./app
+```
-Runs the app in the development mode.\
-Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
+### Development
-The page will reload if you make edits.\
-You will also see any lint errors in the console.
+First, run the development server.
-### `yarn test`
+```
+bun dev
+```
-Launches the test runner in the interactive watch mode.\
-See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
+Open http://localhost:3000 with your browser to see the result.
-### `yarn build`
+You can start editing the page by modifying src/App.jsx. The page auto-updates as you edit the file.
-Builds the app for production to the `build` folder.\
-It correctly bundles React in production mode and optimizes the build for the best performance.
-
-The build is minified and the filenames include the hashes.\
-Your app is ready to be deployed!
-
-See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
-
-### `yarn eject`
-
-**Note: this is a one-way operation. Once you `eject`, you can’t go back!**
-
-If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
-
-Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own.
-
-You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it.
-
-## Learn More
-
-You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
-
-To learn React, check out the [React documentation](https://reactjs.org/).
-
-### Code Splitting
-
-This section has moved here: [https://facebook.github.io/create-react-app/docs/code-splitting](https://facebook.github.io/create-react-app/docs/code-splitting)
-
-### Analyzing the Bundle Size
-
-This section has moved here: [https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size](https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size)
-
-### Making a Progressive Web App
-
-This section has moved here: [https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app](https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app)
-
-### Advanced Configuration
-
-This section has moved here: [https://facebook.github.io/create-react-app/docs/advanced-configuration](https://facebook.github.io/create-react-app/docs/advanced-configuration)
-
-### Deployment
-
-This section has moved here: [https://facebook.github.io/create-react-app/docs/deployment](https://facebook.github.io/create-react-app/docs/deployment)
-
-### `yarn build` fails to minify
-
-This section has moved here: [https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify](https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify)
diff --git a/examples/react/package.json b/examples/react/package.json
index 8e180116e..e3e8e39b3 100644
--- a/examples/react/package.json
+++ b/examples/react/package.json
@@ -1,6 +1,6 @@
{
"name": "@bun-examples/react",
- "version": "0.0.42",
+ "version": "0.0.47",
"dependencies": {
"react": "^17.0.2",
"react-dom": "^17.0.2",
diff --git a/examples/react/src/images.d.ts b/examples/react/src/images.d.ts
new file mode 100644
index 000000000..091d25e21
--- /dev/null
+++ b/examples/react/src/images.d.ts
@@ -0,0 +1,4 @@
+declare module '*.svg' {
+ const content: any;
+ export default content;
+}
diff --git a/packages/bun-types/package.json b/packages/bun-types/package.json
index a0d4f40a3..c98586107 100644
--- a/packages/bun-types/package.json
+++ b/packages/bun-types/package.json
@@ -1,6 +1,6 @@
{
"name": "bun-types",
- "version": "0.0.83",
+ "version": "0.1.2",
"description": "Type definitions for bun.js",
"types": "types.d.ts",
"files": [
diff --git a/packages/bun-types/types.d.ts b/packages/bun-types/types.d.ts
index 75a2b8c93..087005866 100644
--- a/packages/bun-types/types.d.ts
+++ b/packages/bun-types/types.d.ts
@@ -193,6 +193,178 @@ declare module "bun" {
}
/**
+ * Concatenate an array of typed arrays into a single `ArrayBuffer`. This is a fast path.
+ *
+ * You can do this manually if you'd like, but this function will generally
+ * be a little faster.
+ *
+ * If you want a `Uint8Array` instead, consider `Buffer.concat`.
+ *
+ * @param buffers An array of typed arrays to concatenate.
+ * @returns An `ArrayBuffer` with the data from all the buffers.
+ *
+ * Here is similar code to do it manually, except about 30% slower:
+ * ```js
+ * var chunks = [...];
+ * var size = 0;
+ * for (const chunk of chunks) {
+ * size += chunk.byteLength;
+ * }
+ * var buffer = new ArrayBuffer(size);
+ * var view = new Uint8Array(buffer);
+ * var offset = 0;
+ * for (const chunk of chunks) {
+ * view.set(chunk, offset);
+ * offset += chunk.byteLength;
+ * }
+ * return buffer;
+ * ```
+ *
+ * This function is faster because it uses uninitialized memory when copying. Since the entire
+ * length of the buffer is known, it is safe to use uninitialized memory.
+ */
+ export function concatArrayBuffers(
+ buffers: Array<ArrayBufferView | ArrayBufferLike>
+ ): ArrayBuffer;
+
+ /**
+ * Consume all data from a {@link ReadableStream} until it closes or errors.
+ *
+ * Concatenate the chunks into a single {@link ArrayBuffer}.
+ *
+ * Each chunk must be a TypedArray or an ArrayBuffer. If you need to support
+ * chunks of different types, consider {@link readableStreamToBlob}
+ *
+ * @param stream The stream to consume.
+ * @returns An `ArrayBuffer` with the concatenated chunks, or a promise that resolves to one.
+ */
+ export function readableStreamToArrayBuffer(
+ stream: ReadableStream
+ ): Promise<ArrayBuffer> | ArrayBuffer;
+
+ /**
+ * Consume all data from a {@link ReadableStream} until it closes or errors.
+ *
+ * Concatenate the chunks into a single {@link Blob}.
+ *
+ * @param stream The stream to consume.
+ * @returns A promise that resolves with the concatenated chunks as a {@link Blob}.
+ */
+ export function readableStreamToBlob(stream: ReadableStream): Promise<Blob>;
+
+ /**
+ * Consume all data from a {@link ReadableStream} until it closes or errors.
+ *
+ * Concatenate the chunks into a single string. Each chunk must be a TypedArray or an ArrayBuffer. If you need to support chunks of different types, consider {@link readableStreamToBlob}.
+ *
+ * @param stream The stream to consume.
+ * @returns A promise that resolves with the concatenated chunks as a {@link String}.
+ */
+ export function readableStreamToText(stream: ReadableStream): Promise<string>;
+
+ /**
+ * Consume all data from a {@link ReadableStream} until it closes or errors.
+ *
+ * Concatenate the chunks into a single string and parse as JSON. Each chunk must be a TypedArray or an ArrayBuffer. If you need to support chunks of different types, consider {@link readableStreamToBlob}.
+ *
+ * @param stream The stream to consume.
+ * @returns A promise that resolves with the concatenated chunks parsed as JSON.
+ */
+ export function readableStreamToJSON(stream: ReadableStream): Promise<any>;
+
+ /**
+ * Consume all data from a {@link ReadableStream} until it closes or errors.
+ *
+ * @param stream The stream to consume
+ * @returns A promise that resolves with the chunks as an array
+ *
+ */
+ export function readableStreamToArray<T>(
+ stream: ReadableStream
+ ): Promise<T[]> | T[];
+
+ /**
+ * Escape the following characters in a string:
+ *
+ * - `"` becomes `"&quot;"`
+ * - `&` becomes `"&amp;"`
+ * - `'` becomes `"&#x27;"`
+ * - `<` becomes `"&lt;"`
+ * - `>` becomes `"&gt;"`
+ *
+ * This function is optimized for large input. On an M1X, it processes 480 MB/s -
+ * 20 GB/s, depending on how much data is being escaped and whether there is non-ascii
+ * text.
+ *
+ * Non-string types will be converted to a string before escaping.
+ */
+ export function escapeHTML(input: string | object | number | boolean): string;
+
+ /**
+ * Convert a filesystem path to a file:// URL.
+ *
+ * @param path The path to convert.
+ * @returns A {@link URL} with the file:// scheme.
+ *
+ * @example
+ * ```js
+ * const url = Bun.pathToFileURL("/foo/bar.txt");
+ * console.log(url.href); // "file:///foo/bar.txt"
+ *```
+ *
+ * Internally, this function uses WebKit's URL API to
+ * convert the path to a file:// URL.
+ */
+ export function pathToFileURL(path: string): URL;
+
+ /**
+ * Convert a {@link URL} to a filesystem path.
+ * @param url The URL to convert.
+ * @returns A filesystem path.
+ * @throws If the URL is not a URL.
+ * @example
+ * ```js
+ * const path = Bun.fileURLToPath(new URL("file:///foo/bar.txt"));
+ * console.log(path); // "/foo/bar.txt"
+ * ```
+ */
+ export function fileURLToPath(url: URL): string;
+
+ /**
+ * Fast incremental writer that becomes an `ArrayBuffer` on end().
+ */
+ export class ArrayBufferSink {
+ constructor();
+
+ start(options?: {
+ asUint8Array?: boolean;
+ /**
+ * Preallocate an internal buffer of this size
+ * This can significantly improve performance when the chunk size is small
+ */
+ highWaterMark?: number;
+ /**
+ * On {@link ArrayBufferSink.flush}, return the written data as a `Uint8Array`.
+ * Writes will restart from the beginning of the buffer.
+ */
+ stream?: boolean;
+ }): void;
+
+ write(chunk: string | ArrayBufferView | ArrayBuffer): number;
+ /**
+ * Flush the internal buffer
+ *
+ * If {@link ArrayBufferSink.start} was passed a `stream` option, this will return a `ArrayBuffer`
+ * If {@link ArrayBufferSink.start} was passed a `stream` option and `asUint8Array`, this will return a `Uint8Array`
+ * Otherwise, this will return the number of bytes written since the last flush
+ *
+ * This API might change later to separate Uint8ArraySink and ArrayBufferSink
+ */
+ flush(): number | Uint8Array | ArrayBuffer;
+ end(): ArrayBuffer | Uint8Array;
+ }
+
+ /**
* [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) powered by the fastest system calls available for operating on files.
*
* This Blob is lazy. That means it won't do any work until you read from it.
@@ -366,6 +538,16 @@ declare module "bun" {
* ```
*/
macros?: MacroMap;
+
+ autoImportJSX?: boolean;
+ allowBunRuntime?: boolean;
+ exports?: {
+ eliminate?: string[];
+ replace?: Record<string, string>;
+ };
+ treeShaking?: boolean;
+ trimUnusedImports?: boolean;
+ jsxOptimizationInline?: boolean;
}
/**
@@ -843,9 +1025,9 @@ declare module "bun" {
* This uses a high-resolution monotonic system timer.
*
* After 14 weeks of consecutive uptime, this function
- * returns a `bigint` to prevent overflow
+ * wraps
*/
- export function nanoseconds(): number | bigint;
+ export function nanoseconds(): number;
/**
* Generate a heap snapshot for seeing where the heap is being used
@@ -1060,6 +1242,7 @@ interface BufferEncodingOption {
declare var Bun: typeof import("bun");
+
// ./ffi.d.ts
/**
@@ -1826,6 +2009,7 @@ declare module "bun:ffi" {
export const suffix: string;
}
+
// ./sqlite.d.ts
/**
@@ -1858,7 +2042,7 @@ declare module "bun:sqlite" {
/**
* Open or create a SQLite3 database
*
- * @param filename The filename of the database to open. Pass an empty string (`""`) or `":memory:"` for an in-memory database.
+ * @param filename The filename of the database to open. Pass an empty string (`""` or undefined) or `":memory:"` for an in-memory database.
* @param options defaults to `{readwrite: true, create: true}`. If a number, then it's treated as `SQLITE_OPEN_*` constant flags.
*
* @example
@@ -1890,7 +2074,7 @@ declare module "bun:sqlite" {
* ```
*/
constructor(
- filename: string,
+ filename?: string,
options?:
| number
| {
@@ -1997,14 +2181,14 @@ declare module "bun:sqlite" {
*/
run<ParamsType = SQLQueryBindings>(
sqlQuery: string,
- ...bindings: ParamsType
+ ...bindings: ParamsType[]
): void;
/**
This is an alias of {@link Database.prototype.run}
*/
exec<ParamsType = SQLQueryBindings>(
sqlQuery: string,
- ...bindings: ParamsType
+ ...bindings: ParamsType[]
): void;
/**
@@ -2187,8 +2371,6 @@ declare module "bun:sqlite" {
};
}
- export { default as Database };
-
/**
* A prepared statement.
*
@@ -2574,8 +2756,11 @@ declare module "bun:sqlite" {
| boolean
| null
| Record<string, string | bigint | TypedArray | number | boolean | null>;
+
+ export default Database;
}
+
// ./fs.d.ts
/**
@@ -6177,6 +6362,7 @@ declare module "node:fs" {
export = fs;
}
+
// ./html-rewriter.d.ts
declare namespace HTMLRewriterTypes {
@@ -6293,6 +6479,7 @@ declare class HTMLRewriter {
transform(input: Response): Response;
}
+
// ./globals.d.ts
type Encoding = "utf-8" | "windows-1252" | "utf-16";
@@ -6747,8 +6934,13 @@ type ReferrerPolicy =
| "unsafe-url";
type RequestInfo = Request | string;
-type BodyInit = XMLHttpRequestBodyInit;
+type BodyInit = ReadableStream | XMLHttpRequestBodyInit;
type XMLHttpRequestBodyInit = Blob | BufferSource | string;
+type ReadableStreamController<T> = ReadableStreamDefaultController<T>;
+type ReadableStreamDefaultReadResult<T> =
+ | ReadableStreamDefaultReadValueResult<T>
+ | ReadableStreamDefaultReadDoneResult;
+type ReadableStreamReader<T> = ReadableStreamDefaultReader<T>;
interface RequestInit {
/**
@@ -7721,6 +7913,305 @@ declare var Loader: {
resolveSync: (specifier: string, from: string) => string;
};
+/** This Streams API interface represents a readable stream of byte data. The Fetch API offers a concrete instance of a ReadableStream through the body property of a Response object. */
+interface ReadableStream<R = any> {
+ readonly locked: boolean;
+ cancel(reason?: any): Promise<void>;
+ getReader(): ReadableStreamDefaultReader<R>;
+ pipeThrough<T>(
+ transform: ReadableWritablePair<T, R>,
+ options?: StreamPipeOptions
+ ): ReadableStream<T>;
+ pipeTo(
+ destination: WritableStream<R>,
+ options?: StreamPipeOptions
+ ): Promise<void>;
+ tee(): [ReadableStream<R>, ReadableStream<R>];
+ forEach(
+ callbackfn: (value: any, key: number, parent: ReadableStream<R>) => void,
+ thisArg?: any
+ ): void;
+}
+
+declare var ReadableStream: {
+ prototype: ReadableStream;
+ new <R = any>(
+ underlyingSource?: DirectUnderlyingSource<R> | UnderlyingSource<R>,
+ strategy?: QueuingStrategy<R>
+ ): ReadableStream<R>;
+};
+
+interface QueuingStrategy<T = any> {
+ highWaterMark?: number;
+ size?: QueuingStrategySize<T>;
+}
+
+interface QueuingStrategyInit {
+ /**
+ * Creates a new ByteLengthQueuingStrategy with the provided high water mark.
+ *
+ * Note that the provided high water mark will not be validated ahead of time. Instead, if it is negative, NaN, or not a number, the resulting ByteLengthQueuingStrategy will cause the corresponding stream constructor to throw.
+ */
+ highWaterMark: number;
+}
+
+/** This Streams API interface provides a built-in byte length queuing strategy that can be used when constructing streams. */
+interface ByteLengthQueuingStrategy extends QueuingStrategy<ArrayBufferView> {
+ readonly highWaterMark: number;
+ readonly size: QueuingStrategySize<ArrayBufferView>;
+}
+
+declare var ByteLengthQueuingStrategy: {
+ prototype: ByteLengthQueuingStrategy;
+ new (init: QueuingStrategyInit): ByteLengthQueuingStrategy;
+};
+
+interface ReadableStreamDefaultController<R = any> {
+ readonly desiredSize: number | null;
+ close(): void;
+ enqueue(chunk?: R): void;
+ error(e?: any): void;
+}
+
+interface ReadableStreamDirectController {
+ close(error?: Error): void;
+ write(data: ArrayBufferView | ArrayBuffer | string): number | Promise<number>;
+ end(): number | Promise<number>;
+ flush(): number | Promise<number>;
+ start(): void;
+}
+
+declare var ReadableStreamDefaultController: {
+ prototype: ReadableStreamDefaultController;
+ new (): ReadableStreamDefaultController;
+};
+
+interface ReadableStreamDefaultReader<R = any>
+ extends ReadableStreamGenericReader {
+ read(): Promise<ReadableStreamDefaultReadResult<R>>;
+ releaseLock(): void;
+}
+
+declare var ReadableStreamDefaultReader: {
+ prototype: ReadableStreamDefaultReader;
+ new <R = any>(stream: ReadableStream<R>): ReadableStreamDefaultReader<R>;
+};
+
+interface ReadableStreamGenericReader {
+ readonly closed: Promise<undefined>;
+ cancel(reason?: any): Promise<void>;
+}
+
+interface ReadableStreamDefaultReadDoneResult {
+ done: true;
+ value?: undefined;
+}
+
+interface ReadableStreamDefaultReadValueResult<T> {
+ done: false;
+ value: T;
+}
+
+interface ReadableWritablePair<R = any, W = any> {
+ readable: ReadableStream<R>;
+ /**
+ * Provides a convenient, chainable way of piping this readable stream through a transform stream (or any other { writable, readable } pair). It simply pipes the stream into the writable side of the supplied pair, and returns the readable side for further use.
+ *
+ * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader.
+ */
+ writable: WritableStream<W>;
+}
+
+/** This Streams API interface provides a standard abstraction for writing streaming data to a destination, known as a sink. This object comes with built-in backpressure and queuing. */
+interface WritableStream<W = any> {
+ readonly locked: boolean;
+ abort(reason?: any): Promise<void>;
+ close(): Promise<void>;
+ getWriter(): WritableStreamDefaultWriter<W>;
+}
+
+declare var WritableStream: {
+ prototype: WritableStream;
+ new <W = any>(
+ underlyingSink?: UnderlyingSink<W>,
+ strategy?: QueuingStrategy<W>
+ ): WritableStream<W>;
+};
+
+/** This Streams API interface represents a controller allowing control of a WritableStream's state. When constructing a WritableStream, the underlying sink is given a corresponding WritableStreamDefaultController instance to manipulate. */
+interface WritableStreamDefaultController {
+ error(e?: any): void;
+}
+
+declare var WritableStreamDefaultController: {
+ prototype: WritableStreamDefaultController;
+ new (): WritableStreamDefaultController;
+};
+
+/** This Streams API interface is the object returned by WritableStream.getWriter() and once created locks the writer to the WritableStream ensuring that no other streams can write to the underlying sink. */
+interface WritableStreamDefaultWriter<W = any> {
+ readonly closed: Promise<undefined>;
+ readonly desiredSize: number | null;
+ readonly ready: Promise<undefined>;
+ abort(reason?: any): Promise<void>;
+ close(): Promise<void>;
+ releaseLock(): void;
+ write(chunk?: W): Promise<void>;
+}
+
+declare var WritableStreamDefaultWriter: {
+ prototype: WritableStreamDefaultWriter;
+ new <W = any>(stream: WritableStream<W>): WritableStreamDefaultWriter<W>;
+};
+
+interface TransformerFlushCallback<O> {
+ (controller: TransformStreamDefaultController<O>): void | PromiseLike<void>;
+}
+
+interface TransformerStartCallback<O> {
+ (controller: TransformStreamDefaultController<O>): any;
+}
+
+interface TransformerTransformCallback<I, O> {
+ (
+ chunk: I,
+ controller: TransformStreamDefaultController<O>
+ ): void | PromiseLike<void>;
+}
+
+interface UnderlyingSinkAbortCallback {
+ (reason?: any): void | PromiseLike<void>;
+}
+
+interface UnderlyingSinkCloseCallback {
+ (): void | PromiseLike<void>;
+}
+
+interface UnderlyingSinkStartCallback {
+ (controller: WritableStreamDefaultController): any;
+}
+
+interface UnderlyingSinkWriteCallback<W> {
+ (
+ chunk: W,
+ controller: WritableStreamDefaultController
+ ): void | PromiseLike<void>;
+}
+
+interface UnderlyingSourceCancelCallback {
+ (reason?: any): void | PromiseLike<void>;
+}
+
+interface UnderlyingSink<W = any> {
+ abort?: UnderlyingSinkAbortCallback;
+ close?: UnderlyingSinkCloseCallback;
+ start?: UnderlyingSinkStartCallback;
+ type?: undefined | "default" | "bytes";
+ write?: UnderlyingSinkWriteCallback<W>;
+}
+
+interface UnderlyingSource<R = any> {
+ cancel?: UnderlyingSourceCancelCallback;
+ pull?: UnderlyingSourcePullCallback<R>;
+ start?: UnderlyingSourceStartCallback<R>;
+ type?: undefined;
+}
+
+interface DirectUnderlyingSource<R = any> {
+ cancel?: UnderlyingSourceCancelCallback;
+ pull: (
+ controller: ReadableStreamDirectController
+ ) => void | PromiseLike<void>;
+ type: "direct";
+}
+
+interface UnderlyingSourcePullCallback<R> {
+ (controller: ReadableStreamController<R>): void | PromiseLike<void>;
+}
+
+interface UnderlyingSourceStartCallback<R> {
+ (controller: ReadableStreamController<R>): any;
+}
+
+interface GenericTransformStream {
+ readonly readable: ReadableStream;
+ readonly writable: WritableStream;
+}
+
+interface TransformStream<I = any, O = any> {
+ readonly readable: ReadableStream<O>;
+ readonly writable: WritableStream<I>;
+}
+
+declare var TransformStream: {
+ prototype: TransformStream;
+ new <I = any, O = any>(
+ transformer?: Transformer<I, O>,
+ writableStrategy?: QueuingStrategy<I>,
+ readableStrategy?: QueuingStrategy<O>
+ ): TransformStream<I, O>;
+};
+
+interface TransformStreamDefaultController<O = any> {
+ readonly desiredSize: number | null;
+ enqueue(chunk?: O): void;
+ error(reason?: any): void;
+ terminate(): void;
+}
+
+declare var TransformStreamDefaultController: {
+ prototype: TransformStreamDefaultController;
+ new (): TransformStreamDefaultController;
+};
+
+interface StreamPipeOptions {
+ preventAbort?: boolean;
+ preventCancel?: boolean;
+ /**
+ * Pipes this readable stream to a given writable stream destination. The way in which the piping process behaves under various error conditions can be customized with a number of passed options. It returns a promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered.
+ *
+ * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader.
+ *
+ * Errors and closures of the source and destination streams propagate as follows:
+ *
+ * An error in this source readable stream will abort destination, unless preventAbort is truthy. The returned promise will be rejected with the source's error, or with any error that occurs during aborting the destination.
+ *
+ * An error in destination will cancel this source readable stream, unless preventCancel is truthy. The returned promise will be rejected with the destination's error, or with any error that occurs during canceling the source.
+ *
+ * When this source readable stream closes, destination will be closed, unless preventClose is truthy. The returned promise will be fulfilled once this process completes, unless an error is encountered while closing the destination, in which case it will be rejected with that error.
+ *
+ * If destination starts out closed or closing, this source readable stream will be canceled, unless preventCancel is true. The returned promise will be rejected with an error indicating piping to a closed stream failed, or with any error that occurs during canceling the source.
+ *
+ * The signal option can be set to an AbortSignal to allow aborting an ongoing pipe operation via the corresponding AbortController. In this case, this source readable stream will be canceled, and destination aborted, unless the respective options preventCancel or preventAbort are set.
+ */
+ preventClose?: boolean;
+ signal?: AbortSignal;
+}
+
+/** This Streams API interface provides a built-in byte length queuing strategy that can be used when constructing streams. */
+interface CountQueuingStrategy extends QueuingStrategy {
+ readonly highWaterMark: number;
+ readonly size: QueuingStrategySize;
+}
+
+declare var CountQueuingStrategy: {
+ prototype: CountQueuingStrategy;
+ new (init: QueuingStrategyInit): CountQueuingStrategy;
+};
+
+interface QueuingStrategySize<T = any> {
+ (chunk?: T): number;
+}
+
+interface Transformer<I = any, O = any> {
+ flush?: TransformerFlushCallback<O>;
+ readableType?: undefined;
+ start?: TransformerStartCallback<O>;
+ transform?: TransformerTransformCallback<I, O>;
+ writableType?: undefined;
+}
+
+
// ./path.d.ts
/**
@@ -7928,6 +8419,7 @@ declare module "node:path/win32" {
export * from "path/win32";
}
+
// ./bun-test.d.ts
/**
@@ -7950,7 +8442,7 @@ declare module "node:path/win32" {
*/
declare module "bun:test" {
- export function describe(label: string, body: () => {}): any;
+ export function describe(label: string, body: () => void): any;
export function it(label: string, test: () => void | Promise<any>): any;
export function test(label: string, test: () => void | Promise<any>): any;
@@ -7966,3 +8458,68 @@ declare module "test" {
import BunTestModule = require("bun:test");
export = BunTestModule;
}
+
+
+// ./jsc.d.ts
+
+declare module "bun:jsc" {
+ export function describe(value: any): string;
+ export function describeArray(args: any[]): string;
+ export function gcAndSweep(): void;
+ export function fullGC(): void;
+ export function edenGC(): void;
+ export function heapSize(): number;
+ export function heapStats(): {
+ heapSize: number;
+ heapCapacity: number;
+ extraMemorySize: number;
+ objectCount: number;
+ protectedObjectCount: number;
+ globalObjectCount: number;
+ protectedGlobalObjectCount: number;
+ objectTypeCounts: Record<string, number>;
+ protectedObjectTypeCounts: Record<string, number>;
+ };
+ export function memoryUsage(): {
+ current: number;
+ peak: number;
+ currentCommit: number;
+ peakCommit: number;
+ pageFaults: number;
+ };
+ export function getRandomSeed(): number;
+ export function setRandomSeed(value: number): void;
+ export function isRope(input: string): boolean;
+ export function callerSourceOrigin(): string;
+ export function noFTL(func: Function): Function;
+ export function noOSRExitFuzzing(func: Function): Function;
+ export function optimizeNextInvocation(func: Function): Function;
+ export function numberOfDFGCompiles(func: Function): number;
+ export function releaseWeakRefs(): void;
+ export function totalCompileTime(func: Function): number;
+ export function reoptimizationRetryCount(func: Function): number;
+ export function drainMicrotasks(): void;
+
+ /**
+ * This returns objects which native code has explicitly protected from being
+ * garbage collected
+ *
+ * By calling this function you create another reference to the object, which
+ * will further prevent it from being garbage collected
+ *
+ * This function is mostly a debugging tool for bun itself.
+ *
+ * Warning: not all objects returned are supposed to be observable from JavaScript
+ */
+ export function getProtectedObjects(): any[];
+
+ /**
+ * Start a remote debugging socket server on the given port.
+ *
+ * This exposes JavaScriptCore's built-in debugging server.
+ *
+ * This is untested. May not be supported yet on macOS
+ */
+ export function startRemoteDebugger(host?: string, port?: number): void;
+}
+
diff --git a/src/napi/napi.zig b/src/napi/napi.zig
index 65ef0c45c..c66fd1f32 100644
--- a/src/napi/napi.zig
+++ b/src/napi/napi.zig
@@ -824,10 +824,8 @@ pub export fn napi_get_dataview_info(env: napi_env, dataview: napi_value, bytele
byte_offset.* = array_buffer.offset;
return .ok;
}
-pub export fn napi_get_version(_: napi_env, result: [*]u32) napi_status {
- result[0] = bun.Global.version.major;
- result[1] = bun.Global.version.minor;
- result[2] = bun.Global.version.patch;
+pub export fn napi_get_version(_: napi_env, result: *u32) napi_status {
+ result.* = NAPI_VERSION;
return .ok;
}
pub export fn napi_create_promise(env: napi_env, deferred: *napi_deferred, promise: *napi_value) napi_status {
diff --git a/types/bun/bun.d.ts b/types/bun/bun.d.ts
index f99694e01..bb59e1c15 100644
--- a/types/bun/bun.d.ts
+++ b/types/bun/bun.d.ts
@@ -320,27 +320,38 @@ declare module "bun" {
*/
export function fileURLToPath(url: URL): string;
- interface Sink<T> {
- write(chunk: string | ArrayBufferView | ArrayBuffer): number;
- flush(): Promise<number> | number;
- end(): T;
- }
-
/**
- * Fast incremental writer that becomes an ArrayBuffer on end().
+ * Fast incremental writer that becomes an `ArrayBuffer` on end().
*/
- export class ArrayBufferSink implements Sink<ArrayBuffer> {
+ export class ArrayBufferSink {
constructor();
- start({
- stream = false,
- asUint8Array = false,
- highWaterMark = 2048,
- } = {}): void;
+ start(options?: {
+ asUint8Array?: boolean;
+ /**
+ * Preallocate an internal buffer of this size
+ * This can significantly improve performance when the chunk size is small
+ */
+ highWaterMark?: number;
+ /**
+ * On {@link ArrayBufferSink.flush}, return the written data as a `Uint8Array`.
+ * Writes will restart from the beginning of the buffer.
+ */
+ stream?: boolean;
+ }): void;
write(chunk: string | ArrayBufferView | ArrayBuffer): number;
- flush(): number;
- end(): ArrayBuffer;
+ /**
+ * Flush the internal buffer
+ *
+   * If {@link ArrayBufferSink.start} was passed a `stream` option, this will return an `ArrayBuffer`
+   * If {@link ArrayBufferSink.start} was passed a `stream` option and `asUint8Array`, this will return a `Uint8Array`
+ * Otherwise, this will return the number of bytes written since the last flush
+ *
+ * This API might change later to separate Uint8ArraySink and ArrayBufferSink
+ */
+ flush(): number | Uint8Array | ArrayBuffer;
+ end(): ArrayBuffer | Uint8Array;
}
/**
diff --git a/types/bun/globals.d.ts b/types/bun/globals.d.ts
index ffff7ab5e..995327bd8 100644
--- a/types/bun/globals.d.ts
+++ b/types/bun/globals.d.ts
@@ -450,8 +450,13 @@ type ReferrerPolicy =
| "unsafe-url";
type RequestInfo = Request | string;
-type BodyInit = XMLHttpRequestBodyInit;
+type BodyInit = ReadableStream | XMLHttpRequestBodyInit;
type XMLHttpRequestBodyInit = Blob | BufferSource | string;
+type ReadableStreamController<T> = ReadableStreamDefaultController<T>;
+type ReadableStreamDefaultReadResult<T> =
+ | ReadableStreamDefaultReadValueResult<T>
+ | ReadableStreamDefaultReadDoneResult;
+type ReadableStreamReader<T> = ReadableStreamDefaultReader<T>;
interface RequestInit {
/**
@@ -1423,3 +1428,301 @@ declare var Loader: {
*/
resolveSync: (specifier: string, from: string) => string;
};
+
+/** This Streams API interface represents a readable stream of byte data. The Fetch API offers a concrete instance of a ReadableStream through the body property of a Response object. */
+interface ReadableStream<R = any> {
+ readonly locked: boolean;
+ cancel(reason?: any): Promise<void>;
+ getReader(): ReadableStreamDefaultReader<R>;
+ pipeThrough<T>(
+ transform: ReadableWritablePair<T, R>,
+ options?: StreamPipeOptions
+ ): ReadableStream<T>;
+ pipeTo(
+ destination: WritableStream<R>,
+ options?: StreamPipeOptions
+ ): Promise<void>;
+ tee(): [ReadableStream<R>, ReadableStream<R>];
+ forEach(
+ callbackfn: (value: any, key: number, parent: ReadableStream<R>) => void,
+ thisArg?: any
+ ): void;
+}
+
+declare var ReadableStream: {
+ prototype: ReadableStream;
+ new <R = any>(
+ underlyingSource?: DirectUnderlyingSource<R> | UnderlyingSource<R>,
+ strategy?: QueuingStrategy<R>
+ ): ReadableStream<R>;
+};
+
+interface QueuingStrategy<T = any> {
+ highWaterMark?: number;
+ size?: QueuingStrategySize<T>;
+}
+
+interface QueuingStrategyInit {
+ /**
+ * Creates a new ByteLengthQueuingStrategy with the provided high water mark.
+ *
+ * Note that the provided high water mark will not be validated ahead of time. Instead, if it is negative, NaN, or not a number, the resulting ByteLengthQueuingStrategy will cause the corresponding stream constructor to throw.
+ */
+ highWaterMark: number;
+}
+
+/** This Streams API interface provides a built-in byte length queuing strategy that can be used when constructing streams. */
+interface ByteLengthQueuingStrategy extends QueuingStrategy<ArrayBufferView> {
+ readonly highWaterMark: number;
+ readonly size: QueuingStrategySize<ArrayBufferView>;
+}
+
+declare var ByteLengthQueuingStrategy: {
+ prototype: ByteLengthQueuingStrategy;
+ new (init: QueuingStrategyInit): ByteLengthQueuingStrategy;
+};
+
+interface ReadableStreamDefaultController<R = any> {
+ readonly desiredSize: number | null;
+ close(): void;
+ enqueue(chunk?: R): void;
+ error(e?: any): void;
+}
+
+interface ReadableStreamDirectController {
+ close(error?: Error): void;
+ write(data: ArrayBufferView | ArrayBuffer | string): number | Promise<number>;
+ end(): number | Promise<number>;
+ flush(): number | Promise<number>;
+ start(): void;
+}
+
+declare var ReadableStreamDefaultController: {
+ prototype: ReadableStreamDefaultController;
+ new (): ReadableStreamDefaultController;
+};
+
+interface ReadableStreamDefaultReader<R = any>
+ extends ReadableStreamGenericReader {
+ read(): Promise<ReadableStreamDefaultReadResult<R>>;
+ releaseLock(): void;
+}
+
+declare var ReadableStreamDefaultReader: {
+ prototype: ReadableStreamDefaultReader;
+ new <R = any>(stream: ReadableStream<R>): ReadableStreamDefaultReader<R>;
+};
+
+interface ReadableStreamGenericReader {
+ readonly closed: Promise<undefined>;
+ cancel(reason?: any): Promise<void>;
+}
+
+interface ReadableStreamDefaultReadDoneResult {
+ done: true;
+ value?: undefined;
+}
+
+interface ReadableStreamDefaultReadValueResult<T> {
+ done: false;
+ value: T;
+}
+
+interface ReadableWritablePair<R = any, W = any> {
+ readable: ReadableStream<R>;
+ /**
+ * Provides a convenient, chainable way of piping this readable stream through a transform stream (or any other { writable, readable } pair). It simply pipes the stream into the writable side of the supplied pair, and returns the readable side for further use.
+ *
+ * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader.
+ */
+ writable: WritableStream<W>;
+}
+
+/** This Streams API interface provides a standard abstraction for writing streaming data to a destination, known as a sink. This object comes with built-in backpressure and queuing. */
+interface WritableStream<W = any> {
+ readonly locked: boolean;
+ abort(reason?: any): Promise<void>;
+ close(): Promise<void>;
+ getWriter(): WritableStreamDefaultWriter<W>;
+}
+
+declare var WritableStream: {
+ prototype: WritableStream;
+ new <W = any>(
+ underlyingSink?: UnderlyingSink<W>,
+ strategy?: QueuingStrategy<W>
+ ): WritableStream<W>;
+};
+
+/** This Streams API interface represents a controller allowing control of a WritableStream's state. When constructing a WritableStream, the underlying sink is given a corresponding WritableStreamDefaultController instance to manipulate. */
+interface WritableStreamDefaultController {
+ error(e?: any): void;
+}
+
+declare var WritableStreamDefaultController: {
+ prototype: WritableStreamDefaultController;
+ new (): WritableStreamDefaultController;
+};
+
+/** This Streams API interface is the object returned by WritableStream.getWriter() and once created locks the writer to the WritableStream ensuring that no other streams can write to the underlying sink. */
+interface WritableStreamDefaultWriter<W = any> {
+ readonly closed: Promise<undefined>;
+ readonly desiredSize: number | null;
+ readonly ready: Promise<undefined>;
+ abort(reason?: any): Promise<void>;
+ close(): Promise<void>;
+ releaseLock(): void;
+ write(chunk?: W): Promise<void>;
+}
+
+declare var WritableStreamDefaultWriter: {
+ prototype: WritableStreamDefaultWriter;
+ new <W = any>(stream: WritableStream<W>): WritableStreamDefaultWriter<W>;
+};
+
+interface TransformerFlushCallback<O> {
+ (controller: TransformStreamDefaultController<O>): void | PromiseLike<void>;
+}
+
+interface TransformerStartCallback<O> {
+ (controller: TransformStreamDefaultController<O>): any;
+}
+
+interface TransformerTransformCallback<I, O> {
+ (
+ chunk: I,
+ controller: TransformStreamDefaultController<O>
+ ): void | PromiseLike<void>;
+}
+
+interface UnderlyingSinkAbortCallback {
+ (reason?: any): void | PromiseLike<void>;
+}
+
+interface UnderlyingSinkCloseCallback {
+ (): void | PromiseLike<void>;
+}
+
+interface UnderlyingSinkStartCallback {
+ (controller: WritableStreamDefaultController): any;
+}
+
+interface UnderlyingSinkWriteCallback<W> {
+ (
+ chunk: W,
+ controller: WritableStreamDefaultController
+ ): void | PromiseLike<void>;
+}
+
+interface UnderlyingSourceCancelCallback {
+ (reason?: any): void | PromiseLike<void>;
+}
+
+interface UnderlyingSink<W = any> {
+ abort?: UnderlyingSinkAbortCallback;
+ close?: UnderlyingSinkCloseCallback;
+ start?: UnderlyingSinkStartCallback;
+ type?: undefined | "default" | "bytes";
+ write?: UnderlyingSinkWriteCallback<W>;
+}
+
+interface UnderlyingSource<R = any> {
+ cancel?: UnderlyingSourceCancelCallback;
+ pull?: UnderlyingSourcePullCallback<R>;
+ start?: UnderlyingSourceStartCallback<R>;
+ type?: undefined;
+}
+
+interface DirectUnderlyingSource<R = any> {
+ cancel?: UnderlyingSourceCancelCallback;
+ pull: (
+ controller: ReadableStreamDirectController
+ ) => void | PromiseLike<void>;
+ type: "direct";
+}
+
+interface UnderlyingSourcePullCallback<R> {
+ (controller: ReadableStreamController<R>): void | PromiseLike<void>;
+}
+
+interface UnderlyingSourceStartCallback<R> {
+ (controller: ReadableStreamController<R>): any;
+}
+
+interface GenericTransformStream {
+ readonly readable: ReadableStream;
+ readonly writable: WritableStream;
+}
+
+interface TransformStream<I = any, O = any> {
+ readonly readable: ReadableStream<O>;
+ readonly writable: WritableStream<I>;
+}
+
+declare var TransformStream: {
+ prototype: TransformStream;
+ new <I = any, O = any>(
+ transformer?: Transformer<I, O>,
+ writableStrategy?: QueuingStrategy<I>,
+ readableStrategy?: QueuingStrategy<O>
+ ): TransformStream<I, O>;
+};
+
+interface TransformStreamDefaultController<O = any> {
+ readonly desiredSize: number | null;
+ enqueue(chunk?: O): void;
+ error(reason?: any): void;
+ terminate(): void;
+}
+
+declare var TransformStreamDefaultController: {
+ prototype: TransformStreamDefaultController;
+ new (): TransformStreamDefaultController;
+};
+
+interface StreamPipeOptions {
+ preventAbort?: boolean;
+ preventCancel?: boolean;
+ /**
+ * Pipes this readable stream to a given writable stream destination. The way in which the piping process behaves under various error conditions can be customized with a number of passed options. It returns a promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered.
+ *
+ * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader.
+ *
+ * Errors and closures of the source and destination streams propagate as follows:
+ *
+ * An error in this source readable stream will abort destination, unless preventAbort is truthy. The returned promise will be rejected with the source's error, or with any error that occurs during aborting the destination.
+ *
+ * An error in destination will cancel this source readable stream, unless preventCancel is truthy. The returned promise will be rejected with the destination's error, or with any error that occurs during canceling the source.
+ *
+ * When this source readable stream closes, destination will be closed, unless preventClose is truthy. The returned promise will be fulfilled once this process completes, unless an error is encountered while closing the destination, in which case it will be rejected with that error.
+ *
+ * If destination starts out closed or closing, this source readable stream will be canceled, unless preventCancel is true. The returned promise will be rejected with an error indicating piping to a closed stream failed, or with any error that occurs during canceling the source.
+ *
+ * The signal option can be set to an AbortSignal to allow aborting an ongoing pipe operation via the corresponding AbortController. In this case, this source readable stream will be canceled, and destination aborted, unless the respective options preventCancel or preventAbort are set.
+ */
+ preventClose?: boolean;
+ signal?: AbortSignal;
+}
+
+/** This Streams API interface provides a built-in chunk counting queuing strategy that can be used when constructing streams. */
+interface CountQueuingStrategy extends QueuingStrategy {
+ readonly highWaterMark: number;
+ readonly size: QueuingStrategySize;
+}
+
+declare var CountQueuingStrategy: {
+ prototype: CountQueuingStrategy;
+ new (init: QueuingStrategyInit): CountQueuingStrategy;
+};
+
+interface QueuingStrategySize<T = any> {
+ (chunk?: T): number;
+}
+
+interface Transformer<I = any, O = any> {
+ flush?: TransformerFlushCallback<O>;
+ readableType?: undefined;
+ start?: TransformerStartCallback<O>;
+ transform?: TransformerTransformCallback<I, O>;
+ writableType?: undefined;
+}
diff --git a/types/bun/jsc.d.ts b/types/bun/jsc.d.ts
index 0b377ef8e..c4dce65df 100644
--- a/types/bun/jsc.d.ts
+++ b/types/bun/jsc.d.ts
@@ -25,7 +25,7 @@ declare module "bun:jsc" {
};
export function getRandomSeed(): number;
export function setRandomSeed(value: number): void;
- export function isRope(input: string): bool;
+ export function isRope(input: string): boolean;
export function callerSourceOrigin(): string;
export function noFTL(func: Function): Function;
export function noOSRExitFuzzing(func: Function): Function;
diff --git a/types/bun/sqlite.d.ts b/types/bun/sqlite.d.ts
index 02d1007d0..d1f4d5b16 100644
--- a/types/bun/sqlite.d.ts
+++ b/types/bun/sqlite.d.ts
@@ -28,7 +28,7 @@ declare module "bun:sqlite" {
/**
* Open or create a SQLite3 database
*
- * @param filename The filename of the database to open. Pass an empty string (`""`) or `":memory:"` for an in-memory database.
+ * @param filename The filename of the database to open. Pass an empty string (`""`) or `":memory:"` or undefined for an in-memory database.
* @param options defaults to `{readwrite: true, create: true}`. If a number, then it's treated as `SQLITE_OPEN_*` constant flags.
*
* @example
@@ -60,7 +60,7 @@ declare module "bun:sqlite" {
* ```
*/
constructor(
- filename: string,
+ filename?: string,
options?:
| number
| {
@@ -167,14 +167,14 @@ declare module "bun:sqlite" {
*/
run<ParamsType = SQLQueryBindings>(
sqlQuery: string,
- ...bindings: ParamsType
+ ...bindings: ParamsType[]
): void;
/**
This is an alias of {@link Database.prototype.run}
*/
exec<ParamsType = SQLQueryBindings>(
sqlQuery: string,
- ...bindings: ParamsType
+ ...bindings: ParamsType[]
): void;
/**
@@ -357,8 +357,6 @@ declare module "bun:sqlite" {
};
}
- export { default as Database };
-
/**
* A prepared statement.
*
@@ -744,4 +742,6 @@ declare module "bun:sqlite" {
| boolean
| null
| Record<string, string | bigint | TypedArray | number | boolean | null>;
+
+ export default Database;
}