aboutsummaryrefslogtreecommitdiff
path: root/integration/bunjs-only-snippets/streams.test.js
diff options
context:
space:
mode:
authorGravatar Jarred Sumner <jarred@jarredsumner.com> 2022-06-07 22:32:46 -0700
committerGravatar GitHub <noreply@github.com> 2022-06-07 22:32:46 -0700
commit43de33afc7fcc4cab25f578566e225ba9e4d4258 (patch)
tree141676095981741c3a5740093fee79ed12d4edcd /integration/bunjs-only-snippets/streams.test.js
parent958fc3d4f5ba2a1fb5b5e1e2b9fe3a4500dbefc6 (diff)
downloadbun-43de33afc7fcc4cab25f578566e225ba9e4d4258.tar.gz
bun-43de33afc7fcc4cab25f578566e225ba9e4d4258.tar.zst
bun-43de33afc7fcc4cab25f578566e225ba9e4d4258.zip
Web Streams API (#176)
* [bun.js] `WritableStream`, `ReadableStream`, `TransformStream`, `WritableStreamDefaultController`, `ReadableStreamDefaultController` & more * Implement `Blob.stream()` * Update streams.test.js * Fix sourcemaps crash * [TextEncoder] 3x faster in hot loops * reading almost works * start to implement native streams * Implement `Blob.stream()` * Implement `Bun.file(pathOrFd).stream()` * Add an extra function * [fs.readFile] Improve performance * make jsc bindings a little easier to work with * fix segfault * faster async/await + readablestream optimizations * WebKit updates * More WebKit updates * Add releaseWeakRefs binding * `bun:jsc` * More streams * Update streams.test.js * Update Makefile * Update mimalloc * Update WebKit * Create bun-jsc.test.js * Faster ReadableStream * Fix off by one & exceptions * Handle empty files/blobs * Update streams.test.js * Move streams to its own file * temp * impl #1 * take two * good enough for now * Implement `readableStreamToArray`, `readableStreamToArrayBuffer`, `concatArrayBuffers` * jsxOptimizationInlining * Fix crash * Add `jsxOptimizationInline` to Bun.Transpiler * Update Transpiler types * Update js_ast.zig * Automatically choose production mode when NODE_ENV="production" * Update cli.zig * [jsx] Handle defaultProps when inlining * Update transpiler.test.js * uncomment some tests Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Diffstat (limited to 'integration/bunjs-only-snippets/streams.test.js')
-rw-r--r--integration/bunjs-only-snippets/streams.test.js186
1 files changed, 186 insertions, 0 deletions
diff --git a/integration/bunjs-only-snippets/streams.test.js b/integration/bunjs-only-snippets/streams.test.js
new file mode 100644
index 000000000..d694be1ba
--- /dev/null
+++ b/integration/bunjs-only-snippets/streams.test.js
@@ -0,0 +1,186 @@
+import {
+ file,
+ gc,
+ readableStreamToArrayBuffer,
+ readableStreamToArray,
+} from "bun";
+import { expect, it } from "bun:test";
+import { writeFileSync } from "node:fs";
+
it("exists globally", () => {
  // Every Web Streams constructor should be exposed as a global function.
  // Looked up via globalThis so a missing class reads as typeof "undefined"
  // (same failure mode as the direct `typeof Name` check).
  const webStreamGlobals = [
    "ReadableStream",
    "ReadableStreamBYOBReader",
    "ReadableStreamBYOBRequest",
    "ReadableStreamDefaultController",
    "ReadableStreamDefaultReader",
    "TransformStream",
    "TransformStreamDefaultController",
    "WritableStream",
    "WritableStreamDefaultController",
    "WritableStreamDefaultWriter",
    "ByteLengthQueuingStrategy",
    "CountQueuingStrategy",
  ];
  for (const name of webStreamGlobals) {
    expect(typeof globalThis[name]).toBe("function");
  }
});
+
// A byte stream with one chunk enqueued in start(): a single read() must
// deliver exactly those bytes.
it("ReadableStream (bytes)", async () => {
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue(Buffer.from("abdefgh"));
    },
    pull(controller) {},
    cancel() {},
    type: "bytes",
  });
  const reader = stream.getReader();
  const first = await reader.read();
  // Compare element-by-element via join; the byte-stream chunk is a
  // Uint8Array view rather than the original Buffer.
  expect(first.value.join("")).toBe(Buffer.from("abdefgh").join(""));
});
+
// Same as the bytes test but for a default (non-byte) stream, which also
// closes the controller after enqueueing its single chunk.
it("ReadableStream (default)", async () => {
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue(Buffer.from("abdefgh"));
      controller.close();
    },
    pull(controller) {},
    cancel() {},
  });
  const reader = stream.getReader();
  const first = await reader.read();
  expect(first.value.join("")).toBe(Buffer.from("abdefgh").join(""));
});
+
// Bun.readableStreamToArray should drain a byte stream into an array of
// chunks, ending when the pull() callback closes the controller.
it("readableStreamToArray", async () => {
  const pending = [Buffer.from("abdefgh")];
  const stream = new ReadableStream({
    pull(controller) {
      const next = pending.shift();
      if (next) {
        controller.enqueue(next);
      } else {
        // Queue exhausted — signal end-of-stream.
        controller.close();
      }
    },
    cancel() {},
    type: "bytes",
  });

  const chunks = await readableStreamToArray(stream);

  expect(chunks[0].join("")).toBe(Buffer.from("abdefgh").join(""));
});
+
// Bun.readableStreamToArrayBuffer should concatenate a byte stream's chunks
// into one ArrayBuffer.
it("readableStreamToArrayBuffer (bytes)", async () => {
  const pending = [Buffer.from("abdefgh")];
  const stream = new ReadableStream({
    pull(controller) {
      const next = pending.shift();
      if (next) {
        controller.enqueue(next);
      } else {
        controller.close();
      }
    },
    cancel() {},
    type: "bytes",
  });

  const buffer = await readableStreamToArrayBuffer(stream);
  const text = new TextDecoder().decode(new Uint8Array(buffer));
  expect(text).toBe("abdefgh");
});
+
// Same as the bytes variant, but for a default stream (no type: "bytes") —
// the concatenation path must handle ordinary chunks too.
it("readableStreamToArrayBuffer (default)", async () => {
  const pending = [Buffer.from("abdefgh")];
  const stream = new ReadableStream({
    pull(controller) {
      const next = pending.shift();
      if (next) {
        controller.enqueue(next);
      } else {
        controller.close();
      }
    },
    cancel() {},
  });

  const buffer = await readableStreamToArrayBuffer(stream);
  const text = new TextDecoder().decode(new Uint8Array(buffer));
  expect(text).toBe("abdefgh");
});
+
// Blob.stream() should yield the blob's full contents; reading chunk by
// chunk and decoding must reproduce the original text.
it("ReadableStream for Blob", async () => {
  const blob = new Blob(["abdefgh", "ijklmnop"]);
  expect(await blob.text()).toBe("abdefghijklmnop");

  const reader = blob.stream().getReader();
  const decoded = [];
  for (
    let result = await reader.read();
    !result.done;
    result = await reader.read()
  ) {
    decoded.push(new TextDecoder().decode(result.value));
  }
  expect(decoded.join("")).toBe(
    new TextDecoder().decode(Buffer.from("abdefghijklmnop"))
  );
});
+
// Streams a real on-disk file in bounded chunks and checks the streamed
// bytes match Blob.arrayBuffer() for the same file.
//
// The gc(true) calls and the `stream = undefined` / `reader = undefined`
// assignments appear deliberate: dropping the last JS references while the
// read loop is still in flight lets the (forced) GC reclaim the stream and
// reader early, stressing the native implementation for
// use-after-free-style bugs. Do not "clean up" this sequencing.
it("ReadableStream for File", async () => {
  var blob = file(import.meta.dir + "/fetch.js.txt");
  // 24 = requested maximum chunk size in bytes (asserted inside the loop).
  var stream = blob.stream(24);
  const chunks = [];
  var reader = stream.getReader();
  // Drop the stream reference; only the reader keeps it alive now.
  stream = undefined;
  while (true) {
    const chunk = await reader.read();
    gc(true);
    if (chunk.done) break;
    chunks.push(chunk.value);
    expect(chunk.value.byteLength <= 24).toBe(true);
    gc(true);
  }
  reader = undefined;
  // Compare the streamed bytes against a one-shot read of the same blob,
  // both flattened to a digit string via TypedArray join.
  const output = new Uint8Array(await blob.arrayBuffer()).join("");
  const input = chunks.map((a) => a.join("")).join("");
  expect(output).toBe(input);
  gc(true);
});
+
// Opening a stream for a nonexistent file should throw synchronously with a
// node-style ENOENT error from open(2).
//
// Note: the original version threw a sentinel Error inside the try block,
// which was then caught by the test's own catch — a missing throw surfaced
// as a confusing `expect(undefined).toBe("ENOENT")` on the sentinel. The
// error is now captured and asserted after the try/catch so a missing throw
// fails clearly.
it("ReadableStream for File errors", async () => {
  var blob = file(import.meta.dir + "/fetch.js.txt.notfound");
  let thrown;
  try {
    blob.stream().getReader();
  } catch (e) {
    thrown = e;
  }
  // Must have thrown at all…
  expect(thrown !== undefined).toBe(true);
  // …and with the expected errno/syscall metadata.
  expect(thrown.code).toBe("ENOENT");
  expect(thrown.syscall).toBe("open");
});
+
// An empty Blob's stream must report done on the very first read, yielding
// zero chunks.
it("ReadableStream for empty blob closes immediately", async () => {
  const reader = new Blob([]).stream().getReader();
  const received = [];
  let result = await reader.read();
  while (!result.done) {
    received.push(result.value);
    result = await reader.read();
  }

  expect(received.length).toBe(0);
});
+
// Same as the empty-blob case, but backed by a zero-byte file on disk: the
// native file stream must close without producing any chunks.
it("ReadableStream for empty file closes immediately", async () => {
  writeFileSync("/tmp/bun-empty-file-123456", "");
  const blob = file("/tmp/bun-empty-file-123456");
  const reader = blob.stream().getReader();
  const received = [];
  let result = await reader.read();
  while (!result.done) {
    received.push(result.value);
    result = await reader.read();
  }

  expect(received.length).toBe(0);
});