path: root/test/bun.js/fetch-gzip.test.ts
author Jarred Sumner <jarred@jarredsumner.com> 2022-11-12 18:32:53 -0800
committer Jarred Sumner <jarred@jarredsumner.com> 2022-11-12 18:32:53 -0800
commit 1cce9da80a51d49e423223f24f94fee6a044ab10 (patch)
tree 56daa58619a6544ec6a6a60334e2db051f7a274b /test/bun.js/fetch-gzip.test.ts
parent 21bf3ddaf23c842dc12a1d76dbd3b48daf08f349 (diff)
Fix memory leak in gzip pool + add test for gzip'd data
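
The tests below compare fetch()'s decompressed response body against test/bun.js/fixture.html and serve a pre-gzipped sibling, test/bun.js/fixture.html.gz. How that fixture was produced is not part of this commit; a minimal sketch of the relationship the tests assume, using node:zlib (available in Bun), would be:

    // Assumed fixture relationship (not part of this commit):
    // fixture.html.gz holds the gzip'd bytes of fixture.html.
    import { gzipSync } from "zlib";
    import fs from "fs";

    const dir = import.meta.dir;
    const html = fs.readFileSync(dir + "/fixture.html");
    fs.writeFileSync(dir + "/fixture.html.gz", gzipSync(html));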
Diffstat (limited to 'test/bun.js/fetch-gzip.test.ts')
-rw-r--r--  test/bun.js/fetch-gzip.test.ts  116
1 file changed, 116 insertions(+), 0 deletions(-)
diff --git a/test/bun.js/fetch-gzip.test.ts b/test/bun.js/fetch-gzip.test.ts
new file mode 100644
index 000000000..a75e01701
--- /dev/null
+++ b/test/bun.js/fetch-gzip.test.ts
@@ -0,0 +1,116 @@
+import { concatArrayBuffers } from "bun";
+import { it, describe, expect } from "bun:test";
+import fs from "fs";
+import { gc } from "./gc";
+
+it("fetch() with a buffered gzip response works (one chunk)", async () => {
+ var server = Bun.serve({
+ port: 6025,
+
+ async fetch(req) {
+ return new Response(
+ await Bun.file(import.meta.dir + "/fixture.html.gz").arrayBuffer(),
+ {
+ headers: {
+ "Content-Encoding": "gzip",
+ "Content-Type": "text/html; charset=utf-8",
+ },
+ },
+ );
+ },
+ });
+
+ const res = await fetch(
+ `http://${server.hostname}:${server.port}`,
+ {},
+ { verbose: true },
+ );
+ const arrayBuffer = await res.arrayBuffer();
+ expect(
+ new Buffer(arrayBuffer).equals(
+ new Buffer(
+ await Bun.file(import.meta.dir + "/fixture.html").arrayBuffer(),
+ ),
+ ),
+ ).toBe(true);
+ server.stop();
+});
+
+it("fetch() with a gzip response works (one chunk)", async () => {
+ var server = Bun.serve({
+ port: 6023,
+
+ fetch(req) {
+ return new Response(Bun.file(import.meta.dir + "/fixture.html.gz"), {
+ headers: {
+ "Content-Encoding": "gzip",
+ "Content-Type": "text/html; charset=utf-8",
+ },
+ });
+ },
+ });
+
+ const res = await fetch(`http://${server.hostname}:${server.port}`);
+ const arrayBuffer = await res.arrayBuffer();
+ expect(
+ new Buffer(arrayBuffer).equals(
+ new Buffer(
+ await Bun.file(import.meta.dir + "/fixture.html").arrayBuffer(),
+ ),
+ ),
+ ).toBe(true);
+ server.stop();
+});
+
+it("fetch() with a gzip response works (multiple chunks)", async () => {
+ var server = Bun.serve({
+ port: 6024,
+
+ fetch(req) {
+ return new Response(
+ new ReadableStream({
+ type: "direct",
+ async pull(controller) {
+ var chunks: ArrayBuffer[] = [];
+ const buffer = await Bun.file(
+ import.meta.dir + "/fixture.html.gz",
+ ).arrayBuffer();
+ var remaining = buffer;
+ for (var i = 100; i < buffer.byteLength; i += 100) {
+ var chunk = remaining.slice(0, i);
+ remaining = remaining.slice(i);
+ controller.write(chunk);
+ chunks.push(chunk);
+ await controller.flush();
+ }
+
+ await controller.flush();
+ // sanity check
+ expect(
+ new Buffer(concatArrayBuffers(chunks)).equals(new Buffer(buffer)),
+ ).toBe(true);
+
+ controller.end();
+ },
+ }),
+ {
+ headers: {
+ "Content-Encoding": "gzip",
+ "Content-Type": "text/html; charset=utf-8",
+ "Content-Length": "1",
+ },
+ },
+ );
+ },
+ });
+
+ const res = await fetch(`http://${server.hostname}:${server.port}`, {});
+ const arrayBuffer = await res.arrayBuffer();
+ expect(
+ new Buffer(arrayBuffer).equals(
+ new Buffer(
+ await Bun.file(import.meta.dir + "/fixture.html").arrayBuffer(),
+ ),
+ ),
+ ).toBe(true);
+});
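
All three cases assert the same property: fetch() transparently gunzips a response served with Content-Encoding: gzip, whether the body is a buffered ArrayBuffer, a Bun.file, or a chunked direct ReadableStream. As an illustration only (the tests themselves never call zlib), that property reduces to the fixtures being a gzip round-trip of one another, which can be checked directly:

    // Not part of the commit — a sketch of the invariant the tests rely on:
    // gunzipping fixture.html.gz must yield fixture.html byte-for-byte.
    import { gunzipSync } from "zlib";

    const dir = import.meta.dir;
    const plain = Buffer.from(await Bun.file(dir + "/fixture.html").arrayBuffer());
    const gz = Buffer.from(await Bun.file(dir + "/fixture.html.gz").arrayBuffer());
    console.log(gunzipSync(gz).equals(plain)); // prints true when the fixtures match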