import { it, describe, expect } from "bun:test";
import fs from "fs";
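// Bun.gc(true) forces a synchronous garbage-collection pass; the tests call it
// between almost every step to surface lifetime bugs (e.g. a body's backing
// store being collected while a read is still pending).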
function gc() {
Bun.gc(true);
}
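// Fetch the same page over HTTPS and HTTP and compare the body against the
// fetch.js.txt snapshot checked in next to this test file.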
describe("fetch", () => {
const urls = ["https://example.com", "http://example.com"];
for (let url of urls) {
gc();
it(url, async () => {
gc();
const response = await fetch(url);
gc();
const text = await response.text();
gc();
expect(
fs.readFileSync(
import.meta.path.substring(0, import.meta.path.lastIndexOf("/")) +
"/fetch.js.txt",
"utf8"
)
).toBe(text);
});
}
});
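// Shared suite for anything exposing the Blob reader interface
// (json / text / arrayBuffer / blob). `blobbyConstructor` builds the object
// under test from a string or Uint8Array body; `hasBlobFn` opts into the
// .blob() cases (Response and Request only, since Blob itself has no .blob()).
// Every case runs with and without forced GC, and with both a latin1-only and
// a UTF-16 (emoji) JSON payload.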
function testBlobInterface(blobbyConstructor, hasBlobFn) {
for (let withGC of [false, true]) {
for (let jsonObject of [
{ hello: true },
{
hello:
"π π π π π π
π π€£ π₯² βΊοΈ π π π π π π π π₯° π π π π π π π π π€ͺ π€¨ π§ π€ π π₯Έ π€© π₯³",
},
]) {
it(`${jsonObject.hello === true ? "latin1" : "utf16"} json${
withGC ? " (with gc) " : ""
}`, async () => {
if (withGC) gc();
var response = blobbyConstructor(JSON.stringify(jsonObject));
if (withGC) gc();
expect(JSON.stringify(await response.json())).toBe(
JSON.stringify(jsonObject)
);
if (withGC) gc();
});
it(`${
jsonObject.hello === true ? "latin1" : "utf16"
} arrayBuffer -> json${withGC ? " (with gc) " : ""}`, async () => {
if (withGC) gc();
var response = blobbyConstructor(
new TextEncoder().encode(JSON.stringify(jsonObject))
);
if (withGC) gc();
expect(JSON.stringify(await response.json())).toBe(
JSON.stringify(jsonObject)
);
if (withGC) gc();
});
it(`${jsonObject.hello === true ? "latin1" : "utf16"} text${
withGC ? " (with gc) " : ""
}`, async () => {
if (withGC) gc();
var response = blobbyConstructor(JSON.stringify(jsonObject));
if (withGC) gc();
expect(await response.text()).toBe(JSON.stringify(jsonObject));
if (withGC) gc();
});
it(`${
jsonObject.hello === true ? "latin1" : "utf16"
} arrayBuffer -> text${withGC ? " (with gc) " : ""}`, async () => {
if (withGC) gc();
var response = blobbyConstructor(
new TextEncoder().encode(JSON.stringify(jsonObject))
);
if (withGC) gc();
expect(await response.text()).toBe(JSON.stringify(jsonObject));
if (withGC) gc();
});
it(`${jsonObject.hello === true ? "latin1" : "utf16"} arrayBuffer${
withGC ? " (with gc) " : ""
}`, async () => {
if (withGC) gc();
var response = blobbyConstructor(JSON.stringify(jsonObject));
if (withGC) gc();
const bytes = new TextEncoder().encode(JSON.stringify(jsonObject));
if (withGC) gc();
const compare = new Uint8Array(await response.arrayBuffer());
if (withGC) gc();
for (let i = 0; i < compare.length; i++) {
if (withGC) gc();
expect(compare[i]).toBe(bytes[i]);
if (withGC) gc();
}
if (withGC) gc();
});
it(`${
jsonObject.hello === true ? "latin1" : "utf16"
} arrayBuffer -> arrayBuffer${withGC ? " (with gc) " : ""}`, async () => {
if (withGC) gc();
var response = blobbyConstructor(
new TextEncoder().encode(JSON.stringify(jsonObject))
);
if (withGC) gc();
const bytes = new TextEncoder().encode(JSON.stringify(jsonObject));
if (withGC) gc();
const compare = new Uint8Array(await response.arrayBuffer());
if (withGC) gc();
for (let i = 0; i < compare.length; i++) {
if (withGC) gc();
expect(compare[i]).toBe(bytes[i]);
if (withGC) gc();
}
if (withGC) gc();
});
hasBlobFn &&
it(`${jsonObject.hello === true ? "latin1" : "utf16"} blob${
withGC ? " (with gc) " : ""
}`, async () => {
if (withGC) gc();
const text = JSON.stringify(jsonObject);
var response = blobbyConstructor(text);
if (withGC) gc();
const size = new TextEncoder().encode(text).byteLength;
if (withGC) gc();
const blobed = await response.blob();
if (withGC) gc();
expect(blobed instanceof Blob).toBe(true);
if (withGC) gc();
expect(blobed.size).toBe(size);
if (withGC) gc();
expect(blobed.type).toBe("");
if (withGC) gc();
blobed.type = "application/json";
if (withGC) gc();
expect(blobed.type).toBe("application/json");
if (withGC) gc();
const out = await blobed.text();
expect(out).toBe(text);
if (withGC) gc();
await new Promise((resolve) => setTimeout(resolve, 1));
if (withGC) gc();
expect(out).toBe(text);
// arrayBuffer() must return a fresh copy on every call; read the bytes
// through a Uint8Array view so the comparison actually inspects them.
const first = new Uint8Array(await blobed.arrayBuffer());
const initial = first[0];
first[0] = 254;
const second = new Uint8Array(await blobed.arrayBuffer());
expect(second[0]).toBe(initial);
expect(first[0]).toBe(254);
});
}
}
}
describe("Blob", () => {
testBlobInterface((data) => new Blob([data]));
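// Each entry is a Blob constructor `parts` array and `expected[i]` is the
// corresponding text(): typed arrays and nested Blobs contribute raw bytes,
// while everything else is stringified.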
var blobConstructorValues = [
["123", "456"],
["123", 456],
["123", "456", "789"],
["123", 456, 789],
[1, 2, 3, 4, 5, 6, 7, 8, 9],
[Uint8Array.from([1, 2, 3, 4, 5, 6, 7, 9])],
[Uint8Array.from([1, 2, 3, 4]), "5678", 9],
[new Blob([Uint8Array.from([1, 2, 3, 4])]), "5678", 9],
[
new Blob([
new TextEncoder().encode(
"π π π π π π
π π€£ π₯² βΊοΈ π π π π π π π π₯° π π π π π π π π π€ͺ π€¨ π§ π€ π π₯Έ π€© π₯³"
),
]),
],
[
new TextEncoder().encode(
"π π π π π π
π π€£ π₯² βΊοΈ π π π π π π π π₯° π π π π π π π π π€ͺ π€¨ π§ π€ π π₯Έ π€© π₯³"
),
],
];
var expected = [
"123456",
"123456",
"123456789",
"123456789",
"123456789",
"\x01\x02\x03\x04\x05\x06\x07\t",
"\x01\x02\x03\x0456789",
"\x01\x02\x03\x0456789",
"π π π π π π
π π€£ π₯² βΊοΈ π π π π π π π π₯° π π π π π π π π π€ͺ π€¨ π§ π€ π π₯Έ π€© π₯³",
"π π π π π π
π π€£ π₯² βΊοΈ π π π π π π π π₯° π π π π π π π π π€ͺ π€¨ π§ π€ π π₯Έ π€© π₯³",
];
it(`blobConstructorValues`, async () => {
for (let i = 0; i < blobConstructorValues.length; i++) {
var response = new Blob(blobConstructorValues[i]);
const res = await response.text();
if (res !== expected[i]) {
throw new Error(
`Failed: ${expected[i]
.split("")
.map((a) => a.charCodeAt(0))}, received: ${res
.split("")
.map((a) => a.charCodeAt(0))}`
);
}
expect(res).toBe(expected[i]);
}
});
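// Blob.slice(start, end) should produce an independent sub-blob; slicing and
// reading the same ranges twice checks that neither the parent blob nor the
// slices are consumed by the first read.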
for (let withGC of [false, true]) {
it(`Blob.slice() ${withGC ? " with gc" : ""}`, async () => {
var parts = ["hello", " ", "world"];
if (withGC) gc();
var str = parts.join("");
if (withGC) gc();
var combined = new Blob(parts);
if (withGC) gc();
for (let part of parts) {
if (withGC) gc();
expect(
await combined
.slice(str.indexOf(part), str.indexOf(part) + part.length)
.text()
).toBe(part);
if (withGC) gc();
}
if (withGC) gc();
for (let part of parts) {
if (withGC) gc();
expect(
await combined
.slice(str.indexOf(part), str.indexOf(part) + part.length)
.text()
).toBe(part);
if (withGC) gc();
}
});
}
});
describe("Response", () => {
describe("Response.json", () => {
it("works", async () => {
const inputs = [
"hellooo",
[[123], 456, 789],
{ hello: "world" },
{ ok: "π π π π₯° π " },
];
for (let input of inputs) {
const output = JSON.stringify(input);
expect(await Response.json(input).text()).toBe(output);
}
// JSON.stringify() returns undefined
expect(await Response.json().text()).toBe("");
// JSON.stringify("") returns '""'
expect(await Response.json("").text()).toBe('""');
});
it("sets the content-type header", () => {
let response = Response.json("hello");
expect(response.type).toBe("basic");
expect(response.headers.get("content-type")).toBe(
"application/json;charset=utf-8"
);
expect(response.status).toBe(200);
});
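// A bare number as the second argument is treated as a shorthand for
// { status } rather than a full ResponseInit object.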
it("supports number status code", () => {
let response = Response.json("hello", 407);
expect(response.type).toBe("basic");
expect(response.headers.get("content-type")).toBe(
"application/json;charset=utf-8"
);
expect(response.status).toBe(407);
});
it("supports headers", () => {
var response = Response.json("hello", {
headers: {
"content-type": "potato",
"x-hello": "world",
},
status: 408,
});
expect(response.headers.get("x-hello")).toBe("world");
expect(response.status).toBe(408);
});
});
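// Response.redirect() is expected to default to status 302 and to keep the
// Location from the first argument, even when the init headers try to
// override it.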
describe("Response.redirect", () => {
it("works", () => {
const inputs = [
"http://example.com",
"http://example.com/",
"http://example.com/hello",
"http://example.com/hello/",
"http://example.com/hello/world",
"http://example.com/hello/world/",
];
for (let input of inputs) {
expect(Response.redirect(input).headers.get("Location")).toBe(input);
}
});
it("supports headers", () => {
var response = Response.redirect("https://example.com", {
headers: {
"content-type": "potato",
"x-hello": "world",
Location: "https://wrong.com",
},
status: 408,
});
expect(response.headers.get("x-hello")).toBe("world");
expect(response.headers.get("Location")).toBe("https://example.com");
expect(response.status).toBe(302);
expect(response.type).toBe("basic");
expect(response.ok).toBe(false);
});
});
describe("Response.error", () => {
it("works", () => {
expect(Response.error().type).toBe("error");
expect(Response.error().ok).toBe(false);
expect(Response.error().status).toBe(0);
});
});
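// clone() must snapshot the headers at clone time; later mutations of the
// original must not leak into the clone, and the clone's body must remain
// readable.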
it("clone", async () => {
gc();
var body = new Response("
hello
", {
headers: {
"content-type": "text/html; charset=utf-8",
},
});
gc();
var clone = body.clone();
gc();
body.headers.set("content-type", "text/plain");
gc();
expect(clone.headers.get("content-type")).toBe("text/html; charset=utf-8");
gc();
expect(body.headers.get("content-type")).toBe("text/plain");
gc();
expect(await clone.text()).toBe("<div>hello</div>");
gc();
});
it("invalid json", async () => {
gc();
var body = new Response("hello
", {
headers: {
"content-type": "text/html; charset=utf-8",
},
});
try {
await body.json();
expect(false).toBe(true);
} catch (exception) {
expect(exception instanceof SyntaxError).toBe(true);
}
});
testBlobInterface((data) => new Response(data), true);
});
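// Request mirrors Response: clone() snapshots headers and preserves the body,
// and the shared body-reader suite runs against a Request with a preset body.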
describe("Request", () => {
it("clone", async () => {
gc();
var body = new Request("https://hello.com", {
headers: {
"content-type": "text/html; charset=utf-8",
},
body: "hello
",
});
gc();
expect(body.headers.get("content-type")).toBe("text/html; charset=utf-8");
gc();
var clone = body.clone();
gc();
body.headers.set("content-type", "text/plain");
gc();
expect(clone.headers.get("content-type")).toBe("text/html; charset=utf-8");
gc();
expect(body.headers.get("content-type")).toBe("text/plain");
gc();
expect(await clone.text()).toBe("<div>hello</div>");
gc();
});
testBlobInterface(
(data) => new Request("https://hello.com", { body: data }),
true
);
});
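// Headers round-trips: get/delete/append/set, with append on an existing name
// joining values as "a, b".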
describe("Headers", () => {
it("writes", async () => {
var headers = new Headers({
"content-type": "text/html; charset=utf-8",
});
gc();
expect(headers.get("content-type")).toBe("text/html; charset=utf-8");
gc();
headers.delete("content-type");
gc();
expect(headers.get("content-type")).toBe(null);
gc();
headers.append("content-type", "text/plain");
gc();
expect(headers.get("content-type")).toBe("text/plain");
gc();
headers.append("content-type", "text/plain");
gc();
expect(headers.get("content-type")).toBe("text/plain, text/plain");
gc();
headers.set("content-type", "text/html; charset=utf-8");
gc();
expect(headers.get("content-type")).toBe("text/html; charset=utf-8");
headers.delete("content-type");
gc();
expect(headers.get("content-type")).toBe(null);
gc();
});
});