author    Dylan Conway <35280289+dylan-conway@users.noreply.github.com> 2023-03-22 18:09:51 -0700
committer GitHub <noreply@github.com> 2023-03-22 18:09:51 -0700
commit    31c2fea74af66d60dceab608b1cfdd9a3f08a7db (patch)
tree      97f871a571ed2d48d3ef2d92240b3dcc0cdbf6b1 /test/js/node/fs/fs.test.ts
parent    a5f92224b586289fc72f0abdb68b08eef9f017db (diff)
A couple bug fixes (#2458)
* fix valid status code range
* update path
* highwatermark option
* throw DOMException
* remove extra transpiler output
* more transpiler tests
* comment
* get index not quickly
* replace with `getDirectIndex`
* update abort test
* throw out of range status code
* promisify test fix
* move stdio test instance files
* working crypto tests
* allow duplicate set-cookie headers
* different formatting
* revert, fix will be in different pr
* it is called
* use min buffer size
* fix url tests
* null origin for other protocols
* remove overload
* add very large file test
* await
* coerce to int64
* 64
* no cast
* add todo blob url tests
* use `tryConvertToInt52`
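The "highwatermark option" bullet corresponds to the new `createReadStream` tests in the diff below. As a minimal sketch of the behavior under test (the file path and sizes here are hypothetical, not from this commit): `highWaterMark` caps the size of each `data` chunk, and for a plain file read every chunk except possibly the last is exactly that size.

```ts
// Sketch: highWaterMark bounds the size of each chunk emitted by a
// file read stream. The scratch file below is hypothetical.
import { createReadStream, writeFileSync, rmSync } from "node:fs";

const path = "/tmp/hwm-example.txt";
writeFileSync(path, "x".repeat(3000)); // 3000-byte file

const stream = createReadStream(path, { highWaterMark: 512 });

let total = 0;
stream.on("data", chunk => {
  // Full chunks are exactly 512 bytes; only the final one may be shorter.
  console.log(chunk.length); // 512, 512, 512, 512, 512, 440
  total += chunk.length;
});

stream.on("end", () => {
  console.log(total === 3000); // true: the chunks reassemble the whole file
  rmSync(path);
});
```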
Diffstat (limited to 'test/js/node/fs/fs.test.ts')
-rw-r--r--  test/js/node/fs/fs.test.ts | 94
1 file changed, 90 insertions(+), 4 deletions(-)
diff --git a/test/js/node/fs/fs.test.ts b/test/js/node/fs/fs.test.ts
index 4636d0d4b..644a3cf40 100644
--- a/test/js/node/fs/fs.test.ts
+++ b/test/js/node/fs/fs.test.ts
@@ -389,7 +389,7 @@ describe("lstat", () => {
});
it("folder metadata is correct", () => {
- const fileStats = lstatSync(new URL("../../test", import.meta.url).toString().slice("file://".length - 1));
+ const fileStats = lstatSync(new URL("../../../../test", import.meta.url).toString().slice("file://".length - 1));
expect(fileStats.isSymbolicLink()).toBe(false);
expect(fileStats.isFile()).toBe(false);
expect(fileStats.isDirectory()).toBe(true);
@@ -424,7 +424,7 @@ describe("stat", () => {
});
it("folder metadata is correct", () => {
- const fileStats = statSync(new URL("../../test", import.meta.url).toString().slice("file://".length - 1));
+ const fileStats = statSync(new URL("../../../../test", import.meta.url).toString().slice("file://".length - 1));
expect(fileStats.isSymbolicLink()).toBe(false);
expect(fileStats.isFile()).toBe(false);
expect(fileStats.isDirectory()).toBe(true);
@@ -605,8 +605,8 @@ describe("createReadStream", () => {
return await new Promise(resolve => {
stream.on("data", chunk => {
expect(chunk instanceof Buffer).toBe(true);
- expect(chunk.length).toBe(1);
- expect(chunk.toString()).toBe(data[i++]);
+ expect(chunk.length).toBe(22);
+ expect(chunk.toString()).toBe(data);
});
stream.on("end", () => {
@@ -614,6 +614,92 @@ describe("createReadStream", () => {
});
});
});
+
+ it("works (highWaterMark 1, 512 chunk)", async () => {
+ var stream = createReadStream(import.meta.dir + "/readLargeFileSync.txt", {
+ highWaterMark: 1,
+ });
+
+ var data = readFileSync(import.meta.dir + "/readLargeFileSync.txt", "utf8");
+ var i = 0;
+ return await new Promise(resolve => {
+ stream.on("data", chunk => {
+ expect(chunk instanceof Buffer).toBe(true);
+ expect(chunk.length).toBe(512);
+ expect(chunk.toString()).toBe(data.slice(i, i + 512));
+ i += 512;
+ });
+
+ stream.on("end", () => {
+ resolve(true);
+ });
+ });
+ });
+
+ it("works (512 chunk)", async () => {
+ var stream = createReadStream(import.meta.dir + "/readLargeFileSync.txt", {
+ highWaterMark: 512,
+ });
+
+ var data = readFileSync(import.meta.dir + "/readLargeFileSync.txt", "utf8");
+ var i = 0;
+ return await new Promise(resolve => {
+ stream.on("data", chunk => {
+ expect(chunk instanceof Buffer).toBe(true);
+ expect(chunk.length).toBe(512);
+ expect(chunk.toString()).toBe(data.slice(i, i + 512));
+ i += 512;
+ });
+
+ stream.on("end", () => {
+ resolve(true);
+ });
+ });
+ });
+
+ it("works with larger highWaterMark (1024 chunk)", async () => {
+ var stream = createReadStream(import.meta.dir + "/readLargeFileSync.txt", {
+ highWaterMark: 1024,
+ });
+
+ var data = readFileSync(import.meta.dir + "/readLargeFileSync.txt", "utf8");
+ var i = 0;
+ return await new Promise(resolve => {
+ stream.on("data", chunk => {
+ expect(chunk instanceof Buffer).toBe(true);
+ expect(chunk.length).toBe(1024);
+ expect(chunk.toString()).toBe(data.slice(i, i + 1024));
+ i += 1024;
+ });
+
+ stream.on("end", () => {
+ resolve(true);
+ });
+ });
+ });
+
+ it("works with very large file", async () => {
+ const tempFile = tmpdir() + "/" + "large-file" + Date.now() + ".txt";
+ await Bun.write(Bun.file(tempFile), "big data big data big data".repeat(10000));
+ var stream = createReadStream(tempFile, {
+ highWaterMark: 512,
+ });
+
+ var data = readFileSync(tempFile, "utf8");
+ var i = 0;
+ return await new Promise(resolve => {
+ stream.on("data", chunk => {
+ expect(chunk instanceof Buffer).toBe(true);
+ expect(chunk.toString()).toBe(data.slice(i, i + chunk.length));
+ i += chunk.length;
+ });
+ stream.on("end", () => {
+ expect(i).toBe("big data big data big data".repeat(10000).length);
+ rmSync(tempFile);
+ resolve(true);
+ });
+ });
+ });
});
describe("fs.WriteStream", () => {