| author | 2023-07-29 06:44:33 +0800 |
|---|---|
| committer | 2023-07-28 15:44:33 -0700 |
| commit | 9b91e3c1a25548217d846932c14e3ccdd0942a99 (patch) |
| tree | 230806bbe73638a1edd3334af6a3acab119b3429 |
| parent | 7a1ebec26fc1a3f480fca5e5508d5ceda5a2ebcf (diff) |
| download | bun-9b91e3c1a25548217d846932c14e3ccdd0942a99.tar.gz bun-9b91e3c1a25548217d846932c14e3ccdd0942a99.tar.zst bun-9b91e3c1a25548217d846932c14e3ccdd0942a99.zip |
fix the chunk boundary (`node:stream:createReadStream`) (#3853)
* Fix the slice boundary.
Close: #3668
* Add more boundary test cases.
* Fix the case where `end` is 0.
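For context on what the fix targets: in Node.js, the `start` and `end` options of `fs.createReadStream` are inclusive, zero-based byte offsets, so `{ start: 0, end: 0 }` must read exactly one byte and `end: 0` cannot be treated as unset just because it is falsy. A minimal sketch of the behavior the new tests assert (the temp-file path is only for the demo):

```js
import { createReadStream, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";

const path = `${tmpdir()}/createReadStream-boundary-demo.txt`;
writeFileSync(path, "12345");

// Inclusive range 0..4 covers all five bytes.
createReadStream(path, { start: 0, end: 4 }).on("data", chunk => {
  console.log(chunk.toString()); // "12345"
});

// Inclusive range 0..0 yields exactly one byte, even though end === 0 is falsy.
createReadStream(path, { start: 0, end: 0 }).on("data", chunk => {
  console.log(chunk.toString()); // "1"
});
```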
-rw-r--r-- | src/js/node/fs.js | 8
-rw-r--r-- | src/js/out/modules/node/fs.js | 8
-rw-r--r-- | test/js/node/stream/node-stream.test.js | 82
3 files changed, 90 insertions, 8 deletions
diff --git a/src/js/node/fs.js b/src/js/node/fs.js
index aa03ffc5c..d287809c6 100644
--- a/src/js/node/fs.js
+++ b/src/js/node/fs.js
@@ -540,15 +540,15 @@ ReadStream = (function (InternalReadStream) {
           chunk = chunk.slice(-n);
           var [_, ...rest] = arguments;
           this.pos = this.bytesRead;
-          if (this.end && this.bytesRead >= this.end) {
-            chunk = chunk.slice(0, this.end - this.start);
+          if (this.end !== undefined && this.bytesRead > this.end) {
+            chunk = chunk.slice(0, this.end - this.start + 1);
           }
           return super.push(chunk, ...rest);
         }
         var end = this.end;
         // This is multi-chunk read case where we go passed the end of the what we want to read in the last chunk
-        if (end && this.bytesRead >= end) {
-          chunk = chunk.slice(0, end - currPos);
+        if (end !== undefined && this.bytesRead > end) {
+          chunk = chunk.slice(0, end - currPos + 1);
           var [_, ...rest] = arguments;
           this.pos = this.bytesRead;
           return super.push(chunk, ...rest);
diff --git a/src/js/out/modules/node/fs.js b/src/js/out/modules/node/fs.js
index 720d03134..7bb354dea 100644
--- a/src/js/out/modules/node/fs.js
+++ b/src/js/out/modules/node/fs.js
@@ -310,13 +310,13 @@ ReadStream = function(InternalReadStream) {
       var n = this.bytesRead - currPos;
       chunk = chunk.slice(-n);
       var [_, ...rest] = arguments;
-      if (this.pos = this.bytesRead, this.end && this.bytesRead >= this.end)
-        chunk = chunk.slice(0, this.end - this.start);
+      if (this.pos = this.bytesRead, this.end !== void 0 && this.bytesRead > this.end)
+        chunk = chunk.slice(0, this.end - this.start + 1);
       return super.push(chunk, ...rest);
     }
     var end = this.end;
-    if (end && this.bytesRead >= end) {
-      chunk = chunk.slice(0, end - currPos);
+    if (end !== void 0 && this.bytesRead > end) {
+      chunk = chunk.slice(0, end - currPos + 1);
       var [_, ...rest] = arguments;
       return this.pos = this.bytesRead, super.push(chunk, ...rest);
     }
diff --git a/test/js/node/stream/node-stream.test.js b/test/js/node/stream/node-stream.test.js
index 6bff28b94..51252a5f6 100644
--- a/test/js/node/stream/node-stream.test.js
+++ b/test/js/node/stream/node-stream.test.js
@@ -1,5 +1,8 @@
 import { expect, describe, it } from "bun:test";
 import { Readable, Writable, Duplex, Transform, PassThrough } from "node:stream";
+import { createReadStream } from "node:fs";
+import { tmpdir } from "node:os";
+import { writeFileSync } from "node:fs";
 
 describe("Readable", () => {
   it("should be able to be created without _construct method defined", done => {
@@ -38,6 +41,85 @@ describe("Readable", () => {
     readable.pipe(writable);
   });
 
+  it("should be able to be piped via .pipe, issue #3668", done => {
+    const path = `${tmpdir()}/${Date.now()}.testReadStream.txt`;
+    writeFileSync(path, "12345");
+    const stream = createReadStream(path, { start: 0, end: 4 });
+
+    const writable = new Writable({
+      write(chunk, encoding, callback) {
+        try {
+          expect(chunk.toString()).toBe("12345");
+        } catch (err) {
+          done(err);
+          return;
+        }
+        callback();
+        done();
+      },
+    });
+
+    stream.on("error", err => {
+      done(err);
+    });
+
+    stream.pipe(writable);
+  });
+  it("should be able to be piped via .pipe, both start and end are 0", done => {
+    const path = `${tmpdir()}/${Date.now()}.testReadStream2.txt`;
+    writeFileSync(path, "12345");
+    const stream = createReadStream(path, { start: 0, end: 0 });
+
+    const writable = new Writable({
+      write(chunk, encoding, callback) {
+        try {
+          // Both start and end are inclusive and start counting at 0.
+          expect(chunk.toString()).toBe("1");
+        } catch (err) {
+          done(err);
+          return;
+        }
+        callback();
+        done();
+      },
+    });
+
+    stream.on("error", err => {
+      done(err);
+    });
+
+    stream.pipe(writable);
+  });
+  it("should be able to be piped via .pipe with a large file", done => {
+    const length = 128 * 1024;
+    const data = "B".repeat(length);
+    const path = `${tmpdir()}/${Date.now()}.testReadStreamLargeFile.txt`;
+    writeFileSync(path, data);
+    const stream = createReadStream(path, { start: 0, end: length - 1 });
+
+    let res = "";
+    let count = 0;
+    const writable = new Writable({
+      write(chunk, encoding, callback) {
+        count += 1;
+        res += chunk;
+        callback();
+      },
+    });
+    writable.on("finish", () => {
+      try {
+        expect(res).toEqual(data);
+        expect(count).toBeGreaterThan(1);
+      } catch (err) {
+        return done(err);
+      }
+      done();
+    });
+    stream.on("error", err => {
+      done(err);
+    });
+    stream.pipe(writable);
+  });
 });
 
 describe("Duplex", () => {
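The substance of the change is the last-chunk trim: because `end` is inclusive, the final chunk must keep `end - currPos + 1` bytes rather than `end - currPos`, and the guard compares against `undefined` instead of relying on truthiness so that `end: 0` still triggers trimming. A standalone sketch of that boundary arithmetic (a hypothetical helper, not Bun's actual ReadStream internals):

```js
// currPos: absolute offset where this chunk begins; bytesRead: total bytes read
// after appending the chunk; end: inclusive index of the last byte wanted.
function trimLastChunk(chunk, currPos, bytesRead, end) {
  // Compare against undefined instead of using truthiness, so end === 0 counts.
  if (end !== undefined && bytesRead > end) {
    // Inclusive bound: keep (end - currPos + 1) bytes, not (end - currPos).
    return chunk.slice(0, end - currPos + 1);
  }
  return chunk;
}

// With end = 0, a 5-byte chunk starting at offset 0 keeps exactly one byte:
// trimLastChunk(Buffer.from("12345"), 0, 5, 0).toString() === "1"
// The old `end && bytesRead >= end` guard skipped this case entirely.
```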