author	Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>	2022-06-22 23:21:48 -0700
committer	Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>	2022-06-22 23:21:48 -0700
commit	729d445b6885f69dd2c6355f38707bd42851c791 (patch)
tree	f87a7c408929ea3f57bbb7ace380cf869da83c0e /src/bun.js/webcore
parent	25f820c6bf1d8ec6d444ef579cc036b8c0607b75 (diff)
download	bun-jarred/rename.tar.gz, bun-jarred/rename.tar.zst, bun-jarred/rename.zip
change the directory structure (jarred/rename)
Diffstat (limited to 'src/bun.js/webcore')
-rw-r--r--	src/bun.js/webcore/base64.zig	445
-rw-r--r--	src/bun.js/webcore/encoding.zig	1247
-rw-r--r--	src/bun.js/webcore/response.zig	4844
-rw-r--r--	src/bun.js/webcore/streams.zig	2208
4 files changed, 8744 insertions, 0 deletions
diff --git a/src/bun.js/webcore/base64.zig b/src/bun.js/webcore/base64.zig
new file mode 100644
index 000000000..50c1ac68d
--- /dev/null
+++ b/src/bun.js/webcore/base64.zig
@@ -0,0 +1,445 @@
+// this is ripped from zig's stdlib
+const std = @import("std");
+const assert = std.debug.assert;
+const testing = std.testing;
+const mem = std.mem;
+
+pub const Error = error{
+ InvalidCharacter,
+ InvalidPadding,
+ NoSpaceLeft,
+};
+
+/// Base64 codecs
+pub const Codecs = struct {
+ alphabet_chars: [64]u8,
+ pad_char: ?u8,
+ decoderWithIgnore: fn (ignore: []const u8) Base64DecoderWithIgnore,
+ Encoder: Base64Encoder,
+ Decoder: Base64Decoder,
+};
+
+pub const standard_alphabet_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".*;
+fn standardBase64DecoderWithIgnore(ignore: []const u8) Base64DecoderWithIgnore {
+ return Base64DecoderWithIgnore.init(standard_alphabet_chars, '=', ignore);
+}
+
+/// Standard Base64 codecs, with padding
+pub const standard = Codecs{
+ .alphabet_chars = standard_alphabet_chars,
+ .pad_char = '=',
+ .decoderWithIgnore = standardBase64DecoderWithIgnore,
+ .Encoder = Base64Encoder.init(standard_alphabet_chars, '='),
+ .Decoder = Base64Decoder.init(standard_alphabet_chars, '='),
+};
+
+/// Standard Base64 codecs, without padding
+pub const standard_no_pad = Codecs{
+ .alphabet_chars = standard_alphabet_chars,
+ .pad_char = null,
+ .decoderWithIgnore = standardBase64DecoderWithIgnore,
+ .Encoder = Base64Encoder.init(standard_alphabet_chars, null),
+ .Decoder = Base64Decoder.init(standard_alphabet_chars, null),
+};
+
+pub const url_safe_alphabet_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_".*;
+fn urlSafeBase64DecoderWithIgnore(ignore: []const u8) Base64DecoderWithIgnore {
+ return Base64DecoderWithIgnore.init(url_safe_alphabet_chars, null, ignore);
+}
+
+/// URL-safe Base64 codecs, with padding
+pub const url_safe = Codecs{
+ .alphabet_chars = url_safe_alphabet_chars,
+ .pad_char = '=',
+ .decoderWithIgnore = urlSafeBase64DecoderWithIgnore,
+ .Encoder = Base64Encoder.init(url_safe_alphabet_chars, '='),
+ .Decoder = Base64Decoder.init(url_safe_alphabet_chars, '='),
+};
+
+/// URL-safe Base64 codecs, without padding
+pub const url_safe_no_pad = Codecs{
+ .alphabet_chars = url_safe_alphabet_chars,
+ .pad_char = null,
+ .decoderWithIgnore = urlSafeBase64DecoderWithIgnore,
+ .Encoder = Base64Encoder.init(url_safe_alphabet_chars, null),
+ .Decoder = Base64Decoder.init(url_safe_alphabet_chars, null),
+};
+
+pub const standard_pad_char = @compileError("deprecated; use standard.pad_char");
+pub const standard_encoder = @compileError("deprecated; use standard.Encoder");
+pub const standard_decoder = @compileError("deprecated; use standard.Decoder");
+
+pub const Base64Encoder = struct {
+ alphabet_chars: [64]u8,
+ pad_char: ?u8,
+
+ /// A bunch of assertions, then simply pass the data right through.
+ pub fn init(alphabet_chars: [64]u8, pad_char: ?u8) Base64Encoder {
+ assert(alphabet_chars.len == 64);
+ var char_in_alphabet = [_]bool{false} ** 256;
+ for (alphabet_chars) |c| {
+ assert(!char_in_alphabet[c]);
+ assert(pad_char == null or c != pad_char.?);
+ char_in_alphabet[c] = true;
+ }
+ return Base64Encoder{
+ .alphabet_chars = alphabet_chars,
+ .pad_char = pad_char,
+ };
+ }
+
+ /// Compute the encoded length
+ pub fn calcSize(encoder: *const Base64Encoder, source_len: usize) usize {
+ if (encoder.pad_char != null) {
+ return @divTrunc(source_len + 2, 3) * 4;
+ } else {
+ const leftover = source_len % 3;
+ return @divTrunc(source_len, 3) * 4 + @divTrunc(leftover * 4 + 2, 3);
+ }
+ }
+
+ /// dest.len must at least be what you get from ::calcSize.
+ pub fn encode(encoder: *const Base64Encoder, dest: []u8, source: []const u8) []const u8 {
+ const out_len = encoder.calcSize(source.len);
+ assert(dest.len >= out_len);
+
+ var acc: u12 = 0;
+ var acc_len: u4 = 0;
+ var out_idx: usize = 0;
+ for (source) |v| {
+ acc = (acc << 8) + v;
+ acc_len += 8;
+ while (acc_len >= 6) {
+ acc_len -= 6;
+ dest[out_idx] = encoder.alphabet_chars[@truncate(u6, (acc >> acc_len))];
+ out_idx += 1;
+ }
+ }
+ if (acc_len > 0) {
+ dest[out_idx] = encoder.alphabet_chars[@truncate(u6, (acc << 6 - acc_len))];
+ out_idx += 1;
+ }
+ if (encoder.pad_char) |pad_char| {
+ for (dest[out_idx..]) |*pad| {
+ pad.* = pad_char;
+ }
+ }
+ return dest[0..out_len];
+ }
+};
+
+pub const Base64Decoder = struct {
+ const invalid_char: u8 = 0xff;
+
+ /// e.g. 'A' => 0.
+ /// `invalid_char` for any value not in the 64 alphabet chars.
+ char_to_index: [256]u8,
+ pad_char: ?u8,
+
+ pub fn init(alphabet_chars: [64]u8, pad_char: ?u8) Base64Decoder {
+ var result = Base64Decoder{
+ .char_to_index = [_]u8{invalid_char} ** 256,
+ .pad_char = pad_char,
+ };
+
+ var char_in_alphabet = [_]bool{false} ** 256;
+ for (alphabet_chars) |c, i| {
+ assert(!char_in_alphabet[c]);
+ assert(pad_char == null or c != pad_char.?);
+
+ result.char_to_index[c] = @intCast(u8, i);
+ char_in_alphabet[c] = true;
+ }
+ return result;
+ }
+
+ /// Return the maximum possible decoded size for a given input length - The actual length may be less if the input includes padding.
+ /// `InvalidPadding` is returned if the input length is not valid.
+ pub fn calcSizeUpperBound(decoder: *const Base64Decoder, source_len: usize) Error!usize {
+ var result = source_len / 4 * 3;
+ const leftover = source_len % 4;
+ if (decoder.pad_char != null) {
+ if (leftover % 4 != 0) return error.InvalidPadding;
+ } else {
+ if (leftover % 4 == 1) return error.InvalidPadding;
+ result += leftover * 3 / 4;
+ }
+ return result;
+ }
+
+ /// Return the exact decoded size for a slice.
+ /// `InvalidPadding` is returned if the input length is not valid.
+ pub fn calcSizeForSlice(decoder: *const Base64Decoder, source: []const u8) Error!usize {
+ const source_len = source.len;
+ var result = try decoder.calcSizeUpperBound(source_len);
+ if (decoder.pad_char) |pad_char| {
+ if (source_len >= 1 and source[source_len - 1] == pad_char) result -= 1;
+ if (source_len >= 2 and source[source_len - 2] == pad_char) result -= 1;
+ }
+ return result;
+ }
+
+ /// dest.len must be what you get from ::calcSizeForSlice.
+ /// invalid characters result in error.InvalidCharacter.
+ /// invalid padding results in error.InvalidPadding.
+ pub fn decode(decoder: *const Base64Decoder, dest: []u8, source: []const u8) Error!void {
+ if (decoder.pad_char != null and source.len % 4 != 0) return error.InvalidPadding;
+ var acc: u12 = 0;
+ var acc_len: u4 = 0;
+ var dest_idx: usize = 0;
+ var leftover_idx: ?usize = null;
+ for (source) |c, src_idx| {
+ const d = decoder.char_to_index[c];
+ if (d == invalid_char) {
+ if (decoder.pad_char == null or c != decoder.pad_char.?) return error.InvalidCharacter;
+ leftover_idx = src_idx;
+ break;
+ }
+ acc = (acc << 6) + d;
+ acc_len += 6;
+ if (acc_len >= 8) {
+ acc_len -= 8;
+ dest[dest_idx] = @truncate(u8, acc >> acc_len);
+ dest_idx += 1;
+ }
+ }
+ if (acc_len > 4 or (acc & (@as(u12, 1) << acc_len) - 1) != 0) {
+ return error.InvalidPadding;
+ }
+ if (leftover_idx == null) return;
+ var leftover = source[leftover_idx.?..];
+ if (decoder.pad_char) |pad_char| {
+ const padding_len = acc_len / 2;
+ var padding_chars: usize = 0;
+ for (leftover) |c| {
+ if (c != pad_char) {
+ return if (c == Base64Decoder.invalid_char) error.InvalidCharacter else error.InvalidPadding;
+ }
+ padding_chars += 1;
+ }
+ if (padding_chars != padding_len) return error.InvalidPadding;
+ }
+ }
+};
+
+pub const Base64DecoderWithIgnore = struct {
+ decoder: Base64Decoder,
+ char_is_ignored: [256]bool,
+
+ pub fn init(alphabet_chars: [64]u8, pad_char: ?u8, ignore_chars: []const u8) Base64DecoderWithIgnore {
+ var result = Base64DecoderWithIgnore{
+ .decoder = Base64Decoder.init(alphabet_chars, pad_char),
+ .char_is_ignored = [_]bool{false} ** 256,
+ };
+ for (ignore_chars) |c| {
+ assert(result.decoder.char_to_index[c] == Base64Decoder.invalid_char);
+ assert(!result.char_is_ignored[c]);
+ assert(result.decoder.pad_char != c);
+ result.char_is_ignored[c] = true;
+ }
+ return result;
+ }
+
+ /// Return the maximum possible decoded size for a given input length - The actual length may be less if the input includes padding
+ /// `InvalidPadding` is returned if the input length is not valid.
+ pub fn calcSizeUpperBound(decoder_with_ignore: *const Base64DecoderWithIgnore, source_len: usize) Error!usize {
+ var result = source_len / 4 * 3;
+ if (decoder_with_ignore.decoder.pad_char == null) {
+ const leftover = source_len % 4;
+ result += leftover * 3 / 4;
+ }
+ return result;
+ }
+
+ /// Invalid characters that are not ignored result in error.InvalidCharacter.
+ /// Invalid padding results in error.InvalidPadding.
+ /// Decoding more data than can fit in dest results in error.NoSpaceLeft. See also ::calcSizeUpperBound.
+ /// Returns the number of bytes written to dest.
+ pub fn decode(decoder_with_ignore: *const Base64DecoderWithIgnore, dest: []u8, source: []const u8) Error!usize {
+ const decoder = &decoder_with_ignore.decoder;
+ var acc: u12 = 0;
+ var acc_len: u4 = 0;
+ var dest_idx: usize = 0;
+ var leftover_idx: ?usize = null;
+ for (source) |c, src_idx| {
+ if (decoder_with_ignore.char_is_ignored[c]) continue;
+ const d = decoder.char_to_index[c];
+ if (d == Base64Decoder.invalid_char) {
+ if (decoder.pad_char == null or c != decoder.pad_char.?) return error.InvalidCharacter;
+ leftover_idx = src_idx;
+ break;
+ }
+ acc = (acc << 6) + d;
+ acc_len += 6;
+ if (acc_len >= 8) {
+ if (dest_idx == dest.len) return error.NoSpaceLeft;
+ acc_len -= 8;
+ dest[dest_idx] = @truncate(u8, acc >> acc_len);
+ dest_idx += 1;
+ }
+ }
+ if (acc_len > 4 or (acc & (@as(u12, 1) << acc_len) - 1) != 0) {
+ return error.InvalidPadding;
+ }
+ const padding_len = acc_len / 2;
+ if (leftover_idx == null) {
+ if (decoder.pad_char != null and padding_len != 0) return error.InvalidPadding;
+ return dest_idx;
+ }
+ var leftover = source[leftover_idx.?..];
+ if (decoder.pad_char) |pad_char| {
+ var padding_chars: usize = 0;
+ for (leftover) |c| {
+ if (decoder_with_ignore.char_is_ignored[c]) continue;
+ if (c != pad_char) {
+ return if (c == Base64Decoder.invalid_char) error.InvalidCharacter else error.InvalidPadding;
+ }
+ padding_chars += 1;
+ }
+ if (padding_chars != padding_len) return error.InvalidPadding;
+ }
+ return dest_idx;
+ }
+};
+
+test "base64" {
+ @setEvalBranchQuota(8000);
+ try testBase64();
+ comptime try testAllApis(standard, "comptime", "Y29tcHRpbWU=");
+}
+
+test "base64 url_safe_no_pad" {
+ @setEvalBranchQuota(8000);
+ try testBase64UrlSafeNoPad();
+ comptime try testAllApis(url_safe_no_pad, "comptime", "Y29tcHRpbWU");
+}
+
+fn testBase64() !void {
+ const codecs = standard;
+
+ try testAllApis(codecs, "", "");
+ try testAllApis(codecs, "f", "Zg==");
+ try testAllApis(codecs, "fo", "Zm8=");
+ try testAllApis(codecs, "foo", "Zm9v");
+ try testAllApis(codecs, "foob", "Zm9vYg==");
+ try testAllApis(codecs, "fooba", "Zm9vYmE=");
+ try testAllApis(codecs, "foobar", "Zm9vYmFy");
+
+ try testDecodeIgnoreSpace(codecs, "", " ");
+ try testDecodeIgnoreSpace(codecs, "f", "Z g= =");
+ try testDecodeIgnoreSpace(codecs, "fo", " Zm8=");
+ try testDecodeIgnoreSpace(codecs, "foo", "Zm9v ");
+ try testDecodeIgnoreSpace(codecs, "foob", "Zm9vYg = = ");
+ try testDecodeIgnoreSpace(codecs, "fooba", "Zm9v YmE=");
+ try testDecodeIgnoreSpace(codecs, "foobar", " Z m 9 v Y m F y ");
+
+ // test getting some api errors
+ try testError(codecs, "A", error.InvalidPadding);
+ try testError(codecs, "AA", error.InvalidPadding);
+ try testError(codecs, "AAA", error.InvalidPadding);
+ try testError(codecs, "A..A", error.InvalidCharacter);
+ try testError(codecs, "AA=A", error.InvalidPadding);
+ try testError(codecs, "AA/=", error.InvalidPadding);
+ try testError(codecs, "A/==", error.InvalidPadding);
+ try testError(codecs, "A===", error.InvalidPadding);
+ try testError(codecs, "====", error.InvalidPadding);
+
+ try testNoSpaceLeftError(codecs, "AA==");
+ try testNoSpaceLeftError(codecs, "AAA=");
+ try testNoSpaceLeftError(codecs, "AAAA");
+ try testNoSpaceLeftError(codecs, "AAAAAA==");
+}
+
+fn testBase64UrlSafeNoPad() !void {
+ const codecs = url_safe_no_pad;
+
+ try testAllApis(codecs, "", "");
+ try testAllApis(codecs, "f", "Zg");
+ try testAllApis(codecs, "fo", "Zm8");
+ try testAllApis(codecs, "foo", "Zm9v");
+ try testAllApis(codecs, "foob", "Zm9vYg");
+ try testAllApis(codecs, "fooba", "Zm9vYmE");
+ try testAllApis(codecs, "foobar", "Zm9vYmFy");
+
+ try testDecodeIgnoreSpace(codecs, "", " ");
+ try testDecodeIgnoreSpace(codecs, "f", "Z g ");
+ try testDecodeIgnoreSpace(codecs, "fo", " Zm8");
+ try testDecodeIgnoreSpace(codecs, "foo", "Zm9v ");
+ try testDecodeIgnoreSpace(codecs, "foob", "Zm9vYg ");
+ try testDecodeIgnoreSpace(codecs, "fooba", "Zm9v YmE");
+ try testDecodeIgnoreSpace(codecs, "foobar", " Z m 9 v Y m F y ");
+
+ // test getting some api errors
+ try testError(codecs, "A", error.InvalidPadding);
+ try testError(codecs, "AAA=", error.InvalidCharacter);
+ try testError(codecs, "A..A", error.InvalidCharacter);
+ try testError(codecs, "AA=A", error.InvalidCharacter);
+ try testError(codecs, "AA/=", error.InvalidCharacter);
+ try testError(codecs, "A/==", error.InvalidCharacter);
+ try testError(codecs, "A===", error.InvalidCharacter);
+ try testError(codecs, "====", error.InvalidCharacter);
+
+ try testNoSpaceLeftError(codecs, "AA");
+ try testNoSpaceLeftError(codecs, "AAA");
+ try testNoSpaceLeftError(codecs, "AAAA");
+ try testNoSpaceLeftError(codecs, "AAAAAA");
+}
+
+fn testAllApis(codecs: Codecs, expected_decoded: []const u8, expected_encoded: []const u8) !void {
+ // Base64Encoder
+ {
+ var buffer: [0x100]u8 = undefined;
+ const encoded = codecs.Encoder.encode(&buffer, expected_decoded);
+ try testing.expectEqualSlices(u8, expected_encoded, encoded);
+ }
+
+ // Base64Decoder
+ {
+ var buffer: [0x100]u8 = undefined;
+ var decoded = buffer[0..try codecs.Decoder.calcSizeForSlice(expected_encoded)];
+ try codecs.Decoder.decode(decoded, expected_encoded);
+ try testing.expectEqualSlices(u8, expected_decoded, decoded);
+ }
+
+ // Base64DecoderWithIgnore
+ {
+ const decoder_ignore_nothing = codecs.decoderWithIgnore("");
+ var buffer: [0x100]u8 = undefined;
+ var decoded = buffer[0..try decoder_ignore_nothing.calcSizeUpperBound(expected_encoded.len)];
+ var written = try decoder_ignore_nothing.decode(decoded, expected_encoded);
+ try testing.expect(written <= decoded.len);
+ try testing.expectEqualSlices(u8, expected_decoded, decoded[0..written]);
+ }
+}
+
+fn testDecodeIgnoreSpace(codecs: Codecs, expected_decoded: []const u8, encoded: []const u8) !void {
+ const decoder_ignore_space = codecs.decoderWithIgnore(" ");
+ var buffer: [0x100]u8 = undefined;
+ var decoded = buffer[0..try decoder_ignore_space.calcSizeUpperBound(encoded.len)];
+ var written = try decoder_ignore_space.decode(decoded, encoded);
+ try testing.expectEqualSlices(u8, expected_decoded, decoded[0..written]);
+}
+
+fn testError(codecs: Codecs, encoded: []const u8, expected_err: anyerror) !void {
+ const decoder_ignore_space = codecs.decoderWithIgnore(" ");
+ var buffer: [0x100]u8 = undefined;
+ if (codecs.Decoder.calcSizeForSlice(encoded)) |decoded_size| {
+ var decoded = buffer[0..decoded_size];
+ if (codecs.Decoder.decode(decoded, encoded)) |_| {
+ return error.ExpectedError;
+ } else |err| if (err != expected_err) return err;
+ } else |err| if (err != expected_err) return err;
+
+ if (decoder_ignore_space.decode(buffer[0..], encoded)) |_| {
+ return error.ExpectedError;
+ } else |err| if (err != expected_err) return err;
+}
+
+fn testNoSpaceLeftError(codecs: Codecs, encoded: []const u8) !void {
+ const decoder_ignore_space = codecs.decoderWithIgnore(" ");
+ var buffer: [0x100]u8 = undefined;
+ var decoded = buffer[0 .. (try codecs.Decoder.calcSizeForSlice(encoded)) - 1];
+ if (decoder_ignore_space.decode(decoded, encoded)) |_| {
+ return error.ExpectedError;
+ } else |err| if (err != error.NoSpaceLeft) return err;
+}
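
A minimal usage sketch of the codec API added in base64.zig above, mirroring the testAllApis and testDecodeIgnoreSpace tests; the `base64` import path, buffer sizes, and test name are illustrative, not part of the patch:

const std = @import("std");
const base64 = @import("base64.zig"); // illustrative path; adjust to where the file lives

test "base64 usage sketch" {
    // Encode: dest must hold at least Encoder.calcSize(source.len) bytes.
    var enc_buf: [64]u8 = undefined;
    const encoded = base64.standard.Encoder.encode(&enc_buf, "foobar");
    try std.testing.expectEqualSlices(u8, "Zm9vYmFy", encoded);

    // Decode: calcSizeForSlice gives the exact decoded length (it accounts for '=' padding).
    var dec_buf: [64]u8 = undefined;
    const decoded_len = try base64.standard.Decoder.calcSizeForSlice(encoded);
    try base64.standard.Decoder.decode(dec_buf[0..decoded_len], encoded);
    try std.testing.expectEqualSlices(u8, "foobar", dec_buf[0..decoded_len]);

    // Whitespace-tolerant decoding: decoderWithIgnore returns the number of bytes written.
    const ignoring = base64.standard.decoderWithIgnore(" ");
    var loose_buf: [64]u8 = undefined;
    const written = try ignoring.decode(&loose_buf, "Zm9v YmFy");
    try std.testing.expectEqualSlices(u8, "foobar", loose_buf[0..written]);
}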
diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig
new file mode 100644
index 000000000..cfef1c0d7
--- /dev/null
+++ b/src/bun.js/webcore/encoding.zig
@@ -0,0 +1,1247 @@
+const std = @import("std");
+const Api = @import("../../api/schema.zig").Api;
+const RequestContext = @import("../../http.zig").RequestContext;
+const MimeType = @import("../../http.zig").MimeType;
+const ZigURL = @import("../../url.zig").URL;
+const HTTPClient = @import("http");
+const NetworkThread = HTTPClient.NetworkThread;
+
+const JSC = @import("../../jsc.zig");
+const js = JSC.C;
+
+const Method = @import("../../http/method.zig").Method;
+
+const ObjectPool = @import("../../pool.zig").ObjectPool;
+const bun = @import("../../global.zig");
+const Output = @import("../../global.zig").Output;
+const MutableString = @import("../../global.zig").MutableString;
+const strings = @import("../../global.zig").strings;
+const string = @import("../../global.zig").string;
+const default_allocator = @import("../../global.zig").default_allocator;
+const FeatureFlags = @import("../../global.zig").FeatureFlags;
+const ArrayBuffer = @import("../base.zig").ArrayBuffer;
+const Properties = @import("../base.zig").Properties;
+const NewClass = @import("../base.zig").NewClass;
+const d = @import("../base.zig").d;
+const castObj = @import("../base.zig").castObj;
+const getAllocator = @import("../base.zig").getAllocator;
+const JSPrivateDataPtr = @import("../base.zig").JSPrivateDataPtr;
+const GetJSPrivateData = @import("../base.zig").GetJSPrivateData;
+const Environment = @import("../../env.zig");
+const ZigString = JSC.ZigString;
+const JSInternalPromise = JSC.JSInternalPromise;
+const JSPromise = JSC.JSPromise;
+const JSValue = JSC.JSValue;
+const JSError = JSC.JSError;
+const JSGlobalObject = JSC.JSGlobalObject;
+
+const VirtualMachine = @import("../javascript.zig").VirtualMachine;
+const Task = @import("../javascript.zig").Task;
+
+const picohttp = @import("picohttp");
+
+pub const TextEncoder = struct {
+ filler: u32 = 0,
+
+ const utf8_string: string = "utf-8";
+
+ pub export fn TextEncoder__encode(
+ globalThis: *JSGlobalObject,
+ zig_str: *const ZigString,
+ ) JSValue {
+ var ctx = globalThis.ref();
+ if (zig_str.is16Bit()) {
+ var bytes = strings.toUTF8AllocWithType(
+ default_allocator,
+ @TypeOf(zig_str.utf16Slice()),
+ zig_str.utf16Slice(),
+ ) catch {
+ return JSC.toInvalidArguments("Out of memory", .{}, ctx);
+ };
+ return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJS(ctx, null);
+ } else {
+ // latin1 always has the same length as utf-8
+ // so we can use the Gigacage to allocate the buffer
+ var array = JSC.JSValue.createUninitializedUint8Array(ctx.ptr(), zig_str.len);
+ var buffer = array.asArrayBuffer(ctx.ptr()) orelse
+ return JSC.toInvalidArguments("Out of memory", .{}, ctx);
+ const result = strings.copyLatin1IntoUTF8(buffer.slice(), []const u8, zig_str.slice());
+ std.debug.assert(result.written == zig_str.len);
+ return array;
+ }
+
+ unreachable;
+ }
+
+ // This is a fast path for copying a Rope string into a Uint8Array.
+ // This avoids an extra temporary string allocation
+ const RopeStringEncoder = struct {
+ globalThis: *JSGlobalObject,
+ allocator: std.mem.Allocator,
+ buffer_value: JSC.JSValue,
+ slice: []u8,
+ tail: usize = 0,
+ any_utf16: bool = false,
+
+ pub fn append8(it: *JSC.JSString.Iterator, ptr: [*]const u8, len: u32) callconv(.C) void {
+ var this = bun.cast(*RopeStringEncoder, it.data.?);
+ // we use memcpy here instead of encoding
+ // SIMD only has an impact for long strings
+ // so in a case like this, the fastest path is to memcpy
+ // and then later, we can use the SIMD version
+ @memcpy(this.slice.ptr + this.tail, ptr, len);
+ this.tail += len;
+ }
+ pub fn append16(it: *JSC.JSString.Iterator, _: [*]const u16, _: u32) callconv(.C) void {
+ var this = bun.cast(*RopeStringEncoder, it.data.?);
+ this.any_utf16 = true;
+ it.stop = 1;
+ return;
+ }
+ pub fn write8(it: *JSC.JSString.Iterator, ptr: [*]const u8, len: u32, offset: u32) callconv(.C) void {
+ var this = bun.cast(*RopeStringEncoder, it.data.?);
+ // we use memcpy here instead of encoding
+ // SIMD only has an impact for long strings
+ // so in a case like this, the fastest path is to memcpy
+ // and then later, we can use the SIMD version
+ @memcpy(this.slice.ptr + offset, ptr, len);
+ }
+ pub fn write16(it: *JSC.JSString.Iterator, _: [*]const u16, _: u32, _: u32) callconv(.C) void {
+ var this = bun.cast(*RopeStringEncoder, it.data.?);
+ this.any_utf16 = true;
+ it.stop = 1;
+ return;
+ }
+
+ pub fn iter(this: *RopeStringEncoder) JSC.JSString.Iterator {
+ return .{
+ .data = this,
+ .stop = 0,
+ .append8 = append8,
+ .append16 = append16,
+ .write8 = write8,
+ .write16 = write16,
+ };
+ }
+ };
+
+ // This fast path is only suitable for Latin-1 strings.
+ // It's not suitable for UTF-16 strings, because their UTF-8 byteLength can't be known without scanning them
+ pub export fn TextEncoder__encodeRopeString(
+ globalThis: *JSGlobalObject,
+ rope_str: *JSC.JSString,
+ ) JSValue {
+ var ctx = globalThis.ref();
+ if (comptime Environment.allow_assert) std.debug.assert(rope_str.is8Bit());
+ var array = JSC.JSValue.createUninitializedUint8Array(ctx.ptr(), rope_str.length());
+ var encoder = RopeStringEncoder{
+ .globalThis = globalThis,
+ .allocator = bun.default_allocator,
+ .buffer_value = array,
+ .slice = (array.asArrayBuffer(globalThis) orelse return JSC.JSValue.jsUndefined()).slice(),
+ };
+ var iter = encoder.iter();
+ rope_str.iterator(globalThis, &iter);
+
+ if (encoder.any_utf16) {
+ return JSC.JSValue.jsUndefined();
+ }
+
+ if (comptime !bun.FeatureFlags.latin1_is_now_ascii) {
+ strings.replaceLatin1WithUTF8(encoder.slice);
+ }
+
+ return array;
+ }
+
+ const read_key = ZigString.init("read");
+ const written_key = ZigString.init("written");
+
+ pub export fn TextEncoder__encodeInto(
+ globalThis: *JSC.JSGlobalObject,
+ input: *const ZigString,
+ buf_ptr: [*]u8,
+ buf_len: usize,
+ ) JSC.JSValue {
+ var output = buf_ptr[0..buf_len];
+ var result: strings.EncodeIntoResult = strings.EncodeIntoResult{ .read = 0, .written = 0 };
+ if (input.is16Bit()) {
+ const utf16_slice = input.utf16Slice();
+ result = strings.copyUTF16IntoUTF8(output, @TypeOf(utf16_slice), utf16_slice);
+ } else {
+ result = strings.copyLatin1IntoUTF8(output, @TypeOf(input.slice()), input.slice());
+ }
+ return JSC.JSValue.createObject2(globalThis, &read_key, &written_key, JSValue.jsNumber(result.read), JSValue.jsNumber(result.written));
+ }
+};
+
+comptime {
+ if (!JSC.is_bindgen) {
+ _ = TextEncoder.TextEncoder__encode;
+ _ = TextEncoder.TextEncoder__encodeInto;
+ _ = TextEncoder.TextEncoder__encodeRopeString;
+ }
+}
+
+/// https://encoding.spec.whatwg.org/encodings.json
+pub const EncodingLabel = enum {
+ @"UTF-8",
+ @"IBM866",
+ @"ISO-8859-2",
+ @"ISO-8859-3",
+ @"ISO-8859-4",
+ @"ISO-8859-5",
+ @"ISO-8859-6",
+ @"ISO-8859-7",
+ @"ISO-8859-8",
+ @"ISO-8859-8-I",
+ @"ISO-8859-10",
+ @"ISO-8859-13",
+ @"ISO-8859-14",
+ @"ISO-8859-15",
+ @"ISO-8859-16",
+ @"KOI8-R",
+ @"KOI8-U",
+ @"macintosh",
+ @"windows-874",
+ @"windows-1250",
+ @"windows-1251",
+ /// Also known as
+ /// - ASCII
+ /// - latin1
+ @"windows-1252",
+ @"windows-1253",
+ @"windows-1254",
+ @"windows-1255",
+ @"windows-1256",
+ @"windows-1257",
+ @"windows-1258",
+ @"x-mac-cyrillic",
+ @"Big5",
+ @"EUC-JP",
+ @"ISO-2022-JP",
+ @"Shift_JIS",
+ @"EUC-KR",
+ @"UTF-16BE",
+ @"UTF-16LE",
+ @"x-user-defined",
+
+ pub const Map = std.enums.EnumMap(EncodingLabel, string);
+ pub const label: Map = brk: {
+ var map = Map.initFull("");
+ map.put(EncodingLabel.@"UTF-8", "utf-8");
+ map.put(EncodingLabel.@"UTF-16LE", "utf-16le");
+ map.put(EncodingLabel.@"windows-1252", "windows-1252");
+ break :brk map;
+ };
+
+ const utf16_names = [_]string{
+ "ucs-2",
+ "utf-16",
+ "unicode",
+ "utf-16le",
+ "csunicode",
+ "unicodefeff",
+ "iso-10646-ucs-2",
+ };
+
+ const utf8_names = [_]string{
+ "utf8",
+ "utf-8",
+ "unicode11utf8",
+ "unicode20utf8",
+ "x-unicode20utf8",
+ "unicode-1-1-utf-8",
+ };
+
+ const latin1_names = [_]string{
+ "l1",
+ "ascii",
+ "cp819",
+ "cp1252",
+ "ibm819",
+ "latin1",
+ "iso88591",
+ "us-ascii",
+ "x-cp1252",
+ "iso8859-1",
+ "iso_8859-1",
+ "iso-8859-1",
+ "iso-ir-100",
+ "csisolatin1",
+ "windows-1252",
+ "ansi_x3.4-1968",
+ "iso_8859-1:1987",
+ };
+
+ pub const latin1 = EncodingLabel.@"windows-1252";
+
+ pub fn which(input_: string) ?EncodingLabel {
+ const input = strings.trim(input_, " \t\r\n");
+ const ExactMatcher = strings.ExactSizeMatcher;
+ const Eight = ExactMatcher(8);
+ const Sixteen = ExactMatcher(16);
+ return switch (input.len) {
+ 1, 0 => null,
+ 2...8 => switch (Eight.matchLower(input)) {
+ Eight.case("l1"),
+ Eight.case("ascii"),
+ Eight.case("cp819"),
+ Eight.case("cp1252"),
+ Eight.case("ibm819"),
+ Eight.case("latin1"),
+ Eight.case("iso88591"),
+ Eight.case("us-ascii"),
+ Eight.case("x-cp1252"),
+ => EncodingLabel.latin1,
+
+ Eight.case("ucs-2"),
+ Eight.case("utf-16"),
+ Eight.case("unicode"),
+ Eight.case("utf-16le"),
+ => EncodingLabel.@"UTF-16LE",
+
+ Eight.case("utf8"), Eight.case("utf-8") => EncodingLabel.@"UTF-8",
+ else => null,
+ },
+
+ 9...16 => switch (Sixteen.matchLower(input)) {
+ Sixteen.case("iso8859-1"),
+ Sixteen.case("iso_8859-1"),
+ Sixteen.case("iso-8859-1"),
+ Sixteen.case("iso-ir-100"),
+ Sixteen.case("csisolatin1"),
+ Sixteen.case("windows-1252"),
+ Sixteen.case("ansi_x3.4-1968"),
+ Sixteen.case("iso_8859-1:1987"),
+ => EncodingLabel.latin1,
+
+ Sixteen.case("unicode11utf8"),
+ Sixteen.case("unicode20utf8"),
+ Sixteen.case("x-unicode20utf8"),
+ => EncodingLabel.@"UTF-8",
+
+ Sixteen.case("csunicode"),
+ Sixteen.case("unicodefeff"),
+ Sixteen.case("iso-10646-ucs-2"),
+ => EncodingLabel.@"UTF-16LE",
+
+ else => null,
+ },
+ else => if (strings.eqlCaseInsensitiveASCII(input, "unicode-1-1-utf-8", true))
+ EncodingLabel.@"UTF-8"
+ else
+ null,
+ };
+ }
+};
+
+pub const TextDecoder = struct {
+ scratch_memory: []u8 = &[_]u8{},
+ ignore_bom: bool = false,
+ fatal: bool = false,
+ encoding: EncodingLabel = EncodingLabel.utf8,
+
+ pub const Class = NewClass(
+ TextDecoder,
+ .{
+ .name = "TextDecoder",
+ },
+ .{
+ .decode = .{
+ .rfn = decode,
+ },
+ },
+ .{
+ .encoding = .{
+ .get = getEncoding,
+ .readOnly = true,
+ },
+ .ignoreBOM = .{
+ .get = getIgnoreBOM,
+ .set = setIgnoreBOM,
+ },
+ .fatal = .{
+ .get = getFatal,
+ .set = setFatal,
+ },
+ },
+ );
+
+ pub fn getIgnoreBOM(
+ this: *TextDecoder,
+ _: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return JSC.JSValue.jsBoolean(this.ignore_bom).asObjectRef();
+ }
+ pub fn setIgnoreBOM(
+ this: *TextDecoder,
+ _: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ value: JSC.C.JSValueRef,
+ _: js.ExceptionRef,
+ ) bool {
+ this.ignore_bom = JSValue.fromRef(value).toBoolean();
+ return true;
+ }
+ pub fn setFatal(
+ this: *TextDecoder,
+ _: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ value: JSC.C.JSValueRef,
+ _: js.ExceptionRef,
+ ) bool {
+ this.fatal = JSValue.fromRef(value).toBoolean();
+ return true;
+ }
+ pub fn getFatal(
+ this: *TextDecoder,
+ _: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return JSC.JSValue.jsBoolean(this.fatal).asObjectRef();
+ }
+
+ const utf8_string: string = "utf-8";
+ pub fn getEncoding(
+ this: *TextDecoder,
+ ctx: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return ZigString.init(EncodingLabel.label.get(this.encoding).?).toValue(ctx.ptr()).asObjectRef();
+ }
+ const Vector16 = std.meta.Vector(16, u16);
+ const max_16_ascii: Vector16 = @splat(16, @as(u16, 127));
+
+ fn decodeUTF16WithAlignment(
+ _: *TextDecoder,
+ comptime Slice: type,
+ slice: Slice,
+ ctx: js.JSContextRef,
+ ) JSC.C.JSValueRef {
+ var i: usize = 0;
+
+ while (i < slice.len) {
+ while (i + strings.ascii_u16_vector_size <= slice.len) {
+ const vec: strings.AsciiU16Vector = slice[i..][0..strings.ascii_u16_vector_size].*;
+ if ((@reduce(
+ .Or,
+ @bitCast(
+ strings.AsciiVectorU16U1,
+ vec > strings.max_u16_ascii,
+ ) | @bitCast(
+ strings.AsciiVectorU16U1,
+ vec < strings.min_u16_ascii,
+ ),
+ ) == 0)) {
+ break;
+ }
+ i += strings.ascii_u16_vector_size;
+ }
+ while (i < slice.len and slice[i] <= 127) {
+ i += 1;
+ }
+ break;
+ }
+
+ // is this actually a UTF-16 string that is just ascii?
+ // we can still allocate as UTF-16 and just copy the bytes
+ if (i == slice.len) {
+ if (comptime Slice == []u16) {
+ return JSC.C.JSValueMakeString(ctx, JSC.C.JSStringCreateWithCharacters(slice.ptr, slice.len));
+ } else {
+ var str = ZigString.init("");
+ str.ptr = @ptrCast([*]u8, slice.ptr);
+ str.len = slice.len;
+ str.markUTF16();
+ return str.toValueGC(ctx.ptr()).asObjectRef();
+ }
+ }
+
+ var buffer = std.ArrayListAlignedUnmanaged(u16, @alignOf(@TypeOf(slice.ptr))){};
+ // copy the allocator to reduce the number of threadlocal accesses
+ const allocator = VirtualMachine.vm.allocator;
+ buffer.ensureTotalCapacity(allocator, slice.len) catch unreachable;
+ buffer.items.len = i;
+
+ @memcpy(
+ std.mem.sliceAsBytes(buffer.items).ptr,
+ std.mem.sliceAsBytes(slice).ptr,
+ std.mem.sliceAsBytes(slice[0..i]).len,
+ );
+
+ const first_high_surrogate = 0xD800;
+ const last_high_surrogate = 0xDBFF;
+ const first_low_surrogate = 0xDC00;
+ const last_low_surrogate = 0xDFFF;
+
+ var remainder = slice[i..];
+ while (remainder.len > 0) {
+ switch (remainder[0]) {
+ 0...127 => {
+ const count: usize = if (strings.firstNonASCII16CheckMin(Slice, remainder, false)) |index| index + 1 else remainder.len;
+
+ buffer.ensureUnusedCapacity(allocator, count) catch unreachable;
+
+ const prev = buffer.items.len;
+ buffer.items.len += count;
+ // Since this string is freshly allocated, we know it's not going to overlap
+ @memcpy(
+ std.mem.sliceAsBytes(buffer.items[prev..]).ptr,
+ std.mem.sliceAsBytes(remainder).ptr,
+ std.mem.sliceAsBytes(remainder[0..count]).len,
+ );
+ remainder = remainder[count..];
+ },
+ first_high_surrogate...last_high_surrogate => |first| {
+ if (remainder.len > 1) {
+ if (remainder[1] >= first_low_surrogate and remainder[1] <= last_low_surrogate) {
+ buffer.ensureUnusedCapacity(allocator, 2) catch unreachable;
+ buffer.items.ptr[buffer.items.len] = first;
+ buffer.items.ptr[buffer.items.len + 1] = remainder[1];
+ buffer.items.len += 2;
+ remainder = remainder[2..];
+ continue;
+ }
+ }
+ buffer.ensureUnusedCapacity(allocator, 1) catch unreachable;
+ buffer.items.ptr[buffer.items.len] = strings.unicode_replacement;
+ buffer.items.len += 1;
+ remainder = remainder[1..];
+ continue;
+ },
+
+ // Is this an unpaired low surrogate or four-digit hex escape?
+ else => {
+ buffer.ensureUnusedCapacity(allocator, 1) catch unreachable;
+ buffer.items.ptr[buffer.items.len] = strings.unicode_replacement;
+ buffer.items.len += 1;
+ remainder = remainder[1..];
+ },
+ }
+ }
+
+ var full = buffer.toOwnedSlice(allocator);
+
+ var out = ZigString.init("");
+ out.ptr = @ptrCast([*]u8, full.ptr);
+ out.len = full.len;
+ out.markUTF16();
+ return out.toValueGC(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn decode(
+ this: *TextDecoder,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ args: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ const arguments: []const JSC.JSValue = @ptrCast([*]const JSC.JSValue, args.ptr)[0..args.len];
+
+ if (arguments.len < 1 or arguments[0].isUndefined()) {
+ return ZigString.Empty.toValue(ctx.ptr()).asObjectRef();
+ }
+
+ const array_buffer = arguments[0].asArrayBuffer(ctx.ptr()) orelse {
+ JSC.throwInvalidArguments("TextDecoder.decode expects an ArrayBuffer or TypedArray", .{}, ctx, exception);
+ return null;
+ };
+
+ if (array_buffer.len == 0) {
+ return ZigString.Empty.toValue(ctx.ptr()).asObjectRef();
+ }
+
+ JSC.C.JSValueProtect(ctx, args[0]);
+ defer JSC.C.JSValueUnprotect(ctx, args[0]);
+
+ switch (this.encoding) {
+ EncodingLabel.@"latin1" => {
+ return ZigString.init(array_buffer.slice()).toValueGC(ctx.ptr()).asObjectRef();
+ },
+ EncodingLabel.@"UTF-8" => {
+ const buffer_slice = array_buffer.slice();
+
+ if (this.fatal) {
+ if (strings.toUTF16Alloc(default_allocator, buffer_slice, true)) |result_| {
+ if (result_) |result| {
+ return ZigString.toExternalU16(result.ptr, result.len, ctx.ptr()).asObjectRef();
+ }
+ } else |err| {
+ switch (err) {
+ error.InvalidByteSequence => {
+ JSC.JSError(default_allocator, "Invalid byte sequence", .{}, ctx, exception);
+ return null;
+ },
+ error.OutOfMemory => {
+ JSC.JSError(default_allocator, "Out of memory", .{}, ctx, exception);
+ return null;
+ },
+ else => {
+ JSC.JSError(default_allocator, "Unknown error", .{}, ctx, exception);
+ return null;
+ },
+ }
+ }
+ } else {
+ if (strings.toUTF16Alloc(default_allocator, buffer_slice, false)) |result_| {
+ if (result_) |result| {
+ return ZigString.toExternalU16(result.ptr, result.len, ctx.ptr()).asObjectRef();
+ }
+ } else |err| {
+ switch (err) {
+ error.OutOfMemory => {
+ JSC.JSError(default_allocator, "Out of memory", .{}, ctx, exception);
+ return null;
+ },
+ else => {
+ JSC.JSError(default_allocator, "Unknown error", .{}, ctx, exception);
+ return null;
+ },
+ }
+ }
+ }
+
+ // Experiment: using mimalloc directly is slightly slower
+ return ZigString.init(buffer_slice).toValueGC(ctx.ptr()).asObjectRef();
+ },
+
+ EncodingLabel.@"UTF-16LE" => {
+ if (std.mem.isAligned(@ptrToInt(array_buffer.ptr) + @as(usize, array_buffer.offset), @alignOf([*]u16))) {
+ return this.decodeUTF16WithAlignment([]u16, array_buffer.asU16(), ctx);
+ }
+
+ return this.decodeUTF16WithAlignment([]align(1) u16, array_buffer.asU16Unaligned(), ctx);
+ },
+ else => {
+ JSC.throwInvalidArguments("TextDecoder.decode set to unsupported encoding", .{}, ctx, exception);
+ return null;
+ },
+ }
+ }
+
+ pub const Constructor = JSC.NewConstructor(TextDecoder, .{
+ .constructor = .{ .rfn = constructor },
+ }, .{});
+
+ pub fn constructor(
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ args_: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSObjectRef {
+ var arguments: []const JSC.JSValue = @ptrCast([*]const JSC.JSValue, args_.ptr)[0..args_.len];
+ var encoding = EncodingLabel.@"UTF-8";
+ if (arguments.len > 0) {
+ if (!arguments[0].isString()) {
+ JSC.throwInvalidArguments("TextDecoder(encoding) label is invalid", .{}, ctx, exception);
+ return null;
+ }
+
+ var str = arguments[0].toSlice(ctx.ptr(), default_allocator);
+ defer if (str.allocated) str.deinit();
+ encoding = EncodingLabel.which(str.slice()) orelse {
+ JSC.throwInvalidArguments("Unsupported encoding label \"{s}\"", .{str.slice()}, ctx, exception);
+ return null;
+ };
+ }
+ var decoder = getAllocator(ctx).create(TextDecoder) catch unreachable;
+ decoder.* = TextDecoder{ .encoding = encoding };
+ return TextDecoder.Class.make(ctx, decoder);
+ }
+};
+
+/// This code is incredibly redundant:
+/// we have different paths for creating a new buffer versus writing into an existing one,
+/// which is mostly why there is so much duplication.
+/// The majority of the business logic here just dispatches to the optimized functions.
+pub const Encoder = struct {
+ export fn Bun__encoding__writeLatin1AsHex(input: [*]const u8, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU8(input, len, to, to_len, .hex);
+ }
+ export fn Bun__encoding__writeLatin1AsASCII(input: [*]const u8, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU8(input, len, to, to_len, .ascii);
+ }
+ export fn Bun__encoding__writeLatin1AsURLSafeBase64(input: [*]const u8, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU8(input, len, to, to_len, .base64url);
+ }
+ export fn Bun__encoding__writeLatin1AsUTF16(input: [*]const u8, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU8(input, len, to, to_len, .utf16le);
+ }
+ export fn Bun__encoding__writeLatin1AsUTF8(input: [*]const u8, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU8(input, len, to, to_len, JSC.Node.Encoding.utf8);
+ }
+ export fn Bun__encoding__writeLatin1AsBase64(input: [*]const u8, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU8(input, len, to, to_len, .base64);
+ }
+ export fn Bun__encoding__writeUTF16AsBase64(input: [*]const u16, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU16(input, len, to, to_len, .base64);
+ }
+ export fn Bun__encoding__writeUTF16AsHex(input: [*]const u16, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU16(input, len, to, to_len, .hex);
+ }
+ export fn Bun__encoding__writeUTF16AsURLSafeBase64(input: [*]const u16, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU16(input, len, to, to_len, .base64url);
+ }
+ export fn Bun__encoding__writeUTF16AsUTF16(input: [*]const u16, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU16(input, len, to, to_len, JSC.Node.Encoding.utf16le);
+ }
+ export fn Bun__encoding__writeUTF16AsUTF8(input: [*]const u16, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU16(input, len, to, to_len, .utf8);
+ }
+ export fn Bun__encoding__writeUTF16AsASCII(input: [*]const u8, len: usize, to: [*]u8, to_len: usize) i64 {
+ return writeU8(input, len, to, to_len, .ascii);
+ }
+
+ export fn Bun__encoding__byteLengthLatin1AsHex(input: [*]const u8, len: usize) usize {
+ return byteLengthU8(input, len, .hex);
+ }
+ export fn Bun__encoding__byteLengthLatin1AsASCII(input: [*]const u8, len: usize) usize {
+ return byteLengthU8(input, len, .ascii);
+ }
+ export fn Bun__encoding__byteLengthLatin1AsURLSafeBase64(input: [*]const u8, len: usize) usize {
+ return byteLengthU8(input, len, .base64url);
+ }
+ export fn Bun__encoding__byteLengthLatin1AsUTF16(input: [*]const u8, len: usize) usize {
+ return byteLengthU8(input, len, .utf16le);
+ }
+ export fn Bun__encoding__byteLengthLatin1AsUTF8(input: [*]const u8, len: usize) usize {
+ return byteLengthU8(input, len, .utf8);
+ }
+ export fn Bun__encoding__byteLengthLatin1AsBase64(input: [*]const u8, len: usize) usize {
+ return byteLengthU8(input, len, .base64);
+ }
+ export fn Bun__encoding__byteLengthUTF16AsBase64(input: [*]const u16, len: usize) usize {
+ return byteLengthU16(input, len, .base64);
+ }
+ export fn Bun__encoding__byteLengthUTF16AsHex(input: [*]const u16, len: usize) usize {
+ return byteLengthU16(input, len, .hex);
+ }
+ export fn Bun__encoding__byteLengthUTF16AsURLSafeBase64(input: [*]const u16, len: usize) usize {
+ return byteLengthU16(input, len, .base64url);
+ }
+ export fn Bun__encoding__byteLengthUTF16AsUTF16(input: [*]const u16, len: usize) usize {
+ return byteLengthU16(input, len, .utf16le);
+ }
+ export fn Bun__encoding__byteLengthUTF16AsUTF8(input: [*]const u16, len: usize) usize {
+ return byteLengthU16(input, len, .utf8);
+ }
+ export fn Bun__encoding__byteLengthUTF16AsASCII(input: [*]const u8, len: usize) usize {
+ return byteLengthU8(input, len, .ascii);
+ }
+
+ export fn Bun__encoding__constructFromLatin1AsHex(globalObject: *JSGlobalObject, input: [*]const u8, len: usize) JSValue {
+ var slice = constructFromU8(input, len, .hex);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromLatin1AsASCII(globalObject: *JSGlobalObject, input: [*]const u8, len: usize) JSValue {
+ var slice = constructFromU8(input, len, .ascii);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromLatin1AsURLSafeBase64(globalObject: *JSGlobalObject, input: [*]const u8, len: usize) JSValue {
+ var slice = constructFromU8(input, len, .base64url);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromLatin1AsUTF16(globalObject: *JSGlobalObject, input: [*]const u8, len: usize) JSValue {
+ var slice = constructFromU8(input, len, .utf16le);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromLatin1AsUTF8(globalObject: *JSGlobalObject, input: [*]const u8, len: usize) JSValue {
+ var slice = constructFromU8(input, len, JSC.Node.Encoding.utf8);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromLatin1AsBase64(globalObject: *JSGlobalObject, input: [*]const u8, len: usize) JSValue {
+ var slice = constructFromU8(input, len, .base64);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromUTF16AsBase64(globalObject: *JSGlobalObject, input: [*]const u16, len: usize) JSValue {
+ var slice = constructFromU16(input, len, .base64);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromUTF16AsHex(globalObject: *JSGlobalObject, input: [*]const u16, len: usize) JSValue {
+ var slice = constructFromU16(input, len, .hex);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromUTF16AsURLSafeBase64(globalObject: *JSGlobalObject, input: [*]const u16, len: usize) JSValue {
+ var slice = constructFromU16(input, len, .base64url);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromUTF16AsUTF16(globalObject: *JSGlobalObject, input: [*]const u16, len: usize) JSValue {
+ var slice = constructFromU16(input, len, JSC.Node.Encoding.utf16le);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromUTF16AsUTF8(globalObject: *JSGlobalObject, input: [*]const u16, len: usize) JSValue {
+ var slice = constructFromU16(input, len, .utf8);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+ export fn Bun__encoding__constructFromUTF16AsASCII(globalObject: *JSGlobalObject, input: [*]const u16, len: usize) JSValue {
+ var slice = constructFromU16(input, len, .utf8);
+ return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator);
+ }
+
+ export fn Bun__encoding__toStringUTF16(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue {
+ return toString(input, len, globalObject, JSC.Node.Encoding.utf16le);
+ }
+ export fn Bun__encoding__toStringUTF8(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue {
+ return toString(input, len, globalObject, .utf8);
+ }
+ export fn Bun__encoding__toStringASCII(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue {
+ return toString(input, len, globalObject, .ascii);
+ }
+ export fn Bun__encoding__toStringLatin1(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue {
+ return toString(input, len, globalObject, .latin1);
+ }
+
+ export fn Bun__encoding__toStringHex(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue {
+ return toString(input, len, globalObject, .hex);
+ }
+
+ export fn Bun__encoding__toStringBase64(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue {
+ return toString(input, len, globalObject, .base64);
+ }
+
+ export fn Bun__encoding__toStringURLSafeBase64(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue {
+ return toString(input, len, globalObject, .base64url);
+ }
+
+ // pub fn writeUTF16AsUTF8(utf16: [*]const u16, len: usize, to: [*]u8, to_len: usize) callconv(.C) i32 {
+ // return @intCast(i32, strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, utf16[0..len]).written);
+ // }
+
+ pub fn toString(input_ptr: [*]const u8, len: usize, global: *JSGlobalObject, comptime encoding: JSC.Node.Encoding) JSValue {
+ if (len == 0)
+ return ZigString.Empty.toValue(global);
+
+ const input = input_ptr[0..len];
+ const allocator = VirtualMachine.vm.allocator;
+
+ switch (comptime encoding) {
+ .latin1, .ascii => {
+ var to = allocator.alloc(u8, len) catch return ZigString.init("Out of memory").toErrorInstance(global);
+
+ @memcpy(to.ptr, input_ptr, to.len);
+
+ // Hoping this gets auto vectorized
+ for (to[0..to.len]) |c, i| {
+ to[i] = @as(u8, @truncate(u7, c));
+ }
+
+ return ZigString.init(to).toExternalValue(global);
+ },
+ .buffer, .utf8 => {
+ // JSC only supports UTF-16 strings for non-ascii text
+ const converted = strings.toUTF16Alloc(allocator, input, false) catch return ZigString.init("Out of memory").toErrorInstance(global);
+ if (converted) |utf16| {
+ return ZigString.toExternalU16(utf16.ptr, utf16.len, global);
+ }
+
+ // If we get here, it means we can safely assume the string is 100% ASCII characters
+ // For this, we rely on the GC to manage the memory to minimize potential for memory leaks
+ return ZigString.init(input).toValueGC(global);
+ },
+ // potentially convert UTF-16 to UTF-8
+ JSC.Node.Encoding.ucs2, JSC.Node.Encoding.utf16le => {
+ const converted = strings.toUTF16Alloc(allocator, input, false) catch return ZigString.init("Out of memory").toErrorInstance(global);
+ if (converted) |utf16| {
+ return ZigString.toExternalU16(utf16.ptr, utf16.len, global);
+ }
+
+ var output = allocator.alloc(u8, input.len) catch return ZigString.init("Out of memory").toErrorInstance(global);
+ JSC.WTF.copyLCharsFromUCharSource(output.ptr, []align(1) const u16, @ptrCast([*]align(1) const u16, input.ptr)[0 .. input.len / 2]);
+ return ZigString.init(output).toExternalValue(global);
+ },
+
+ JSC.Node.Encoding.hex => {
+ var output = allocator.alloc(u8, input.len * 2) catch return ZigString.init("Out of memory").toErrorInstance(global);
+ const wrote = strings.encodeBytesToHex(output, input);
+ std.debug.assert(wrote == output.len);
+ var val = ZigString.init(output);
+ val.mark();
+ return val.toExternalValue(global);
+ },
+
+ JSC.Node.Encoding.base64url => {
+ return JSC.WTF.toBase64URLStringValue(input, global);
+ },
+
+ JSC.Node.Encoding.base64 => {
+ const to_len = bun.base64.encodeLen(input);
+ var to = allocator.alloc(u8, to_len) catch return ZigString.init("Out of memory").toErrorInstance(global);
+ const wrote = bun.base64.encode(to, input);
+ return ZigString.init(to[0..wrote]).toExternalValue(global);
+ },
+ }
+ }
+
+ pub fn writeU8(input: [*]const u8, len: usize, to: [*]u8, to_len: usize, comptime encoding: JSC.Node.Encoding) i64 {
+ if (len == 0 or to_len == 0)
+ return 0;
+
+ // TODO: increase temporary buffer size for larger amounts of data
+ // defer {
+ // if (comptime encoding.isBinaryToText()) {}
+ // }
+
+ // if (comptime encoding.isBinaryToText()) {}
+
+ switch (comptime encoding) {
+ JSC.Node.Encoding.buffer => {
+ const written = @minimum(len, to_len);
+ @memcpy(to, input, written);
+
+ return @intCast(i64, written);
+ },
+ .latin1, .ascii => {
+ const written = @minimum(len, to_len);
+ @memcpy(to, input, written);
+
+ // Hoping this gets auto vectorized
+ for (to[0..written]) |c, i| {
+ to[i] = @as(u8, @truncate(u7, c));
+ }
+
+ return @intCast(i64, written);
+ },
+ .utf8 => {
+ // need to encode
+ return @intCast(i64, strings.copyLatin1IntoUTF8(to[0..to_len], []const u8, input[0..len]).written);
+ },
+ // encode latin1 into UTF16
+ JSC.Node.Encoding.ucs2, JSC.Node.Encoding.utf16le => {
+ if (to_len < 2)
+ return 0;
+
+ if (std.mem.isAligned(@ptrToInt(to), @alignOf([*]u16))) {
+ var buf = input[0..len];
+ var output = @ptrCast([*]u16, @alignCast(@alignOf(u16), to))[0 .. to_len / 2];
+ return strings.copyLatin1IntoUTF16([]u16, output, []const u8, buf).written;
+ } else {
+ var buf = input[0..len];
+ var output = @ptrCast([*]align(1) u16, to)[0 .. to_len / 2];
+ return strings.copyLatin1IntoUTF16([]align(1) u16, output, []const u8, buf).written;
+ }
+ },
+
+ JSC.Node.Encoding.hex => {
+ return @intCast(i64, strings.decodeHexToBytes(to[0..to_len], u8, input[0..len]));
+ },
+
+ JSC.Node.Encoding.base64url => {
+ var slice = strings.trim(input[0..len], "\r\n\t " ++ [_]u8{std.ascii.control_code.VT});
+ if (slice.len == 0)
+ return 0;
+
+ if (strings.eqlComptime(slice[slice.len - 2 ..][0..2], "==")) {
+ slice = slice[0 .. slice.len - 2];
+ } else if (slice[slice.len - 1] == '=') {
+ slice = slice[0 .. slice.len - 1];
+ }
+
+ const wrote = bun.base64.urlsafe.decode(to[0..to_len], slice) catch |err| brk: {
+ if (err == error.NoSpaceLeft) {
+ break :brk to_len;
+ }
+
+ return -1;
+ };
+ return @intCast(i64, wrote);
+ },
+
+ JSC.Node.Encoding.base64 => {
+ var slice = strings.trim(input[0..len], "\r\n\t " ++ [_]u8{std.ascii.control_code.VT});
+ var outlen = bun.base64.decodeLen(slice);
+
+ return @intCast(i64, bun.base64.decode(to[0..outlen], slice).written);
+ },
+ // else => return 0,
+ }
+ }
+
+ pub fn byteLengthU8(input: [*]const u8, len: usize, comptime encoding: JSC.Node.Encoding) usize {
+ if (len == 0)
+ return 0;
+
+ switch (comptime encoding) {
+ .utf8 => {
+ return strings.elementLengthLatin1IntoUTF8([]const u8, input[0..len]);
+ },
+
+ .latin1, JSC.Node.Encoding.ascii, JSC.Node.Encoding.buffer => {
+ return len;
+ },
+
+ JSC.Node.Encoding.ucs2, JSC.Node.Encoding.utf16le => {
+ return strings.elementLengthUTF8IntoUTF16([]const u8, input[0..len]) * 2;
+ },
+
+ JSC.Node.Encoding.hex => {
+ return len * 2;
+ },
+
+ JSC.Node.Encoding.base64, JSC.Node.Encoding.base64url => {
+ return bun.base64.encodeLen(input[0..len]);
+ },
+ // else => return &[_]u8{};
+ }
+ }
+
+ pub fn writeU16(input: [*]const u16, len: usize, to: [*]u8, to_len: usize, comptime encoding: JSC.Node.Encoding) i64 {
+ if (len == 0)
+ return 0;
+
+ switch (comptime encoding) {
+ .utf8 => {
+ return @intCast(i32, strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, input[0..len]).written);
+ },
+ // string is already encoded, just need to copy the data
+ .latin1, JSC.Node.Encoding.ascii, JSC.Node.Encoding.ucs2, JSC.Node.Encoding.buffer, JSC.Node.Encoding.utf16le => {
+ strings.copyU16IntoU8(to[0..to_len], []const u16, input[0..len]);
+
+ return @intCast(i64, @minimum(len, to_len));
+ },
+
+ JSC.Node.Encoding.hex => {
+ return @intCast(i64, strings.decodeHexToBytes(to[0..to_len], u16, input[0..len]));
+ },
+
+ JSC.Node.Encoding.base64, JSC.Node.Encoding.base64url => {
+ if (to_len < 2 or len == 0)
+ return 0;
+
+ // very very slow case!
+ // shouldn't really happen though
+ var transcoded = strings.toUTF8Alloc(bun.default_allocator, input[0..len]) catch return 0;
+ defer bun.default_allocator.free(transcoded);
+ return writeU8(transcoded.ptr, transcoded.len, to, to_len, encoding);
+ },
+ // else => return &[_]u8{};
+ }
+ }
+
+ /// Node returns an imprecise byte length here.
+ /// It should be fast enough for us to return the precise length.
+ pub fn byteLengthU16(input: [*]const u16, len: usize, comptime encoding: JSC.Node.Encoding) usize {
+ if (len == 0)
+ return 0;
+
+ switch (comptime encoding) {
+ // these should be the same size
+ .ascii, .latin1, .utf8 => {
+ return strings.elementLengthUTF16IntoUTF8([]const u16, input[0..len]);
+ },
+ JSC.Node.Encoding.ucs2, JSC.Node.Encoding.buffer, JSC.Node.Encoding.utf16le => {
+ return len * 2;
+ },
+
+ JSC.Node.Encoding.hex => {
+ return len;
+ },
+
+ JSC.Node.Encoding.base64, JSC.Node.Encoding.base64url => {
+ return bun.base64.encodeLen(input[0..len]);
+ },
+ // else => return &[_]u8{};
+ }
+ }
+
+ pub fn constructFromU8(input: [*]const u8, len: usize, comptime encoding: JSC.Node.Encoding) []u8 {
+ if (len == 0)
+ return &[_]u8{};
+
+ const allocator = VirtualMachine.vm.allocator;
+
+ switch (comptime encoding) {
+ JSC.Node.Encoding.buffer => {
+ var to = allocator.alloc(u8, len) catch return &[_]u8{};
+ @memcpy(to.ptr, input, len);
+
+ return to;
+ },
+ .latin1, .ascii => {
+ var to = allocator.alloc(u8, len) catch return &[_]u8{};
+ @memcpy(to.ptr, input, len);
+
+ // Hoping this gets auto vectorized
+ for (to[0..len]) |c, i| {
+ to[i] = @as(u8, @truncate(u7, c));
+ }
+
+ return to;
+ },
+ .utf8 => {
+ // need to encode
+ return strings.allocateLatin1IntoUTF8(allocator, []const u8, input[0..len]) catch return &[_]u8{};
+ },
+ // encode latin1 into UTF16
+ // return as bytes
+ JSC.Node.Encoding.ucs2, JSC.Node.Encoding.utf16le => {
+ var to = allocator.alloc(u16, len) catch return &[_]u8{};
+ _ = strings.copyLatin1IntoUTF16([]u16, to, []const u8, input[0..len]);
+ return std.mem.sliceAsBytes(to[0..len]);
+ },
+
+ JSC.Node.Encoding.hex => {
+ if (len < 2)
+ return &[_]u8{};
+
+ var to = allocator.alloc(u8, len / 2) catch return &[_]u8{};
+ return to[0..strings.decodeHexToBytes(to, u8, input[0..len])];
+ },
+
+ JSC.Node.Encoding.base64url => {
+ var slice = strings.trim(input[0..len], "\r\n\t " ++ [_]u8{std.ascii.control_code.VT});
+ if (slice.len == 0)
+ return &[_]u8{};
+
+ if (strings.eqlComptime(slice[slice.len - 2 ..][0..2], "==")) {
+ slice = slice[0 .. slice.len - 2];
+ } else if (slice[slice.len - 1] == '=') {
+ slice = slice[0 .. slice.len - 1];
+ }
+
+ const to_len = bun.base64.urlsafe.decoder.calcSizeForSlice(slice) catch unreachable;
+ var to = allocator.alloc(u8, to_len) catch return &[_]u8{};
+
+ const wrote = bun.base64.urlsafe.decode(to[0..to_len], slice) catch |err| brk: {
+ if (err == error.NoSpaceLeft) {
+ break :brk to_len;
+ }
+
+ return &[_]u8{};
+ };
+ return to[0..wrote];
+ },
+
+ JSC.Node.Encoding.base64 => {
+ var slice = strings.trim(input[0..len], "\r\n\t " ++ [_]u8{std.ascii.control_code.VT});
+ var outlen = bun.base64.decodeLen(slice);
+
+ var to = allocator.alloc(u8, outlen) catch return &[_]u8{};
+ const written = bun.base64.decode(to[0..outlen], slice).written;
+ return to[0..written];
+ },
+ // else => return 0,
+ }
+ }
+
+ pub fn constructFromU16(input: [*]const u16, len: usize, comptime encoding: JSC.Node.Encoding) []u8 {
+ if (len == 0)
+ return &[_]u8{};
+
+ const allocator = VirtualMachine.vm.allocator;
+
+ switch (comptime encoding) {
+ .utf8 => {
+ return strings.toUTF8AllocWithType(allocator, []const u16, input[0..len]) catch return &[_]u8{};
+ },
+ JSC.Node.Encoding.latin1, JSC.Node.Encoding.buffer, JSC.Node.Encoding.ascii => {
+ var to = allocator.alloc(u8, len) catch return &[_]u8{};
+ @memcpy(to.ptr, input, len);
+ for (to[0..len]) |c, i| {
+ to[i] = @as(u8, @truncate(u7, c));
+ }
+
+ return to;
+ },
+ // string is already encoded, just need to copy the data
+ JSC.Node.Encoding.ucs2, JSC.Node.Encoding.utf16le => {
+ var to = std.mem.sliceAsBytes(allocator.alloc(u16, len * 2) catch return &[_]u8{});
+ @memcpy(to.ptr, std.mem.sliceAsBytes(input[0..len]).ptr, std.mem.sliceAsBytes(input[0..len]).len);
+ return to;
+ },
+
+ JSC.Node.Encoding.hex => {
+ var to = allocator.alloc(u8, len * 2) catch return &[_]u8{};
+ return to[0..strings.decodeHexToBytes(to, u16, input[0..len])];
+ },
+
+ JSC.Node.Encoding.base64 => {
+
+ // very very slow case!
+ // shouldn't really happen though
+ var transcoded = strings.toUTF8Alloc(allocator, input[0..len]) catch return &[_]u8{};
+ defer allocator.free(transcoded);
+ return constructFromU8(transcoded.ptr, transcoded.len, .base64);
+ },
+
+ JSC.Node.Encoding.base64url => {
+
+ // very very slow case!
+ // shouldn't really happen though
+ var transcoded = strings.toUTF8Alloc(allocator, input[0..len]) catch return &[_]u8{};
+ defer allocator.free(transcoded);
+ return constructFromU8(transcoded.ptr, transcoded.len, .base64url);
+ },
+ // else => return 0,
+ }
+ }
+
+ comptime {
+ if (!JSC.is_bindgen) {
+ _ = Bun__encoding__writeLatin1AsHex;
+ _ = Bun__encoding__writeLatin1AsURLSafeBase64;
+ _ = Bun__encoding__writeLatin1AsUTF16;
+ _ = Bun__encoding__writeLatin1AsUTF8;
+ _ = Bun__encoding__writeLatin1AsBase64;
+ _ = Bun__encoding__writeUTF16AsBase64;
+ _ = Bun__encoding__writeUTF16AsHex;
+ _ = Bun__encoding__writeUTF16AsURLSafeBase64;
+ _ = Bun__encoding__writeUTF16AsUTF16;
+ _ = Bun__encoding__writeUTF16AsUTF8;
+ _ = Bun__encoding__writeLatin1AsASCII;
+ _ = Bun__encoding__writeUTF16AsASCII;
+
+ _ = Bun__encoding__byteLengthLatin1AsHex;
+ _ = Bun__encoding__byteLengthLatin1AsURLSafeBase64;
+ _ = Bun__encoding__byteLengthLatin1AsUTF16;
+ _ = Bun__encoding__byteLengthLatin1AsUTF8;
+ _ = Bun__encoding__byteLengthLatin1AsBase64;
+ _ = Bun__encoding__byteLengthUTF16AsBase64;
+ _ = Bun__encoding__byteLengthUTF16AsHex;
+ _ = Bun__encoding__byteLengthUTF16AsURLSafeBase64;
+ _ = Bun__encoding__byteLengthUTF16AsUTF16;
+ _ = Bun__encoding__byteLengthUTF16AsUTF8;
+ _ = Bun__encoding__byteLengthLatin1AsASCII;
+ _ = Bun__encoding__byteLengthUTF16AsASCII;
+
+ _ = Bun__encoding__toStringUTF16;
+ _ = Bun__encoding__toStringUTF8;
+ _ = Bun__encoding__toStringASCII;
+ _ = Bun__encoding__toStringLatin1;
+ _ = Bun__encoding__toStringHex;
+ _ = Bun__encoding__toStringBase64;
+ _ = Bun__encoding__toStringURLSafeBase64;
+
+ _ = Bun__encoding__constructFromLatin1AsHex;
+ _ = Bun__encoding__constructFromLatin1AsASCII;
+ _ = Bun__encoding__constructFromLatin1AsURLSafeBase64;
+ _ = Bun__encoding__constructFromLatin1AsUTF16;
+ _ = Bun__encoding__constructFromLatin1AsUTF8;
+ _ = Bun__encoding__constructFromLatin1AsBase64;
+ _ = Bun__encoding__constructFromUTF16AsBase64;
+ _ = Bun__encoding__constructFromUTF16AsHex;
+ _ = Bun__encoding__constructFromUTF16AsURLSafeBase64;
+ _ = Bun__encoding__constructFromUTF16AsUTF16;
+ _ = Bun__encoding__constructFromUTF16AsUTF8;
+ _ = Bun__encoding__constructFromUTF16AsASCII;
+ }
+ }
+};
+
+comptime {
+ if (!JSC.is_bindgen) {
+ std.testing.refAllDecls(Encoder);
+ }
+}
+
+test "Vec" {}
diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig
new file mode 100644
index 000000000..017edb805
--- /dev/null
+++ b/src/bun.js/webcore/response.zig
@@ -0,0 +1,4844 @@
+const std = @import("std");
+const Api = @import("../../api/schema.zig").Api;
+const bun = @import("../../global.zig");
+const RequestContext = @import("../../http.zig").RequestContext;
+const MimeType = @import("../../http.zig").MimeType;
+const ZigURL = @import("../../url.zig").URL;
+const HTTPClient = @import("http");
+const NetworkThread = HTTPClient.NetworkThread;
+const AsyncIO = NetworkThread.AsyncIO;
+const JSC = @import("javascript_core");
+const js = JSC.C;
+
+const Method = @import("../../http/method.zig").Method;
+const FetchHeaders = JSC.FetchHeaders;
+const ObjectPool = @import("../../pool.zig").ObjectPool;
+const SystemError = JSC.SystemError;
+const Output = @import("../../global.zig").Output;
+const MutableString = @import("../../global.zig").MutableString;
+const strings = @import("../../global.zig").strings;
+const string = @import("../../global.zig").string;
+const default_allocator = @import("../../global.zig").default_allocator;
+const FeatureFlags = @import("../../global.zig").FeatureFlags;
+const ArrayBuffer = @import("../base.zig").ArrayBuffer;
+const Properties = @import("../base.zig").Properties;
+const NewClass = @import("../base.zig").NewClass;
+const d = @import("../base.zig").d;
+const castObj = @import("../base.zig").castObj;
+const getAllocator = @import("../base.zig").getAllocator;
+const JSPrivateDataPtr = @import("../base.zig").JSPrivateDataPtr;
+const GetJSPrivateData = @import("../base.zig").GetJSPrivateData;
+const Environment = @import("../../env.zig");
+const ZigString = JSC.ZigString;
+const IdentityContext = @import("../../identity_context.zig").IdentityContext;
+const JSInternalPromise = JSC.JSInternalPromise;
+const JSPromise = JSC.JSPromise;
+const JSValue = JSC.JSValue;
+const JSError = JSC.JSError;
+const JSGlobalObject = JSC.JSGlobalObject;
+
+const VirtualMachine = @import("../javascript.zig").VirtualMachine;
+const Task = JSC.Task;
+const JSPrinter = @import("../../js_printer.zig");
+const picohttp = @import("picohttp");
+const StringJoiner = @import("../../string_joiner.zig");
+const uws = @import("uws");
+
+pub const Response = struct {
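+ /// A small recycling pool of Response JSObjectRefs: finalized wrappers are parked here
+ /// so the next Response can reuse an object instead of allocating a new one.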
+ pub const Pool = struct {
+ response_objects_pool: [127]JSC.C.JSObjectRef = undefined,
+ response_objects_used: u8 = 0,
+
+ pub fn get(this: *Pool, ptr: *Response) ?JSC.C.JSObjectRef {
+ if (comptime JSC.is_bindgen)
+ unreachable;
+ if (this.response_objects_used > 0) {
+ var result = this.response_objects_pool[this.response_objects_used - 1];
+ this.response_objects_used -= 1;
+ if (JSC.C.JSObjectSetPrivate(result, JSPrivateDataPtr.init(ptr).ptr())) {
+ return result;
+ } else {
+ JSC.C.JSValueUnprotect(VirtualMachine.vm.global.ref(), result);
+ }
+ }
+
+ return null;
+ }
+
+ pub fn push(this: *Pool, globalThis: *JSC.JSGlobalObject, object: JSC.JSValue) void {
+ var remaining = this.response_objects_pool[@minimum(this.response_objects_used, this.response_objects_pool.len)..];
+ if (remaining.len == 0) {
+ JSC.C.JSValueUnprotect(globalThis.ref(), object.asObjectRef());
+ return;
+ }
+
+ if (object.as(Response)) |resp| {
+ _ = JSC.C.JSObjectSetPrivate(object.asObjectRef(), null);
+
+ _ = resp.body.use();
+ resp.finalize();
+ remaining[0] = object.asObjectRef();
+ this.response_objects_used += 1;
+ }
+ }
+ };
+
+ pub const Constructor = JSC.NewConstructor(
+ Response,
+ .{
+ .@"constructor" = constructor,
+ .@"json" = .{ .rfn = constructJSON },
+ .@"redirect" = .{ .rfn = constructRedirect },
+ .@"error" = .{ .rfn = constructError },
+ },
+ .{},
+ );
+
+ pub const Class = NewClass(
+ Response,
+ .{ .name = "Response" },
+ .{
+ .@"finalize" = finalize,
+ .@"text" = .{
+ .rfn = Response.getText,
+ .ts = d.ts{},
+ },
+ .@"json" = .{
+ .rfn = Response.getJSON,
+ .ts = d.ts{},
+ },
+ .@"arrayBuffer" = .{
+ .rfn = Response.getArrayBuffer,
+ .ts = d.ts{},
+ },
+ .@"blob" = .{
+ .rfn = Response.getBlob,
+ .ts = d.ts{},
+ },
+
+ .@"clone" = .{
+ .rfn = doClone,
+ .ts = d.ts{},
+ },
+ },
+ .{
+ .@"url" = .{
+ .@"get" = getURL,
+ .ro = true,
+ },
+
+ .@"ok" = .{
+ .@"get" = getOK,
+ .ro = true,
+ },
+ .@"status" = .{
+ .@"get" = getStatus,
+ .ro = true,
+ },
+ .@"statusText" = .{
+ .@"get" = getStatusText,
+ .ro = true,
+ },
+ .@"headers" = .{
+ .@"get" = getHeaders,
+ .ro = true,
+ },
+ .@"bodyUsed" = .{
+ .@"get" = getBodyUsed,
+ .ro = true,
+ },
+ .@"type" = .{
+ .@"get" = getResponseType,
+ .ro = true,
+ },
+ },
+ );
+
+ allocator: std.mem.Allocator,
+ body: Body,
+ url: string = "",
+ status_text: string = "",
+ redirected: bool = false,
+
+ pub fn getBodyValue(
+ this: *Response,
+ ) *Body.Value {
+ return &this.body.value;
+ }
+
+ pub inline fn statusCode(this: *const Response) u16 {
+ return this.body.init.status_code;
+ }
+
+ pub fn redirectLocation(this: *const Response) ?[]const u8 {
+ return this.header("location");
+ }
+
+ pub fn header(this: *const Response, comptime name: []const u8) ?[]const u8 {
+ return (this.body.init.headers orelse return null).get(name);
+ }
+
+ pub const Props = struct {};
+
+ pub fn writeFormat(this: *const Response, formatter: *JSC.Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void {
+ const Writer = @TypeOf(writer);
+ try formatter.writeIndent(Writer, writer);
+ try writer.print("Response ({}) {{\n", .{bun.fmt.size(this.body.len())});
+ {
+ formatter.indent += 1;
+ defer formatter.indent -|= 1;
+
+ try formatter.writeIndent(Writer, writer);
+ try writer.writeAll("ok: ");
+ formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.isOK()), .BooleanObject, enable_ansi_colors);
+ formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable;
+ try writer.writeAll("\n");
+
+ try this.body.writeFormat(formatter, writer, enable_ansi_colors);
+
+ formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable;
+ try writer.writeAll("\n");
+
+ try formatter.writeIndent(Writer, writer);
+ try writer.writeAll("url: \"");
+ try writer.print(comptime Output.prettyFmt("<r><b>{s}<r>", enable_ansi_colors), .{this.url});
+ try writer.writeAll("\"");
+ formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable;
+ try writer.writeAll("\n");
+
+ try formatter.writeIndent(Writer, writer);
+ try writer.writeAll("statusText: ");
+ try JSPrinter.writeJSONString(this.status_text, Writer, writer, false);
+ formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable;
+ try writer.writeAll("\n");
+
+ try formatter.writeIndent(Writer, writer);
+ try writer.writeAll("redirected: ");
+ formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.redirected), .BooleanObject, enable_ansi_colors);
+ }
+ try writer.writeAll("\n");
+ try formatter.writeIndent(Writer, writer);
+ try writer.writeAll("}");
+ }
+
+ pub fn isOK(this: *const Response) bool {
+ return this.body.init.status_code == 304 or (this.body.init.status_code >= 200 and this.body.init.status_code <= 299);
+ }
+
+ pub fn getURL(
+ this: *Response,
+ ctx: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ // https://developer.mozilla.org/en-US/docs/Web/API/Response/url
+ return ZigString.init(this.url).toValueGC(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn getResponseType(
+ this: *Response,
+ ctx: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ if (this.body.init.status_code < 200) {
+ return ZigString.init("error").toValue(ctx.ptr()).asObjectRef();
+ }
+
+ return ZigString.init("basic").toValue(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn getBodyUsed(
+ this: *Response,
+ _: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return JSC.JSValue.jsBoolean(this.body.value == .Used).asRef();
+ }
+
+ pub fn getStatusText(
+ this: *Response,
+ ctx: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ // https://developer.mozilla.org/en-US/docs/Web/API/Response/statusText
+ return ZigString.init(this.status_text).withEncoding().toValueGC(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn getOK(
+ this: *Response,
+ ctx: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ // https://developer.mozilla.org/en-US/docs/Web/API/Response/ok
+ return js.JSValueMakeBoolean(ctx, this.isOK());
+ }
+
+ fn getOrCreateHeaders(this: *Response) *FetchHeaders {
+ if (this.body.init.headers == null) {
+ this.body.init.headers = FetchHeaders.createEmpty();
+ }
+ return this.body.init.headers.?;
+ }
+
+ pub fn getHeaders(
+ this: *Response,
+ ctx: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return this.getOrCreateHeaders().toJS(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn doClone(
+ this: *Response,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ var cloned = this.clone(getAllocator(ctx), ctx.ptr());
+ var val = Response.makeMaybePooled(ctx, cloned);
+ if (this.body.init.headers) |headers| {
+ cloned.body.init.headers = headers.cloneThis();
+ }
+
+ return val;
+ }
+
+ pub fn makeMaybePooled(ctx: js.JSContextRef, ptr: *Response) JSC.C.JSObjectRef {
+ if (comptime JSC.is_bindgen)
+ unreachable;
+ var vm = ctx.bunVM();
+ if (vm.response_objects_pool) |pool| {
+ if (pool.get(ptr)) |object| {
+ JSC.C.JSValueUnprotect(ctx, object);
+ return object;
+ }
+ }
+
+ return Response.Class.make(ctx, ptr);
+ }
+
+ pub fn cloneInto(
+ this: *const Response,
+ new_response: *Response,
+ allocator: std.mem.Allocator,
+ globalThis: *JSGlobalObject,
+ ) void {
+ new_response.* = Response{
+ .allocator = allocator,
+ .body = this.body.clone(allocator, globalThis),
+ .url = allocator.dupe(u8, this.url) catch unreachable,
+ .status_text = allocator.dupe(u8, this.status_text) catch unreachable,
+ .redirected = this.redirected,
+ };
+ }
+
+ pub fn clone(this: *const Response, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) *Response {
+ var new_response = allocator.create(Response) catch unreachable;
+ this.cloneInto(new_response, allocator, globalThis);
+ return new_response;
+ }
+
+ pub usingnamespace BlobInterface(@This());
+
+ pub fn getStatus(
+ this: *Response,
+ ctx: js.JSContextRef,
+ _: js.JSValueRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ // https://developer.mozilla.org/en-US/docs/Web/API/Response/status
+ return js.JSValueMakeNumber(ctx, @intToFloat(f64, this.body.init.status_code));
+ }
+
+ pub fn finalize(
+ this: *Response,
+ ) void {
+ this.body.deinit(this.allocator);
+
+ var allocator = this.allocator;
+
+ if (this.status_text.len > 0) {
+ allocator.free(this.status_text);
+ }
+
+ if (this.url.len > 0) {
+ allocator.free(this.url);
+ }
+
+ allocator.destroy(this);
+ }
+
+ pub fn mimeType(response: *const Response, request_ctx_: ?*const RequestContext) string {
+ return mimeTypeWithDefault(response, MimeType.other, request_ctx_);
+ }
+
+ pub fn mimeTypeWithDefault(response: *const Response, default: MimeType, request_ctx_: ?*const RequestContext) string {
+ if (response.header("content-type")) |content_type| {
+ // Remember, we always lowercase it.
+ // Hopefully that doesn't matter here.
+ return content_type;
+ }
+
+ if (request_ctx_) |request_ctx| {
+ if (request_ctx.url.extname.len > 0) {
+ return MimeType.byExtension(request_ctx.url.extname).value;
+ }
+ }
+
+ switch (response.body.value) {
+ .Blob => |blob| {
+ if (blob.content_type.len > 0) {
+ return blob.content_type;
+ }
+
+ return default.value;
+ },
+ .Used, .Locked, .Empty, .Error => return default.value,
+ }
+ }
+
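+ /// Implements the static `Response.json(body, init)` helper: JSON-stringifies `body`,
+ /// applies an optional numeric status or ResponseInit object, and defaults the
+ /// content-type header to application/json.
+ /// Illustrative JS usage (not from this diff): `Response.json({ ok: true }, 201)`.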
+ pub fn constructJSON(
+ _: void,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSObjectRef {
+ // https://github.com/remix-run/remix/blob/db2c31f64affb2095e4286b91306b96435967969/packages/remix-server-runtime/responses.ts#L4
+ var args = JSC.Node.ArgumentsSlice.from(ctx.bunVM(), arguments);
+ // var response = getAllocator(ctx).create(Response) catch unreachable;
+
+ var response = Response{
+ .body = Body{
+ .init = Body.Init{
+ .status_code = 200,
+ },
+ .value = Body.Value.empty,
+ },
+ .allocator = getAllocator(ctx),
+ .url = "",
+ };
+
+ const json_value = args.nextEat() orelse JSC.JSValue.zero;
+
+ if (@enumToInt(json_value) != 0) {
+ var zig_str = JSC.ZigString.init("");
+ // JSON.stringify of an empty string still produces `""` (two quote characters),
+ // so a zero-length result means there was nothing to stringify
+ json_value.jsonStringify(ctx.ptr(), 0, &zig_str);
+
+ if (zig_str.len > 0) {
+ var zig_str_slice = zig_str.toSlice(getAllocator(ctx));
+
+ if (zig_str_slice.allocated) {
+ response.body.value = .{
+ .Blob = Blob.initWithAllASCII(zig_str_slice.mut(), zig_str_slice.allocator, ctx.ptr(), false),
+ };
+ } else {
+ response.body.value = .{
+ .Blob = Blob.initWithAllASCII(getAllocator(ctx).dupe(u8, zig_str_slice.slice()) catch unreachable, zig_str_slice.allocator, ctx.ptr(), true),
+ };
+ }
+ }
+ }
+
+ if (args.nextEat()) |init| {
+ if (init.isUndefinedOrNull()) {} else if (init.isNumber()) {
+ response.body.init.status_code = @intCast(u16, @minimum(@maximum(0, init.toInt32()), std.math.maxInt(u16)));
+ } else {
+ if (Body.Init.init(getAllocator(ctx), ctx, init.asObjectRef()) catch null) |_init| {
+ response.body.init = _init;
+ }
+ }
+ }
+
+ var headers_ref = response.getOrCreateHeaders();
+ headers_ref.putDefault("content-type", MimeType.json.value);
+ var ptr = response.allocator.create(Response) catch unreachable;
+ ptr.* = response;
+
+ return Response.makeMaybePooled(ctx, ptr);
+ }
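+ /// Implements the static `Response.redirect(url, init)` helper: the `location` header
+ /// is set to the given URL and the status code is forced to 302.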
+ pub fn constructRedirect(
+ _: void,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSObjectRef {
+ // https://github.com/remix-run/remix/blob/db2c31f64affb2095e4286b91306b96435967969/packages/remix-server-runtime/responses.ts#L4
+ var args = JSC.Node.ArgumentsSlice.from(ctx.bunVM(), arguments);
+ // var response = getAllocator(ctx).create(Response) catch unreachable;
+
+ var response = Response{
+ .body = Body{
+ .init = Body.Init{
+ .status_code = 302,
+ },
+ .value = Body.Value.empty,
+ },
+ .allocator = getAllocator(ctx),
+ .url = "",
+ };
+
+ const url_string_value = args.nextEat() orelse JSC.JSValue.zero;
+ var url_string = ZigString.init("");
+
+ if (@enumToInt(url_string_value) != 0) {
+ url_string = url_string_value.getZigString(ctx.ptr());
+ }
+ var url_string_slice = url_string.toSlice(getAllocator(ctx));
+ defer url_string_slice.deinit();
+
+ if (args.nextEat()) |init| {
+ if (init.isUndefinedOrNull()) {} else if (init.isNumber()) {
+ response.body.init.status_code = @intCast(u16, @minimum(@maximum(0, init.toInt32()), std.math.maxInt(u16)));
+ } else {
+ if (Body.Init.init(getAllocator(ctx), ctx, init.asObjectRef()) catch null) |_init| {
+ response.body.init = _init;
+ }
+ }
+ }
+
+ response.body.init.headers = response.getOrCreateHeaders();
+ response.body.init.status_code = 302;
+ var headers_ref = response.body.init.headers.?;
+ headers_ref.put("location", url_string_slice.slice());
+ var ptr = response.allocator.create(Response) catch unreachable;
+ ptr.* = response;
+
+ return Response.makeMaybePooled(ctx, ptr);
+ }
+ pub fn constructError(
+ _: void,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSObjectRef {
+ var response = getAllocator(ctx).create(Response) catch unreachable;
+ response.* = Response{
+ .body = Body{
+ .init = Body.Init{
+ .status_code = 0,
+ },
+ .value = Body.Value.empty,
+ },
+ .allocator = getAllocator(ctx),
+ .url = "",
+ };
+
+ return Response.makeMaybePooled(
+ ctx,
+ response,
+ );
+ }
+
+ pub fn constructor(
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSObjectRef {
+ const body: Body = brk: {
+ switch (arguments.len) {
+ 0 => {
+ break :brk Body.@"200"(ctx);
+ },
+ 1 => {
+ break :brk Body.extract(ctx, arguments[0], exception);
+ },
+ else => {
+ if (js.JSValueGetType(ctx, arguments[1]) == js.JSType.kJSTypeObject) {
+ break :brk Body.extractWithInit(ctx, arguments[0], arguments[1], exception);
+ } else {
+ break :brk Body.extract(ctx, arguments[0], exception);
+ }
+ },
+ }
+ unreachable;
+ };
+
+ var response = getAllocator(ctx).create(Response) catch unreachable;
+ response.* = Response{
+ .body = body,
+ .allocator = getAllocator(ctx),
+ .url = "",
+ };
+ return Response.makeMaybePooled(
+ ctx,
+ response,
+ );
+ }
+};
+
+const null_fd = std.math.maxInt(JSC.Node.FileDescriptor);
+
+pub const Fetch = struct {
+ const headers_string = "headers";
+ const method_string = "method";
+
+ var fetch_body_string: MutableString = undefined;
+ var fetch_body_string_loaded = false;
+
+ const JSType = js.JSType;
+
+ const fetch_error_no_args = "fetch() expects a string but received no arguments.";
+ const fetch_error_blank_url = "fetch() URL must not be a blank string.";
+ const JSTypeErrorEnum = std.enums.EnumArray(JSType, string);
+ const fetch_type_error_names: JSTypeErrorEnum = brk: {
+ var errors = JSTypeErrorEnum.initUndefined();
+ errors.set(JSType.kJSTypeUndefined, "Undefined");
+ errors.set(JSType.kJSTypeNull, "Null");
+ errors.set(JSType.kJSTypeBoolean, "Boolean");
+ errors.set(JSType.kJSTypeNumber, "Number");
+ errors.set(JSType.kJSTypeString, "String");
+ errors.set(JSType.kJSTypeObject, "Object");
+ errors.set(JSType.kJSTypeSymbol, "Symbol");
+ break :brk errors;
+ };
+
+ const fetch_type_error_string_values = .{
+ std.fmt.comptimePrint("fetch() expects a string, but received {s}", .{fetch_type_error_names.get(JSType.kJSTypeUndefined)}),
+ std.fmt.comptimePrint("fetch() expects a string, but received {s}", .{fetch_type_error_names.get(JSType.kJSTypeNull)}),
+ std.fmt.comptimePrint("fetch() expects a string, but received {s}", .{fetch_type_error_names.get(JSType.kJSTypeBoolean)}),
+ std.fmt.comptimePrint("fetch() expects a string, but received {s}", .{fetch_type_error_names.get(JSType.kJSTypeNumber)}),
+ std.fmt.comptimePrint("fetch() expects a string, but received {s}", .{fetch_type_error_names.get(JSType.kJSTypeString)}),
+ std.fmt.comptimePrint("fetch() expects a string, but received {s}", .{fetch_type_error_names.get(JSType.kJSTypeObject)}),
+ std.fmt.comptimePrint("fetch() expects a string, but received {s}", .{fetch_type_error_names.get(JSType.kJSTypeSymbol)}),
+ };
+
+ const fetch_type_error_strings: JSTypeErrorEnum = brk: {
+ var errors = JSTypeErrorEnum.initUndefined();
+ errors.set(
+ JSType.kJSTypeUndefined,
+ std.mem.span(fetch_type_error_string_values[0]),
+ );
+ errors.set(
+ JSType.kJSTypeNull,
+ std.mem.span(fetch_type_error_string_values[1]),
+ );
+ errors.set(
+ JSType.kJSTypeBoolean,
+ std.mem.span(fetch_type_error_string_values[2]),
+ );
+ errors.set(
+ JSType.kJSTypeNumber,
+ std.mem.span(fetch_type_error_string_values[3]),
+ );
+ errors.set(
+ JSType.kJSTypeString,
+ std.mem.span(fetch_type_error_string_values[4]),
+ );
+ errors.set(
+ JSType.kJSTypeObject,
+ std.mem.span(fetch_type_error_string_values[5]),
+ );
+ errors.set(
+ JSType.kJSTypeSymbol,
+ std.mem.span(fetch_type_error_string_values[6]),
+ );
+ break :brk errors;
+ };
+
+ pub const Class = NewClass(
+ void,
+ .{ .name = "fetch" },
+ .{
+ .@"call" = .{
+ .rfn = Fetch.call,
+ .ts = d.ts{},
+ },
+ },
+ .{},
+ );
+
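+ /// One in-flight fetch(): owns the async HTTP request, the response buffer, and the
+ /// protected resolve/reject callbacks; instances are recycled through an ObjectPool.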
+ pub const FetchTasklet = struct {
+ promise: *JSInternalPromise = undefined,
+ http: HTTPClient.AsyncHTTP = undefined,
+ status: Status = Status.pending,
+ javascript_vm: *VirtualMachine = undefined,
+ global_this: *JSGlobalObject = undefined,
+
+ empty_request_body: MutableString = undefined,
+ // pooled_body: *BodyPool.Node = undefined,
+ this_object: js.JSObjectRef = null,
+ resolve: js.JSObjectRef = null,
+ reject: js.JSObjectRef = null,
+ context: FetchTaskletContext = undefined,
+ response_buffer: MutableString = undefined,
+
+ blob_store: ?*Blob.Store = null,
+
+ const Pool = ObjectPool(FetchTasklet, init, true, 32);
+ const BodyPool = ObjectPool(MutableString, MutableString.init2048, true, 8);
+ pub const FetchTaskletContext = struct {
+ tasklet: *FetchTasklet,
+ };
+
+ pub fn init(_: std.mem.Allocator) anyerror!FetchTasklet {
+ return FetchTasklet{};
+ }
+
+ pub const Status = enum(u8) {
+ pending,
+ running,
+ done,
+ };
+
+ pub fn onDone(this: *FetchTasklet) void {
+ if (comptime JSC.is_bindgen)
+ unreachable;
+ var args = [1]js.JSValueRef{undefined};
+
+ var callback_object = switch (this.http.state.load(.Monotonic)) {
+ .success => this.resolve,
+ .fail => this.reject,
+ else => unreachable,
+ };
+
+ args[0] = switch (this.http.state.load(.Monotonic)) {
+ .success => this.onResolve().asObjectRef(),
+ .fail => this.onReject().asObjectRef(),
+ else => unreachable,
+ };
+
+ _ = js.JSObjectCallAsFunction(this.global_this.ref(), callback_object, null, 1, &args, null);
+
+ this.release();
+ }
+
+ pub fn reset(_: *FetchTasklet) void {}
+
+ pub fn release(this: *FetchTasklet) void {
+ js.JSValueUnprotect(this.global_this.ref(), this.resolve);
+ js.JSValueUnprotect(this.global_this.ref(), this.reject);
+ js.JSValueUnprotect(this.global_this.ref(), this.this_object);
+
+ this.global_this = undefined;
+ this.javascript_vm = undefined;
+ this.promise = undefined;
+ this.status = Status.pending;
+ // var pooled = this.pooled_body;
+ // BodyPool.release(pooled);
+ // this.pooled_body = undefined;
+ this.http = undefined;
+ this.this_object = null;
+ this.resolve = null;
+ this.reject = null;
+ Pool.release(@fieldParentPtr(Pool.Node, "data", this));
+ }
+
+ pub const FetchResolver = struct {
+ pub fn call(
+ _: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: usize,
+ arguments: [*c]const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) callconv(.C) js.JSObjectRef {
+ return JSPrivateDataPtr.from(js.JSObjectGetPrivate(arguments[0]))
+ .get(FetchTaskletContext).?.tasklet.onResolve().asObjectRef();
+ // return js.JSObjectGetPrivate(arguments[0]).? .tasklet.onResolve().asObjectRef();
+ }
+ };
+
+ pub const FetchRejecter = struct {
+ pub fn call(
+ _: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: usize,
+ arguments: [*c]const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) callconv(.C) js.JSObjectRef {
+ return JSPrivateDataPtr.from(js.JSObjectGetPrivate(arguments[0]))
+ .get(FetchTaskletContext).?.tasklet.onReject().asObjectRef();
+ }
+ };
+
+ pub fn onReject(this: *FetchTasklet) JSValue {
+ if (this.blob_store) |store| {
+ store.deref();
+ }
+ const fetch_error = std.fmt.allocPrint(
+ default_allocator,
+ "fetch() failed – {s}\nurl: \"{s}\"",
+ .{
+ @errorName(this.http.err orelse error.HTTPFail),
+ this.http.url.href,
+ },
+ ) catch unreachable;
+ return ZigString.init(fetch_error).toErrorInstance(this.global_this);
+ }
+
+ pub fn onResolve(this: *FetchTasklet) JSValue {
+ var allocator = default_allocator;
+ var http_response = this.http.response.?;
+ var response = allocator.create(Response) catch unreachable;
+ if (this.blob_store) |store| {
+ store.deref();
+ }
+ response.* = Response{
+ .allocator = allocator,
+ .url = allocator.dupe(u8, this.http.url.href) catch unreachable,
+ .status_text = allocator.dupe(u8, http_response.status) catch unreachable,
+ .redirected = this.http.redirect_count > 0,
+ .body = .{
+ .init = .{
+ .headers = FetchHeaders.createFromPicoHeaders(this.global_this, http_response.headers),
+ .status_code = @truncate(u16, http_response.status_code),
+ },
+ .value = .{
+ .Blob = Blob.init(this.http.response_buffer.toOwnedSliceLeaky(), allocator, this.global_this),
+ },
+ },
+ };
+ return JSValue.fromRef(Response.makeMaybePooled(@ptrCast(js.JSContextRef, this.global_this), response));
+ }
+
+ pub fn get(
+ allocator: std.mem.Allocator,
+ method: Method,
+ url: ZigURL,
+ headers: Headers.Entries,
+ headers_buf: string,
+ request_body: ?*MutableString,
+ timeout: usize,
+ request_body_store: ?*Blob.Store,
+ ) !*FetchTasklet.Pool.Node {
+ var linked_list = FetchTasklet.Pool.get(allocator);
+ linked_list.data.javascript_vm = VirtualMachine.vm;
+ linked_list.data.empty_request_body = MutableString.init(allocator, 0) catch unreachable;
+ // linked_list.data.pooled_body = BodyPool.get(allocator);
+ linked_list.data.blob_store = request_body_store;
+ linked_list.data.response_buffer = MutableString.initEmpty(allocator);
+ linked_list.data.http = try HTTPClient.AsyncHTTP.init(
+ allocator,
+ method,
+ url,
+ headers,
+ headers_buf,
+ &linked_list.data.response_buffer,
+ request_body orelse &linked_list.data.empty_request_body,
+
+ timeout,
+ );
+ linked_list.data.context = .{ .tasklet = &linked_list.data };
+
+ return linked_list;
+ }
+
+ pub fn queue(
+ allocator: std.mem.Allocator,
+ global: *JSGlobalObject,
+ method: Method,
+ url: ZigURL,
+ headers: Headers.Entries,
+ headers_buf: string,
+ request_body: ?*MutableString,
+ timeout: usize,
+ request_body_store: ?*Blob.Store,
+ ) !*FetchTasklet.Pool.Node {
+ var node = try get(allocator, method, url, headers, headers_buf, request_body, timeout, request_body_store);
+ node.data.promise = JSInternalPromise.create(global);
+
+ node.data.global_this = global;
+ node.data.http.callback = callback;
+ var batch = NetworkThread.Batch{};
+ node.data.http.schedule(allocator, &batch);
+ NetworkThread.global.pool.schedule(batch);
+ VirtualMachine.vm.active_tasks +|= 1;
+ return node;
+ }
+
+ pub fn callback(http_: *HTTPClient.AsyncHTTP) void {
+ var task: *FetchTasklet = @fieldParentPtr(FetchTasklet, "http", http_);
+ @atomicStore(Status, &task.status, Status.done, .Monotonic);
+ task.javascript_vm.eventLoop().enqueueTaskConcurrent(Task.init(task));
+ }
+ };
+
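+ /// The global fetch() entry point: accepts a URL string (plus an optional init object) or a
+ /// Request, queues a FetchTasklet on the network thread, and returns a deferred promise
+ /// that FetchResolver/FetchRejecter settle when the request finishes.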
+ pub fn call(
+ _: void,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSObjectRef {
+ var globalThis = ctx.ptr();
+
+ if (arguments.len == 0) {
+ const fetch_error = fetch_error_no_args;
+ return JSPromise.rejectedPromiseValue(globalThis, ZigString.init(fetch_error).toErrorInstance(globalThis)).asRef();
+ }
+
+ var headers: ?Headers = null;
+ var body: MutableString = MutableString.initEmpty(bun.default_allocator);
+ var method = Method.GET;
+ var args = JSC.Node.ArgumentsSlice.from(ctx.bunVM(), arguments);
+ var url: ZigURL = undefined;
+ var first_arg = args.nextEat().?;
+ var blob_store: ?*Blob.Store = null;
+ if (first_arg.isString()) {
+ var url_zig_str = ZigString.init("");
+ JSValue.fromRef(arguments[0]).toZigString(&url_zig_str, globalThis);
+ var url_str = url_zig_str.slice();
+
+ if (url_str.len == 0) {
+ const fetch_error = fetch_error_blank_url;
+ return JSPromise.rejectedPromiseValue(globalThis, ZigString.init(fetch_error).toErrorInstance(globalThis)).asRef();
+ }
+
+ if (url_str[0] == '/') {
+ url_str = strings.append(getAllocator(ctx), VirtualMachine.vm.bundler.options.origin.origin, url_str) catch unreachable;
+ } else {
+ url_str = getAllocator(ctx).dupe(u8, url_str) catch unreachable;
+ }
+
+ NetworkThread.init() catch @panic("Failed to start network thread");
+ url = ZigURL.parse(url_str);
+
+ if (arguments.len >= 2 and js.JSValueIsObject(ctx, arguments[1])) {
+ var options = JSValue.fromRef(arguments[1]);
+ if (options.get(ctx.ptr(), "method")) |method_| {
+ var slice_ = method_.toSlice(ctx.ptr(), getAllocator(ctx));
+ defer slice_.deinit();
+ method = Method.which(slice_.slice()) orelse .GET;
+ }
+
+ if (options.get(ctx.ptr(), "headers")) |headers_| {
+ if (headers_.as(FetchHeaders)) |headers__| {
+ headers = Headers.from(headers__, bun.default_allocator) catch unreachable;
+ // TODO: make this one pass
+ } else if (FetchHeaders.createFromJS(ctx.ptr(), headers_)) |headers__| {
+ headers = Headers.from(headers__, bun.default_allocator) catch unreachable;
+ headers__.deref();
+ }
+ }
+
+ if (options.get(ctx.ptr(), "body")) |body__| {
+ if (Blob.fromJS(ctx.ptr(), body__, true, false)) |new_blob| {
+ if (new_blob.size > 0) {
+ body = MutableString{
+ .list = std.ArrayListUnmanaged(u8){
+ .items = bun.constStrToU8(new_blob.sharedView()),
+ .capacity = new_blob.size,
+ },
+ .allocator = bun.default_allocator,
+ };
+ blob_store = new_blob.store;
+ }
+ // transfer is unnecessary here because this is a new slice
+ //new_blob.transfer();
+ } else |_| {
+ return JSPromise.rejectedPromiseValue(globalThis, ZigString.init("fetch() received invalid body").toErrorInstance(globalThis)).asRef();
+ }
+ }
+ }
+ } else if (first_arg.asCheckLoaded(Request)) |request| {
+ url = ZigURL.parse(request.url.dupe(getAllocator(ctx)) catch unreachable);
+ method = request.method;
+ if (request.headers) |head| {
+ headers = Headers.from(head, bun.default_allocator) catch unreachable;
+ }
+ var blob = request.body.use();
+ // TODO: make RequestBody _NOT_ a MutableString
+ body = MutableString{
+ .list = std.ArrayListUnmanaged(u8){
+ .items = bun.constStrToU8(blob.sharedView()),
+ .capacity = bun.constStrToU8(blob.sharedView()).len,
+ },
+ .allocator = blob.allocator orelse bun.default_allocator,
+ };
+ blob_store = blob.store;
+ } else {
+ const fetch_error = fetch_type_error_strings.get(js.JSValueGetType(ctx, arguments[0]));
+ return JSPromise.rejectedPromiseValue(globalThis, ZigString.init(fetch_error).toErrorInstance(globalThis)).asRef();
+ }
+
+ var header_entries: Headers.Entries = .{};
+ var header_buf: string = "";
+
+ if (headers) |head| {
+ header_entries = head.entries;
+ header_buf = head.buf.items;
+ }
+ var resolve = js.JSObjectMakeFunctionWithCallback(ctx, null, Fetch.FetchTasklet.FetchResolver.call);
+ var reject = js.JSObjectMakeFunctionWithCallback(ctx, null, Fetch.FetchTasklet.FetchRejecter.call);
+
+ js.JSValueProtect(ctx, resolve);
+ js.JSValueProtect(ctx, reject);
+
+ var request_body: ?*MutableString = null;
+ if (body.list.items.len > 0) {
+ var mutable = bun.default_allocator.create(MutableString) catch unreachable;
+ mutable.* = body;
+ request_body = mutable;
+ }
+
+ // var resolve = FetchTasklet.FetchResolver.Class.make(ctx: js.JSContextRef, ptr: *ZigType)
+ var queued = FetchTasklet.queue(
+ default_allocator,
+ globalThis,
+ method,
+ url,
+ header_entries,
+ header_buf,
+ request_body,
+ std.time.ns_per_hour,
+ blob_store,
+ ) catch unreachable;
+ queued.data.this_object = js.JSObjectMake(ctx, null, JSPrivateDataPtr.from(&queued.data.context).ptr());
+ js.JSValueProtect(ctx, queued.data.this_object);
+
+ var promise = js.JSObjectMakeDeferredPromise(ctx, &resolve, &reject, exception);
+ queued.data.reject = reject;
+ queued.data.resolve = resolve;
+
+ return promise;
+ // queued.data.promise.create(globalThis: *JSGlobalObject)
+ }
+};
+
+// https://developer.mozilla.org/en-US/docs/Web/API/Headers
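+// A flattened, allocator-owned copy of a FetchHeaders object: names and values are
+// StringPointers into a single `buf`, so the HTTP client can use it without holding
+// any JavaScript references.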
+pub const Headers = struct {
+ pub usingnamespace HTTPClient.Headers;
+ entries: Headers.Entries = .{},
+ buf: std.ArrayListUnmanaged(u8) = .{},
+ allocator: std.mem.Allocator,
+
+ pub fn asStr(this: *const Headers, ptr: Api.StringPointer) []const u8 {
+ return if (ptr.offset + ptr.length <= this.buf.items.len)
+ this.buf.items[ptr.offset..][0..ptr.length]
+ else
+ "";
+ }
+
+ pub fn from(headers_ref: *FetchHeaders, allocator: std.mem.Allocator) !Headers {
+ var header_count: u32 = 0;
+ var buf_len: u32 = 0;
+ headers_ref.count(&header_count, &buf_len);
+ var headers = Headers{
+ .entries = .{},
+ .buf = .{},
+ .allocator = allocator,
+ };
+ headers.entries.ensureTotalCapacity(allocator, header_count) catch unreachable;
+ headers.entries.len = header_count;
+ headers.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch unreachable;
+ headers.buf.items.len = buf_len;
+ var sliced = headers.entries.slice();
+ var names = sliced.items(.name);
+ var values = sliced.items(.value);
+ headers_ref.copyTo(names.ptr, values.ptr, headers.buf.items.ptr);
+ return headers;
+ }
+};
+
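+// Argument helper used by Bun.write(): the destination may be a path, a file descriptor,
+// or an existing Blob.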
+const PathOrBlob = union(enum) {
+ path: JSC.Node.PathOrFileDescriptor,
+ blob: Blob,
+
+ pub fn fromJS(ctx: js.JSContextRef, args: *JSC.Node.ArgumentsSlice, exception: js.ExceptionRef) ?PathOrBlob {
+ if (JSC.Node.PathOrFileDescriptor.fromJS(ctx, args, exception)) |path| {
+ return PathOrBlob{ .path = .{
+ .path = .{
+ .string = bun.PathString.init((bun.default_allocator.dupeZ(u8, path.path.slice()) catch unreachable)[0..path.path.slice().len]),
+ },
+ } };
+ }
+
+ const arg = args.nextEat() orelse return null;
+
+ if (arg.as(Blob)) |blob| {
+ return PathOrBlob{
+ .blob = blob.dupe(),
+ };
+ }
+
+ return null;
+ }
+};
+
+pub const Blob = struct {
+ size: SizeType = 0,
+ offset: SizeType = 0,
+ /// When set, the blob will be freed on finalization callbacks
+ /// If the blob is contained in Response or Request, this must be null
+ allocator: ?std.mem.Allocator = null,
+ store: ?*Store = null,
+ content_type: string = "",
+ content_type_allocated: bool = false,
+
+ /// JavaScriptCore strings are either latin1 or UTF-16
+ /// When UTF-16, they're nearly always due to non-ascii characters
+ is_all_ascii: ?bool = null,
+
+ globalThis: *JSGlobalObject = undefined,
+
+ /// Max int of double precision
+ /// 9 petabytes is probably enough for a while
+ /// We want to avoid coercing to a BigInt because that's a heap allocation
+ /// and it's generally just harder to use
+ pub const SizeType = u52;
+ pub const max_size = std.math.maxInt(SizeType);
+
+ const CopyFilePromiseHandler = struct {
+ promise: *JSPromise,
+ globalThis: *JSGlobalObject,
+ pub fn run(handler: *@This(), blob_: Store.CopyFile.ResultType) void {
+ var promise = handler.promise;
+ var globalThis = handler.globalThis;
+ bun.default_allocator.destroy(handler);
+ var blob = blob_ catch |err| {
+ var error_string = ZigString.init(
+ std.fmt.allocPrint(bun.default_allocator, "Failed to write file \"{s}\"", .{std.mem.span(@errorName(err))}) catch unreachable,
+ );
+ error_string.mark();
+
+ promise.reject(globalThis, error_string.toErrorInstance(globalThis));
+ return;
+ };
+ var _blob = bun.default_allocator.create(Blob) catch unreachable;
+ _blob.* = blob;
+ _blob.allocator = bun.default_allocator;
+ promise.resolve(
+ globalThis,
+ );
+ }
+ };
+
+ const WriteFileWaitFromLockedValueTask = struct {
+ file_blob: Blob,
+ globalThis: *JSGlobalObject,
+ promise: *JSPromise,
+
+ pub fn thenWrap(this: *anyopaque, value: *Body.Value) void {
+ then(bun.cast(*WriteFileWaitFromLockedValueTask, this), value);
+ }
+
+ pub fn then(this: *WriteFileWaitFromLockedValueTask, value: *Body.Value) void {
+ var promise = this.promise;
+ var globalThis = this.globalThis;
+ var file_blob = this.file_blob;
+ switch (value.*) {
+ .Error => |err| {
+ file_blob.detach();
+ _ = value.use();
+ bun.default_allocator.destroy(this);
+ promise.reject(globalThis, err);
+ },
+ .Used => {
+ file_blob.detach();
+ _ = value.use();
+ bun.default_allocator.destroy(this);
+ promise.reject(globalThis, ZigString.init("Body was used after it was consumed").toErrorInstance(globalThis));
+ },
+ .Empty, .Blob => {
+ var blob = value.use();
+ // TODO: this should be one promise not two!
+ const new_promise = writeFileWithSourceDestination(globalThis.ref(), &blob, &file_blob);
+ if (JSC.JSValue.fromRef(new_promise.?).asPromise()) |_promise| {
+ switch (_promise.status(globalThis.vm())) {
+ .Pending => {
+ promise.resolve(
+ globalThis,
+ JSC.JSValue.fromRef(new_promise.?),
+ );
+ },
+ .Rejected => {
+ promise.reject(globalThis, _promise.result(globalThis.vm()));
+ },
+ else => {
+ promise.resolve(globalThis, _promise.result(globalThis.vm()));
+ },
+ }
+ } else if (JSC.JSValue.fromRef(new_promise.?).asInternalPromise()) |_promise| {
+ switch (_promise.status(globalThis.vm())) {
+ .Pending => {
+ promise.resolve(
+ globalThis,
+ JSC.JSValue.fromRef(new_promise.?),
+ );
+ },
+ .Rejected => {
+ promise.reject(globalThis, _promise.result(globalThis.vm()));
+ },
+ else => {
+ promise.resolve(globalThis, _promise.result(globalThis.vm()));
+ },
+ }
+ }
+
+ file_blob.detach();
+ bun.default_allocator.destroy(this);
+ },
+ .Locked => {
+ value.Locked.callback = thenWrap;
+ value.Locked.task = this;
+ },
+ }
+ }
+ };
+
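+ // Dispatches on the (destination, source) store types:
+ //   file  <- bytes : asynchronous WriteFile task
+ //   file  <- file  : asynchronous CopyFile task
+ //   bytes <- bytes : duplicate the bytes
+ //   bytes <- file  : resolve immediately with a slice of the source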
+ pub fn writeFileWithSourceDestination(
+ ctx: JSC.C.JSContextRef,
+ source_blob: *Blob,
+ destination_blob: *Blob,
+ ) js.JSObjectRef {
+ const destination_type = std.meta.activeTag(destination_blob.store.?.data);
+
+ // Writing an empty blob (a source with no backing store) to a file is a no-op
+ if (source_blob.store == null) {
+ destination_blob.detach();
+ return JSC.JSPromise.resolvedPromiseValue(ctx.ptr(), JSC.JSValue.jsNumber(0)).asObjectRef();
+ }
+
+ const source_type = std.meta.activeTag(source_blob.store.?.data);
+
+ if (destination_type == .file and source_type == .bytes) {
+ var write_file_promise = bun.default_allocator.create(WriteFilePromise) catch unreachable;
+ write_file_promise.* = .{
+ .promise = JSC.JSPromise.create(ctx.ptr()),
+ .globalThis = ctx.ptr(),
+ };
+ JSC.C.JSValueProtect(ctx, write_file_promise.promise.asValue(ctx.ptr()).asObjectRef());
+
+ var file_copier = Store.WriteFile.create(
+ bun.default_allocator,
+ destination_blob.*,
+ source_blob.*,
+ *WriteFilePromise,
+ write_file_promise,
+ WriteFilePromise.run,
+ ) catch unreachable;
+ var task = Store.WriteFile.WriteFileTask.createOnJSThread(bun.default_allocator, ctx.ptr(), file_copier) catch unreachable;
+ task.schedule();
+ return write_file_promise.promise.asValue(ctx.ptr()).asObjectRef();
+ }
+ // If this is file <> file, we can just copy the file
+ else if (destination_type == .file and source_type == .file) {
+ var file_copier = Store.CopyFile.create(
+ bun.default_allocator,
+ destination_blob.store.?,
+ source_blob.store.?,
+
+ destination_blob.offset,
+ destination_blob.size,
+ ctx.ptr(),
+ ) catch unreachable;
+ file_copier.schedule();
+ return file_copier.promise.asObjectRef();
+ } else if (destination_type == .bytes and source_type == .bytes) {
+ // If this is bytes <> bytes, we can just duplicate it
+ // this is an edge case
+ // it will happen if someone did Bun.write(new Blob([123]), new Blob([456]))
+ // eventually, this could be like Buffer.concat
+ var clone = source_blob.dupe();
+ clone.allocator = bun.default_allocator;
+ var cloned = bun.default_allocator.create(Blob) catch unreachable;
+ cloned.* = clone;
+ return JSPromise.resolvedPromiseValue(ctx.ptr(), JSC.JSValue.fromRef(Blob.Class.make(ctx, cloned))).asObjectRef();
+ } else if (destination_type == .bytes and source_type == .file) {
+ return JSPromise.resolvedPromiseValue(
+ ctx.ptr(),
+ JSC.JSValue.fromRef(
+ source_blob.getSlice(ctx, undefined, undefined, &.{}, null),
+ ),
+ ).asObjectRef();
+ }
+
+ unreachable;
+ }
+ pub fn writeFile(
+ _: void,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSObjectRef {
+ var args = JSC.Node.ArgumentsSlice.from(ctx.bunVM(), arguments);
+ // accept a path or a blob
+ var path_or_blob = PathOrBlob.fromJS(ctx, &args, exception) orelse {
+ exception.* = JSC.toInvalidArguments("Bun.write expects a path, file descriptor or a blob", .{}, ctx).asObjectRef();
+ return null;
+ };
+
+ // if path_or_blob is a path, convert it into a file blob
+ var destination_blob: Blob = if (path_or_blob == .path)
+ Blob.findOrCreateFileFromPath(path_or_blob.path, ctx.ptr())
+ else
+ path_or_blob.blob.dupe();
+
+ if (destination_blob.store == null) {
+ exception.* = JSC.toInvalidArguments("Writing to an empty blob is not implemented yet", .{}, ctx).asObjectRef();
+ return null;
+ }
+
+ var data = args.nextEat() orelse {
+ exception.* = JSC.toInvalidArguments("Bun.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}, ctx).asObjectRef();
+ return null;
+ };
+
+ if (data.isUndefinedOrNull() or data.isEmpty()) {
+ exception.* = JSC.toInvalidArguments("Bun.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}, ctx).asObjectRef();
+ return null;
+ }
+
+ // TODO: implement a writeev() fast path
+ var source_blob: Blob = brk: {
+ if (data.as(Response)) |response| {
+ switch (response.body.value) {
+ .Used, .Empty, .Blob => {
+ break :brk response.body.use();
+ },
+ .Error => {
+ destination_blob.detach();
+ const err = response.body.value.Error;
+ JSC.C.JSValueUnprotect(ctx, err.asObjectRef());
+ _ = response.body.value.use();
+ return JSC.JSPromise.rejectedPromiseValue(ctx.ptr(), err).asObjectRef();
+ },
+ .Locked => {
+ var task = bun.default_allocator.create(WriteFileWaitFromLockedValueTask) catch unreachable;
+ var promise = JSC.JSPromise.create(ctx.ptr());
+ task.* = WriteFileWaitFromLockedValueTask{
+ .globalThis = ctx.ptr(),
+ .file_blob = destination_blob,
+ .promise = promise,
+ };
+
+ response.body.value.Locked.task = task;
+ response.body.value.Locked.callback = WriteFileWaitFromLockedValueTask.thenWrap;
+
+ return promise.asValue(ctx.ptr()).asObjectRef();
+ },
+ }
+ }
+
+ if (data.as(Request)) |request| {
+ switch (request.body) {
+ .Used, .Empty, .Blob => {
+ break :brk request.body.use();
+ },
+ .Error => {
+ destination_blob.detach();
+ const err = request.body.Error;
+ JSC.C.JSValueUnprotect(ctx, err.asObjectRef());
+ _ = request.body.use();
+ return JSC.JSPromise.rejectedPromiseValue(ctx.ptr(), err).asObjectRef();
+ },
+ .Locked => {
+ var task = bun.default_allocator.create(WriteFileWaitFromLockedValueTask) catch unreachable;
+ var promise = JSC.JSPromise.create(ctx.ptr());
+ task.* = WriteFileWaitFromLockedValueTask{
+ .globalThis = ctx.ptr(),
+ .file_blob = destination_blob,
+ .promise = promise,
+ };
+
+ request.body.Locked.task = task;
+ request.body.Locked.callback = WriteFileWaitFromLockedValueTask.thenWrap;
+
+ return promise.asValue(ctx.ptr()).asObjectRef();
+ },
+ }
+ }
+
+ break :brk Blob.fromJS(
+ ctx.ptr(),
+ data,
+ false,
+ false,
+ ) catch |err| {
+ if (err == error.InvalidArguments) {
+ exception.* = JSC.toInvalidArguments(
+ "Expected an Array",
+ .{},
+ ctx,
+ ).asObjectRef();
+ return null;
+ }
+
+ exception.* = JSC.toInvalidArguments(
+ "Out of memory",
+ .{},
+ ctx,
+ ).asObjectRef();
+ return null;
+ };
+ };
+
+ return writeFileWithSourceDestination(ctx, &source_blob, &destination_blob);
+ }
+
+ pub fn constructFile(
+ _: void,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSObjectRef {
+ var args = JSC.Node.ArgumentsSlice.from(ctx.bunVM(), arguments);
+ defer args.deinit();
+
+ var path = JSC.Node.PathOrFileDescriptor.fromJS(ctx, &args, exception) orelse {
+ exception.* = JSC.toInvalidArguments("Expected file path string or file descriptor", .{}, ctx).asObjectRef();
+ return js.JSValueMakeUndefined(ctx);
+ };
+
+ const blob = Blob.findOrCreateFileFromPath(path, ctx.ptr());
+
+ var ptr = bun.default_allocator.create(Blob) catch unreachable;
+ ptr.* = blob;
+ ptr.allocator = bun.default_allocator;
+ return Blob.Class.make(ctx, ptr);
+ }
+
+ pub fn findOrCreateFileFromPath(path_: JSC.Node.PathOrFileDescriptor, globalThis: *JSGlobalObject) Blob {
+ var path = path_;
+ var vm = globalThis.bunVM();
+ if (vm.getFileBlob(path)) |blob| {
+ blob.ref();
+ return Blob.initWithStore(blob, globalThis);
+ }
+
+ switch (path) {
+ .path => {
+ path.path = .{
+ .string = bun.PathString.init(
+ (bun.default_allocator.dupeZ(u8, path.path.slice()) catch unreachable)[0..path.path.slice().len],
+ ),
+ };
+ },
+ .fd => {
+ switch (path.fd) {
+ std.os.STDIN_FILENO => return Blob.initWithStore(
+ vm.rareData().stdin(),
+ globalThis,
+ ),
+ std.os.STDERR_FILENO => return Blob.initWithStore(
+ vm.rareData().stderr(),
+ globalThis,
+ ),
+ std.os.STDOUT_FILENO => return Blob.initWithStore(
+ vm.rareData().stdout(),
+ globalThis,
+ ),
+ else => {},
+ }
+ },
+ }
+
+ const result = Blob.initWithStore(Blob.Store.initFile(path, null, bun.default_allocator) catch unreachable, globalThis);
+ vm.putFileBlob(path, result.store.?) catch unreachable;
+ return result;
+ }
+
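+ /// Reference-counted backing storage for a Blob: either in-memory bytes or a file
+ /// (path or file descriptor). deref() frees the storage once the count reaches zero.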
+ pub const Store = struct {
+ data: Data,
+
+ mime_type: MimeType = MimeType.other,
+ ref_count: u32 = 0,
+ is_all_ascii: ?bool = null,
+ allocator: std.mem.Allocator,
+
+ pub fn size(this: *const Store) SizeType {
+ return switch (this.data) {
+ .bytes => this.data.bytes.len,
+ .file => Blob.max_size,
+ };
+ }
+
+ pub const Map = std.HashMap(u64, *JSC.WebCore.Blob.Store, IdentityContext(u64), 80);
+
+ pub const Data = union(enum) {
+ bytes: ByteStore,
+ file: FileStore,
+ };
+
+ pub fn ref(this: *Store) void {
+ this.ref_count += 1;
+ }
+
+ pub fn external(ptr: ?*anyopaque, _: ?*anyopaque, _: usize) callconv(.C) void {
+ if (ptr == null) return;
+ var this = bun.cast(*Store, ptr);
+ this.deref();
+ }
+
+ pub fn initFile(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?HTTPClient.MimeType, allocator: std.mem.Allocator) !*Store {
+ var store = try allocator.create(Blob.Store);
+ store.* = .{
+ .data = .{ .file = FileStore.init(
+ pathlike,
+ mime_type orelse brk: {
+ if (pathlike == .path) {
+ const sliced = pathlike.path.slice();
+ if (sliced.len > 0) {
+ var extname = std.fs.path.extension(sliced);
+ extname = std.mem.trim(u8, extname, ".");
+ if (HTTPClient.MimeType.byExtensionNoDefault(extname)) |mime| {
+ break :brk mime;
+ }
+ }
+ }
+
+ break :brk null;
+ },
+ ) },
+ .allocator = allocator,
+ .ref_count = 1,
+ };
+ return store;
+ }
+
+ pub fn init(bytes: []u8, allocator: std.mem.Allocator) !*Store {
+ var store = try allocator.create(Blob.Store);
+ store.* = .{
+ .data = .{ .bytes = ByteStore.init(bytes, allocator) },
+ .allocator = allocator,
+ .ref_count = 1,
+ };
+ return store;
+ }
+
+ pub fn sharedView(this: Store) []u8 {
+ if (this.data == .bytes)
+ return this.data.bytes.slice();
+
+ return &[_]u8{};
+ }
+
+ pub fn deref(this: *Blob.Store) void {
+ this.ref_count -= 1;
+ if (this.ref_count == 0) {
+ this.deinit();
+ }
+ }
+
+ pub fn deinit(this: *Blob.Store) void {
+ switch (this.data) {
+ .bytes => |*bytes| {
+ bytes.deinit();
+ },
+ .file => |file| {
+ VirtualMachine.vm.removeFileBlob(file.pathlike);
+ },
+ }
+
+ this.allocator.destroy(this);
+ }
+
+ pub fn fromArrayList(list: std.ArrayListUnmanaged(u8), allocator: std.mem.Allocator) !*Blob.Store {
+ return try Blob.Store.init(list.items, allocator);
+ }
+
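+ // Mixin that gives read/write tasks a non-blocking open(): a direct syscall on macOS,
+ // and an AsyncIO submission with suspend/resume on Linux.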
+ pub fn FileOpenerMixin(comptime This: type) type {
+ return struct {
+ const __opener_flags = std.os.O.NONBLOCK | std.os.O.CLOEXEC;
+ const open_flags_ = if (@hasDecl(This, "open_flags"))
+ This.open_flags | __opener_flags
+ else
+ std.os.O.RDONLY | __opener_flags;
+
+ pub fn getFdMac(this: *This) AsyncIO.OpenError!JSC.Node.FileDescriptor {
+ var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ var path_string = if (@hasField(This, "file_store"))
+ this.file_store.pathlike.path
+ else
+ this.file_blob.store.?.data.file.pathlike.path;
+
+ var path = path_string.sliceZ(&buf);
+
+ this.opened_fd = switch (JSC.Node.Syscall.open(path, open_flags_, JSC.Node.default_permission)) {
+ .result => |fd| fd,
+ .err => |err| {
+ this.errno = AsyncIO.asError(err.errno);
+ this.system_error = err.withPath(path_string.slice()).toSystemError();
+
+ return @errSetCast(AsyncIO.OpenError, this.errno.?);
+ },
+ };
+
+ return this.opened_fd;
+ }
+
+ pub fn getFd(this: *This) AsyncIO.OpenError!JSC.Node.FileDescriptor {
+ if (this.opened_fd != null_fd) {
+ return this.opened_fd;
+ }
+
+ if (comptime Environment.isMac) {
+ return try this.getFdMac();
+ } else {
+ return try this.getFdLinux();
+ }
+ }
+
+ pub fn getFdLinux(this: *This) AsyncIO.OpenError!JSC.Node.FileDescriptor {
+ var aio = &AsyncIO.global;
+
+ var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ var path_string = if (@hasField(This, "file_store"))
+ this.file_store.pathlike.path
+ else
+ this.file_blob.store.?.data.file.pathlike.path;
+
+ var path = path_string.sliceZ(&buf);
+
+ aio.open(
+ *This,
+ this,
+ onOpen,
+ &this.open_completion,
+ path,
+ open_flags_,
+ JSC.Node.default_permission,
+ );
+
+ suspend {
+ this.open_frame = @frame().*;
+ }
+
+ if (this.errno) |errno| {
+ this.system_error = .{
+ .syscall = ZigString.init("open"),
+ .code = ZigString.init(std.mem.span(@errorName(errno))),
+ .path = ZigString.init(path_string.slice()),
+ };
+
+ return @errSetCast(AsyncIO.OpenError, errno);
+ }
+
+ return this.opened_fd;
+ }
+
+ pub fn onOpen(this: *This, completion: *HTTPClient.NetworkThread.Completion, result: AsyncIO.OpenError!JSC.Node.FileDescriptor) void {
+ this.opened_fd = result catch {
+ this.errno = AsyncIO.asError(-completion.result);
+
+ if (comptime Environment.isLinux) resume this.open_frame;
+ return;
+ };
+
+ if (comptime Environment.isLinux) resume this.open_frame;
+ }
+ };
+ }
+
+ pub fn FileCloserMixin(comptime This: type) type {
+ return struct {
+ pub fn doClose(this: *This) AsyncIO.CloseError!void {
+ var aio = &AsyncIO.global;
+
+ aio.close(
+ *This,
+ this,
+ onClose,
+ &this.close_completion,
+ this.opened_fd,
+ );
+ this.opened_fd = null_fd;
+
+ suspend {
+ this.close_frame = @frame().*;
+ }
+
+ if (@hasField(This, "errno")) {
+ if (this.errno) |errno| {
+ return @errSetCast(AsyncIO.CloseError, errno);
+ }
+ }
+ }
+
+ pub fn onClose(this: *This, _: *HTTPClient.NetworkThread.Completion, result: AsyncIO.CloseError!void) void {
+ result catch |err| {
+ if (@hasField(This, "errno")) {
+ this.errno = err;
+ }
+ resume this.close_frame;
+ return;
+ };
+
+ resume this.close_frame;
+ }
+ };
+ }
+
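+ /// Async task that opens (when given a path), reads, and closes a file-backed store off
+ /// the JS thread; for regular files opened by path, the store is converted into an
+ /// in-memory byte store when the read completes.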
+ pub const ReadFile = struct {
+ const OpenFrameType = if (Environment.isMac)
+ void
+ else
+ @Frame(ReadFile.getFdLinux);
+ file_store: FileStore,
+ byte_store: ByteStore = ByteStore{ .allocator = bun.default_allocator },
+ store: ?*Store = null,
+ offset: SizeType = 0,
+ max_length: SizeType = Blob.max_size,
+ open_frame: OpenFrameType = undefined,
+ read_frame: @Frame(ReadFile.doRead) = undefined,
+ close_frame: @Frame(ReadFile.doClose) = undefined,
+ open_completion: HTTPClient.NetworkThread.Completion = undefined,
+ opened_fd: JSC.Node.FileDescriptor = null_fd,
+ read_completion: HTTPClient.NetworkThread.Completion = undefined,
+ read_len: SizeType = 0,
+ read_off: SizeType = 0,
+ size: SizeType = 0,
+ buffer: []u8 = undefined,
+ runAsyncFrame: @Frame(ReadFile.runAsync) = undefined,
+ close_completion: HTTPClient.NetworkThread.Completion = undefined,
+ task: HTTPClient.NetworkThread.Task = undefined,
+ system_error: ?JSC.SystemError = null,
+ errno: ?anyerror = null,
+ onCompleteCtx: *anyopaque = undefined,
+ onCompleteCallback: OnReadFileCallback = undefined,
+
+ convert_to_byte_blob: bool = false,
+
+ pub const Read = struct {
+ buf: []u8,
+ is_temporary: bool = false,
+ };
+ pub const ResultType = SystemError.Maybe(Read);
+
+ pub const OnReadFileCallback = fn (ctx: *anyopaque, bytes: ResultType) void;
+
+ pub usingnamespace FileOpenerMixin(ReadFile);
+ pub usingnamespace FileCloserMixin(ReadFile);
+
+ pub fn createWithCtx(
+ allocator: std.mem.Allocator,
+ store: *Store,
+ onReadFileContext: *anyopaque,
+ onCompleteCallback: OnReadFileCallback,
+ off: SizeType,
+ max_len: SizeType,
+ ) !*ReadFile {
+ var read_file = try allocator.create(ReadFile);
+ read_file.* = ReadFile{
+ .file_store = store.data.file,
+ .offset = off,
+ .max_length = max_len,
+ .store = store,
+ .onCompleteCtx = onReadFileContext,
+ .onCompleteCallback = onCompleteCallback,
+ };
+ store.ref();
+ return read_file;
+ }
+
+ pub fn create(
+ allocator: std.mem.Allocator,
+ store: *Store,
+ off: SizeType,
+ max_len: SizeType,
+ comptime Context: type,
+ context: Context,
+ comptime callback: fn (ctx: Context, bytes: ResultType) void,
+ ) !*ReadFile {
+ const Handler = struct {
+ pub fn run(ptr: *anyopaque, bytes: ResultType) void {
+ callback(bun.cast(Context, ptr), bytes);
+ }
+ };
+
+ return try ReadFile.createWithCtx(allocator, store, @ptrCast(*anyopaque, context), Handler.run, off, max_len);
+ }
+
+ pub fn doRead(this: *ReadFile) AsyncIO.ReadError!SizeType {
+ var aio = &AsyncIO.global;
+
+ var remaining = this.buffer[this.read_off..];
+ this.read_len = 0;
+ aio.read(
+ *ReadFile,
+ this,
+ onRead,
+ &this.read_completion,
+ this.opened_fd,
+ remaining[0..@minimum(remaining.len, this.max_length - this.read_off)],
+ this.offset + this.read_off,
+ );
+
+ suspend {
+ this.read_frame = @frame().*;
+ }
+
+ if (this.errno) |errno| {
+ this.system_error = JSC.SystemError{
+ .code = ZigString.init(std.mem.span(@errorName(errno))),
+ .path = if (this.file_store.pathlike == .path)
+ ZigString.init(this.file_store.pathlike.path.slice())
+ else
+ ZigString.Empty,
+ .syscall = ZigString.init("read"),
+ };
+
+ return @errSetCast(AsyncIO.ReadError, errno);
+ }
+
+ return this.read_len;
+ }
+
+ pub const ReadFileTask = JSC.IOTask(@This());
+
+ pub fn then(this: *ReadFile, _: *JSC.JSGlobalObject) void {
+ var cb = this.onCompleteCallback;
+ var cb_ctx = this.onCompleteCtx;
+
+ if (this.store == null and this.system_error != null) {
+ var system_error = this.system_error.?;
+ bun.default_allocator.destroy(this);
+ cb(cb_ctx, ResultType{ .err = system_error });
+ return;
+ } else if (this.store == null) {
+ bun.default_allocator.destroy(this);
+ cb(cb_ctx, ResultType{ .err = SystemError{
+ .code = ZigString.init("INTERNAL_ERROR"),
+ .path = ZigString.Empty,
+ .message = ZigString.init("assertion failure - store should not be null"),
+ .syscall = ZigString.init("read"),
+ } });
+ return;
+ }
+ var store = this.store.?;
+
+ if (this.convert_to_byte_blob and this.file_store.pathlike == .path) {
+ VirtualMachine.vm.removeFileBlob(this.file_store.pathlike);
+ }
+
+ if (this.system_error) |err| {
+ bun.default_allocator.destroy(this);
+ store.deref();
+ cb(cb_ctx, ResultType{ .err = err });
+ return;
+ }
+
+ var buf = this.buffer;
+ const is_temporary = !this.convert_to_byte_blob;
+ if (this.convert_to_byte_blob) {
+ if (store.data == .bytes) {
+ bun.default_allocator.free(this.buffer);
+ buf = store.data.bytes.slice();
+ } else if (store.data == .file) {
+ if (this.file_store.pathlike == .path) {
+ if (this.file_store.pathlike.path == .string) {
+ bun.default_allocator.free(this.file_store.pathlike.path.slice());
+ }
+ }
+ store.data = .{ .bytes = ByteStore.init(buf, bun.default_allocator) };
+ }
+ }
+
+ bun.default_allocator.destroy(this);
+
+ // Attempt to free it as soon as possible
+ if (store.ref_count > 1) {
+ store.deref();
+ cb(cb_ctx, .{ .result = .{ .buf = buf, .is_temporary = is_temporary } });
+ } else {
+ cb(cb_ctx, .{ .result = .{ .buf = buf, .is_temporary = is_temporary } });
+ store.deref();
+ }
+ }
+ pub fn run(this: *ReadFile, task: *ReadFileTask) void {
+ var frame = HTTPClient.getAllocator().create(@Frame(runAsync)) catch unreachable;
+ _ = @asyncCall(std.mem.asBytes(frame), undefined, runAsync, .{ this, task });
+ }
+
+ pub fn onRead(this: *ReadFile, completion: *HTTPClient.NetworkThread.Completion, result: AsyncIO.ReadError!usize) void {
+ this.read_len = @truncate(SizeType, result catch |err| {
+ if (@hasField(HTTPClient.NetworkThread.Completion, "result")) {
+ this.errno = AsyncIO.asError(-completion.result);
+ this.system_error = (JSC.Node.Syscall.Error{
+ .errno = @intCast(JSC.Node.Syscall.Error.Int, -completion.result),
+ .syscall = .read,
+ }).toSystemError();
+ } else {
+ this.errno = err;
+ this.system_error = .{ .code = ZigString.init(std.mem.span(@errorName(err))), .syscall = ZigString.init("read") };
+ }
+ this.read_len = 0;
+ resume this.read_frame;
+ return;
+ });
+
+ resume this.read_frame;
+ }
+
+ fn runAsync(this: *ReadFile, task: *ReadFileTask) void {
+ this.runAsync_();
+ task.onFinish();
+
+ suspend {
+ HTTPClient.getAllocator().destroy(@frame());
+ }
+ }
+
+ fn runAsync_(this: *ReadFile) void {
+ if (this.file_store.pathlike == .fd) {
+ this.opened_fd = this.file_store.pathlike.fd;
+ }
+
+ const fd = this.getFd() catch return;
+ const needs_close = this.file_store.pathlike == .path and fd != null_fd and fd > 2;
+ const stat: std.os.Stat = switch (JSC.Node.Syscall.fstat(fd)) {
+ .result => |result| result,
+ .err => |err| {
+ this.errno = AsyncIO.asError(err.errno);
+ this.system_error = err.toSystemError();
+ return;
+ },
+ };
+ if (std.os.S.ISDIR(stat.mode)) {
+ this.errno = error.EISDIR;
+ this.system_error = JSC.SystemError{
+ .code = ZigString.init("EISDIR"),
+ .path = if (this.file_store.pathlike == .path)
+ ZigString.init(this.file_store.pathlike.path.slice())
+ else
+ ZigString.Empty,
+ .message = ZigString.init("Directories cannot be read like files"),
+ .syscall = ZigString.init("read"),
+ };
+ return;
+ }
+
+ if (stat.size > 0 and std.os.S.ISREG(stat.mode)) {
+ this.size = @minimum(
+ @truncate(SizeType, @intCast(SizeType, @maximum(@intCast(i64, stat.size), 0))),
+ this.max_length,
+ );
+ // If no explicit size was set and this isn't a regular file,
+ // read up to 4k at a time.
+ } else if (stat.size == 0 and !std.os.S.ISREG(stat.mode)) {
+ this.size = if (this.max_length == Blob.max_size)
+ 4096
+ else
+ this.max_length;
+ }
+
+ if (this.size == 0) {
+ this.buffer = &[_]u8{};
+ this.byte_store = ByteStore.init(this.buffer, bun.default_allocator);
+
+ if (needs_close) {
+ this.doClose() catch {};
+ }
+ return;
+ }
+
+ var bytes = bun.default_allocator.alloc(u8, this.size) catch |err| {
+ this.errno = err;
+ if (needs_close) {
+ this.doClose() catch {};
+ }
+ return;
+ };
+ this.buffer = bytes;
+ this.convert_to_byte_blob = std.os.S.ISREG(stat.mode) and this.file_store.pathlike == .path;
+
+ var remain = bytes;
+ while (remain.len > 0) {
+ var read_len = this.doRead() catch {
+ if (needs_close) {
+ this.doClose() catch {};
+ }
+ return;
+ };
+ this.read_off += read_len;
+ if (read_len == 0) break;
+ remain = remain[read_len..];
+ }
+
+ _ = bun.default_allocator.resize(bytes, this.read_off);
+ this.buffer = bytes[0..this.read_off];
+ this.byte_store = ByteStore.init(this.buffer, bun.default_allocator);
+ }
+ };
+
+ pub const WriteFile = struct {
+ const OpenFrameType = if (Environment.isMac)
+ void
+ else
+ @Frame(WriteFile.getFdLinux);
+
+ file_blob: Blob,
+ bytes_blob: Blob,
+
+ opened_fd: JSC.Node.FileDescriptor = null_fd,
+ open_frame: OpenFrameType = undefined,
+ write_frame: @Frame(WriteFile.doWrite) = undefined,
+ close_frame: @Frame(WriteFile.doClose) = undefined,
+ system_error: ?JSC.SystemError = null,
+ errno: ?anyerror = null,
+ open_completion: HTTPClient.NetworkThread.Completion = undefined,
+
+ write_completion: HTTPClient.NetworkThread.Completion = undefined,
+ close_completion: HTTPClient.NetworkThread.Completion = undefined,
+ task: HTTPClient.NetworkThread.Task = undefined,
+
+ onCompleteCtx: *anyopaque = undefined,
+ onCompleteCallback: OnWriteFileCallback = undefined,
+ wrote: usize = 0,
+
+ pub const ResultType = SystemError.Maybe(SizeType);
+ pub const OnWriteFileCallback = fn (ctx: *anyopaque, count: ResultType) void;
+
+ pub usingnamespace FileOpenerMixin(WriteFile);
+ pub usingnamespace FileCloserMixin(WriteFile);
+
+ // Do not open with APPEND because we may use pwrite()
+ pub const open_flags = std.os.O.WRONLY | std.os.O.CREAT | std.os.O.TRUNC;
+
+ pub fn createWithCtx(
+ allocator: std.mem.Allocator,
+ file_blob: Blob,
+ bytes_blob: Blob,
+ onWriteFileContext: *anyopaque,
+ onCompleteCallback: OnWriteFileCallback,
+ ) !*WriteFile {
+ var write_file = try allocator.create(WriteFile);
+ write_file.* = WriteFile{
+ .file_blob = file_blob,
+ .bytes_blob = bytes_blob,
+ .onCompleteCtx = onWriteFileContext,
+ .onCompleteCallback = onCompleteCallback,
+ };
+ file_blob.store.?.ref();
+ bytes_blob.store.?.ref();
+ return write_file;
+ }
+
+ pub fn create(
+ allocator: std.mem.Allocator,
+ file_blob: Blob,
+ bytes_blob: Blob,
+ comptime Context: type,
+ context: Context,
+ comptime callback: fn (ctx: Context, bytes: ResultType) void,
+ ) !*WriteFile {
+ const Handler = struct {
+ pub fn run(ptr: *anyopaque, bytes: ResultType) void {
+ callback(bun.cast(Context, ptr), bytes);
+ }
+ };
+
+ return try WriteFile.createWithCtx(
+ allocator,
+ file_blob,
+ bytes_blob,
+ @ptrCast(*anyopaque, context),
+ Handler.run,
+ );
+ }
+
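+ // Issues a single async write and suspends until onWrite() resumes the frame;
+ // returns the number of bytes written or the error recorded in errno.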
+ pub fn doWrite(
+ this: *WriteFile,
+ buffer: []const u8,
+ file_offset: u64,
+ ) AsyncIO.WriteError!SizeType {
+ var aio = &AsyncIO.global;
+ this.wrote = 0;
+ const fd = this.opened_fd;
+ aio.write(
+ *WriteFile,
+ this,
+ onWrite,
+ &this.write_completion,
+ fd,
+ buffer,
+ if (fd > 2) file_offset else 0,
+ );
+
+ suspend {
+ this.write_frame = @frame().*;
+ }
+
+ if (this.errno) |errno| {
+ this.system_error = this.system_error orelse JSC.SystemError{
+ .code = ZigString.init(std.mem.span(@errorName(errno))),
+ .syscall = ZigString.init("write"),
+ };
+ return @errSetCast(AsyncIO.WriteError, errno);
+ }
+
+ return @truncate(SizeType, this.wrote);
+ }
+
+ pub const WriteFileTask = JSC.IOTask(@This());
+
+ pub fn then(this: *WriteFile, _: *JSC.JSGlobalObject) void {
+ var cb = this.onCompleteCallback;
+ var cb_ctx = this.onCompleteCtx;
+
+ this.bytes_blob.store.?.deref();
+ this.file_blob.store.?.deref();
+
+ if (this.system_error) |err| {
+ bun.default_allocator.destroy(this);
+ cb(cb_ctx, .{
+ .err = err,
+ });
+ return;
+ }
+
+ const wrote = this.wrote;
+ bun.default_allocator.destroy(this);
+ cb(cb_ctx, .{ .result = @truncate(SizeType, wrote) });
+ }
+ pub fn run(this: *WriteFile, task: *WriteFileTask) void {
+ var frame = HTTPClient.getAllocator().create(@Frame(runAsync)) catch unreachable;
+ _ = @asyncCall(std.mem.asBytes(frame), undefined, runAsync, .{ this, task });
+ }
+
+ fn runAsync(this: *WriteFile, task: *WriteFileTask) void {
+ this._runAsync();
+ task.onFinish();
+ suspend {
+ HTTPClient.getAllocator().destroy(@frame());
+ }
+ }
+
+ pub fn onWrite(this: *WriteFile, _: *HTTPClient.NetworkThread.Completion, result: AsyncIO.WriteError!usize) void {
+ this.wrote += @truncate(SizeType, result catch |err| {
+ this.errno = err;
+ this.wrote = 0;
+ resume this.write_frame;
+ return;
+ });
+
+ resume this.write_frame;
+ }
+
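+ // Resolves the fd, then writes the bytes blob in a loop starting at the file
+ // blob's offset until min(file size, byte length) bytes have been written.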
+ fn _runAsync(this: *WriteFile) void {
+ const file = this.file_blob.store.?.data.file;
+ if (file.pathlike == .fd) {
+ this.opened_fd = file.pathlike.fd;
+ }
+
+ const fd = this.getFd() catch return;
+ const needs_close = file.pathlike == .path and fd > 2;
+
+ var remain = this.bytes_blob.sharedView();
+
+ var total_written: usize = 0;
+ var file_offset = this.file_blob.offset;
+
+ const end =
+ @minimum(this.file_blob.size, remain.len);
+
+ while (remain.len > 0 and total_written < end) {
+ const wrote_len = this.doWrite(remain, file_offset) catch {
+ if (needs_close) {
+ this.doClose() catch {};
+ }
+ this.wrote = @truncate(SizeType, total_written);
+ return;
+ };
+ remain = remain[wrote_len..];
+ total_written += wrote_len;
+ file_offset += wrote_len;
+ if (wrote_len == 0) break;
+ }
+
+ this.wrote = @truncate(SizeType, total_written);
+
+ if (needs_close) {
+ this.doClose() catch {};
+ }
+ }
+ };
+
+ pub const IOWhich = enum {
+ source,
+ destination,
+ both,
+ };
+
+ const unsupported_directory_error = SystemError{
+ .errno = @intCast(c_int, @enumToInt(bun.C.SystemErrno.EISDIR)),
+ .message = ZigString.init("That doesn't work on folders"),
+ .syscall = ZigString.init("fstat"),
+ };
+ const unsupported_non_regular_file_error = SystemError{
+ .errno = @intCast(c_int, @enumToInt(bun.C.SystemErrno.ENOTSUP)),
+ .message = ZigString.init("Non-regular files aren't supported yet"),
+ .syscall = ZigString.init("fstat"),
+ };
+
+ // blocking, but off the main thread
+ pub const CopyFile = struct {
+ destination_file_store: FileStore,
+ source_file_store: FileStore,
+ store: ?*Store = null,
+ source_store: ?*Store = null,
+ offset: SizeType = 0,
+ size: SizeType = 0,
+ max_length: SizeType = Blob.max_size,
+ destination_fd: JSC.Node.FileDescriptor = null_fd,
+ source_fd: JSC.Node.FileDescriptor = null_fd,
+
+ system_error: ?SystemError = null,
+
+ read_len: SizeType = 0,
+ read_off: SizeType = 0,
+
+ globalThis: *JSGlobalObject,
+
+ pub const ResultType = anyerror!SizeType;
+
+ pub const Callback = fn (ctx: *anyopaque, len: ResultType) void;
+ pub const CopyFilePromiseTask = JSC.ConcurrentPromiseTask(CopyFile);
+ pub const CopyFilePromiseTaskEventLoopTask = CopyFilePromiseTask.EventLoopTask;
+
+ pub fn create(
+ allocator: std.mem.Allocator,
+ store: *Store,
+ source_store: *Store,
+ off: SizeType,
+ max_len: SizeType,
+ globalThis: *JSC.JSGlobalObject,
+ ) !*CopyFilePromiseTask {
+ var copy_file = try allocator.create(CopyFile);
+ copy_file.* = CopyFile{
+ .store = store,
+ .source_store = source_store,
+ .offset = off,
+ .max_length = max_len,
+ .globalThis = globalThis,
+ .destination_file_store = store.data.file,
+ .source_file_store = source_store.data.file,
+ };
+ store.ref();
+ source_store.ref();
+ return try CopyFilePromiseTask.createOnJSThread(allocator, globalThis, copy_file);
+ }
+
+ const linux = std.os.linux;
+ const darwin = std.os.darwin;
+
+ pub fn deinit(this: *CopyFile) void {
+ if (this.source_file_store.pathlike == .path) {
+ if (this.source_file_store.pathlike.path == .string and this.system_error == null) {
+ bun.default_allocator.free(bun.constStrToU8(this.source_file_store.pathlike.path.slice()));
+ }
+ }
+ this.store.?.deref();
+
+ bun.default_allocator.destroy(this);
+ }
+
+ pub fn reject(this: *CopyFile, promise: *JSC.JSInternalPromise) void {
+ var globalThis = this.globalThis;
+ var system_error: SystemError = this.system_error orelse SystemError{};
+ if (this.source_file_store.pathlike == .path and system_error.path.len == 0) {
+ system_error.path = ZigString.init(this.source_file_store.pathlike.path.slice());
+ system_error.path.mark();
+ }
+
+ if (system_error.message.len == 0) {
+ system_error.message = ZigString.init("Failed to copy file");
+ }
+
+ var instance = system_error.toErrorInstance(this.globalThis);
+ if (this.store) |store| {
+ store.deref();
+ }
+ promise.reject(globalThis, instance);
+ }
+
+ pub fn then(this: *CopyFile, promise: *JSC.JSInternalPromise) void {
+ this.source_store.?.deref();
+
+ if (this.system_error != null) {
+ this.reject(promise);
+ return;
+ }
+
+ promise.resolve(this.globalThis, JSC.JSValue.jsNumberFromUint64(this.read_len));
+ }
+
+ pub fn run(this: *CopyFile) void {
+ this.runAsync();
+ }
+
+ pub fn doClose(this: *CopyFile) void {
+ // Only close fds we opened ourselves (i.e. the caller passed a path, not an fd).
+ const close_destination = this.destination_file_store.pathlike != .fd and this.destination_fd != null_fd;
+ const close_source = this.source_file_store.pathlike != .fd and this.source_fd != null_fd;
+
+ if (close_destination and close_source) {
+ this.doCloseFile(.both);
+ } else if (close_destination) {
+ this.doCloseFile(.destination);
+ } else if (close_source) {
+ this.doCloseFile(.source);
+ }
+ }
+
+ const os = std.os;
+
+ pub fn doCloseFile(this: *CopyFile, comptime which: IOWhich) void {
+ switch (which) {
+ .both => {
+ _ = JSC.Node.Syscall.close(this.destination_fd);
+ _ = JSC.Node.Syscall.close(this.source_fd);
+ },
+ .destination => {
+ _ = JSC.Node.Syscall.close(this.destination_fd);
+ },
+ .source => {
+ _ = JSC.Node.Syscall.close(this.source_fd);
+ },
+ }
+ }
+
+ const O = if (Environment.isLinux) linux.O else std.os.O;
+ const open_destination_flags = O.CLOEXEC | O.CREAT | O.WRONLY | O.TRUNC;
+ const open_source_flags = O.CLOEXEC | O.RDONLY;
+
+ pub fn doOpenFile(this: *CopyFile, comptime which: IOWhich) !void {
+ // open source file first
+ // if it fails, we don't want the extra destination file hanging out
+ if (which == .both or which == .source) {
+ this.source_fd = switch (JSC.Node.Syscall.open(
+ this.source_file_store.pathlike.path.sliceZAssume(),
+ open_source_flags,
+ 0,
+ )) {
+ .result => |result| result,
+ .err => |errno| {
+ this.system_error = errno.toSystemError();
+ return AsyncIO.asError(errno.errno);
+ },
+ };
+ }
+
+ if (which == .both or which == .destination) {
+ this.destination_fd = switch (JSC.Node.Syscall.open(
+ this.destination_file_store.pathlike.path.sliceZAssume(),
+ open_destination_flags,
+ JSC.Node.default_permission,
+ )) {
+ .result => |result| result,
+ .err => |errno| {
+ if (which == .both) {
+ _ = JSC.Node.Syscall.close(this.source_fd);
+ this.source_fd = 0;
+ }
+
+ this.system_error = errno.toSystemError();
+ return AsyncIO.asError(errno.errno);
+ },
+ };
+ }
+ }
+
+ const TryWith = enum {
+ sendfile,
+ copy_file_range,
+ splice,
+
+ pub const tag = std.EnumMap(TryWith, JSC.Node.Syscall.Tag).init(.{
+ .sendfile = .sendfile,
+ .copy_file_range = .copy_file_range,
+ .splice = .splice,
+ });
+ };
+
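+ // Linux fast path: copy fd-to-fd in kernel space using copy_file_range(),
+ // sendfile(), or splice(), looping until EOF or the requested length is reached.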
+ pub fn doCopyFileRange(
+ this: *CopyFile,
+ comptime use: TryWith,
+ comptime clear_append_if_invalid: bool,
+ ) anyerror!void {
+ this.read_off += this.offset;
+
+ var remain = @as(usize, this.max_length);
+ if (remain == max_size or remain == 0) {
+ // sometimes stat lies
+ // let's give it 4096 and see how it goes
+ remain = 4096;
+ }
+
+ var total_written: usize = 0;
+ const src_fd = this.source_fd;
+ const dest_fd = this.destination_fd;
+
+ defer {
+ this.read_len = @truncate(SizeType, total_written);
+ }
+
+ var has_unset_append = false;
+
+ while (true) {
+ const written = switch (comptime use) {
+ .copy_file_range => linux.copy_file_range(src_fd, null, dest_fd, null, remain, 0),
+ .sendfile => linux.sendfile(dest_fd, src_fd, null, remain),
+ .splice => bun.C.splice(src_fd, null, dest_fd, null, remain, 0),
+ };
+
+ switch (linux.getErrno(written)) {
+ .SUCCESS => {},
+
+ .INVAL => {
+ if (comptime clear_append_if_invalid) {
+ if (!has_unset_append) {
+ // https://kylelaker.com/2018/08/31/stdout-oappend.html
+ // make() can set STDOUT / STDERR to O_APPEND
+ // this messes up sendfile()
+ has_unset_append = true;
+ const flags = linux.fcntl(dest_fd, linux.F.GETFL, 0);
+ if ((flags & O.APPEND) != 0) {
+ _ = linux.fcntl(dest_fd, linux.F.SETFL, flags ^ O.APPEND);
+ continue;
+ }
+ }
+ }
+
+ this.system_error = (JSC.Node.Syscall.Error{
+ .errno = @intCast(JSC.Node.Syscall.Error.Int, @enumToInt(linux.E.INVAL)),
+ .syscall = TryWith.tag.get(use).?,
+ }).toSystemError();
+ return AsyncIO.asError(linux.E.INVAL);
+ },
+ else => |errno| {
+ this.system_error = (JSC.Node.Syscall.Error{
+ .errno = @intCast(JSC.Node.Syscall.Error.Int, @enumToInt(errno)),
+ .syscall = TryWith.tag.get(use).?,
+ }).toSystemError();
+ return AsyncIO.asError(errno);
+ },
+ }
+
+ // wrote zero bytes means EOF
+ remain -|= written;
+ total_written += written;
+ if (written == 0 or remain == 0) break;
+ }
+ }
+
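+ // macOS: copy the file contents with fcopyfile(COPYFILE_DATA), used when
+ // clonefile() isn't possible.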
+ pub fn doFCopyFile(this: *CopyFile) anyerror!void {
+ switch (JSC.Node.Syscall.fcopyfile(this.source_fd, this.destination_fd, os.system.COPYFILE_DATA)) {
+ .err => |errno| {
+ this.system_error = errno.toSystemError();
+
+ return AsyncIO.asError(errno.errno);
+ },
+ .result => {},
+ }
+ }
+
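+ // macOS: clonefile() creates a copy-on-write clone; both source and
+ // destination must be paths (not file descriptors).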
+ pub fn doClonefile(this: *CopyFile) anyerror!void {
+ var source_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+ var dest_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
+
+ switch (JSC.Node.Syscall.clonefile(
+ this.source_file_store.pathlike.path.sliceZ(&source_buf),
+ this.destination_file_store.pathlike.path.sliceZ(
+ &dest_buf,
+ ),
+ )) {
+ .err => |errno| {
+ this.system_error = errno.toSystemError();
+ return AsyncIO.asError(errno.errno);
+ },
+ .result => {},
+ }
+ }
+
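+ // Overall strategy: on macOS, try clonefile() first when both sides are paths
+ // and there is no offset; otherwise open whichever fds are missing and copy
+ // with copy_file_range()/sendfile()/splice() on Linux or fcopyfile() on macOS.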
+ pub fn runAsync(this: *CopyFile) void {
+ // defer task.onFinish();
+
+ var stat_: ?std.os.Stat = null;
+
+ if (this.destination_file_store.pathlike == .fd) {
+ this.destination_fd = this.destination_file_store.pathlike.fd;
+ }
+
+ if (this.source_file_store.pathlike == .fd) {
+ this.source_fd = this.source_file_store.pathlike.fd;
+ }
+
+ // Do we need to open both files?
+ if (this.destination_fd == null_fd and this.source_fd == null_fd) {
+
+ // First, we attempt to clonefile() on macOS
+ // This is the fastest way to copy a file.
+ if (comptime Environment.isMac) {
+ if (this.offset == 0 and this.source_file_store.pathlike == .path and this.destination_file_store.pathlike == .path) {
+ do_clonefile: {
+
+ // stat the source file, make sure it:
+ // 1. exists
+ // 2. is a regular file (otherwise fall back to the other copy paths)
+ switch (JSC.Node.Syscall.stat(this.source_file_store.pathlike.path.sliceZAssume())) {
+ .result => |result| {
+ stat_ = result;
+
+ if (os.S.ISDIR(result.mode)) {
+ this.system_error = unsupported_directory_error;
+ return;
+ }
+
+ if (!os.S.ISREG(result.mode))
+ break :do_clonefile;
+ },
+ .err => |err| {
+ // If we can't stat it, we also can't copy it.
+ this.system_error = err.toSystemError();
+ return;
+ },
+ }
+
+ if (this.doClonefile()) {
+ if (this.max_length != Blob.max_size and this.max_length < @intCast(SizeType, stat_.?.size)) {
+ // If this fails...well, there's not much we can do about it.
+ _ = bun.C.truncate(
+ this.destination_file_store.pathlike.path.sliceZAssume(),
+ @intCast(std.os.off_t, this.max_length),
+ );
+ this.read_len = @intCast(SizeType, this.max_length);
+ } else {
+ this.read_len = @intCast(SizeType, stat_.?.size);
+ }
+ return;
+ } else |_| {
+
+ // this may still fail, in which case we just continue trying with fcopyfile
+ // it can fail when the destination file already exists,
+ // when the destination's parent directory is missing or not a directory,
+ // or when the filesystem doesn't support cloning (e.g. a network volume)
+ this.system_error = null;
+ }
+ }
+ }
+ }
+
+ this.doOpenFile(.both) catch return;
+ // Do we need to open only one file?
+ } else if (this.destination_fd == null_fd) {
+ this.source_fd = this.source_file_store.pathlike.fd;
+
+ this.doOpenFile(.destination) catch return;
+ // Do we need to open only one file?
+ } else if (this.source_fd == null_fd) {
+ this.destination_fd = this.destination_file_store.pathlike.fd;
+
+ this.doOpenFile(.source) catch return;
+ }
+
+ if (this.system_error != null) {
+ return;
+ }
+
+ std.debug.assert(this.destination_fd != null_fd);
+ std.debug.assert(this.source_fd != null_fd);
+
+ const stat: std.os.Stat = stat_ orelse switch (JSC.Node.Syscall.fstat(this.source_fd)) {
+ .result => |result| result,
+ .err => |err| {
+ this.doClose();
+ this.system_error = err.toSystemError();
+ return;
+ },
+ };
+
+ if (os.S.ISDIR(stat.mode)) {
+ this.system_error = unsupported_directory_error;
+ this.doClose();
+ return;
+ }
+
+ if (stat.size != 0) {
+ this.max_length = @maximum(@minimum(@intCast(SizeType, stat.size), this.max_length), this.offset) - this.offset;
+ if (this.max_length == 0) {
+ this.doClose();
+ return;
+ }
+
+ if (os.S.ISREG(stat.mode) and
+ this.max_length > std.mem.page_size and
+ this.max_length != Blob.max_size)
+ {
+ bun.C.preallocate_file(this.destination_fd, 0, this.max_length) catch {};
+ }
+ }
+
+ if (comptime Environment.isLinux) {
+
+ // Bun.write(Bun.file("a"), Bun.file("b"))
+ if (os.S.ISREG(stat.mode) and (os.S.ISREG(this.destination_file_store.mode) or this.destination_file_store.mode == 0)) {
+ if (this.destination_file_store.is_atty orelse false) {
+ this.doCopyFileRange(.copy_file_range, true) catch {};
+ } else {
+ this.doCopyFileRange(.copy_file_range, false) catch {};
+ }
+
+ this.doClose();
+ return;
+ }
+
+ // $ bun run foo.js | bun run bar.js
+ if (os.S.ISFIFO(stat.mode) and os.S.ISFIFO(this.destination_file_store.mode)) {
+ if (this.destination_file_store.is_atty orelse false) {
+ this.doCopyFileRange(.splice, true) catch {};
+ } else {
+ this.doCopyFileRange(.splice, false) catch {};
+ }
+
+ this.doClose();
+ return;
+ }
+
+ if (os.S.ISREG(stat.mode) or os.S.ISCHR(stat.mode) or os.S.ISSOCK(stat.mode)) {
+ if (this.destination_file_store.is_atty orelse false) {
+ this.doCopyFileRange(.sendfile, true) catch {};
+ } else {
+ this.doCopyFileRange(.sendfile, false) catch {};
+ }
+
+ this.doClose();
+ return;
+ }
+
+ this.system_error = unsupported_non_regular_file_error;
+ this.doClose();
+ return;
+ }
+
+ if (comptime Environment.isMac) {
+ this.doFCopyFile() catch {
+ this.doClose();
+
+ return;
+ };
+ if (stat.size != 0 and @intCast(SizeType, stat.size) > this.max_length) {
+ _ = darwin.ftruncate(this.destination_fd, @intCast(std.os.off_t, this.max_length));
+ }
+
+ this.doClose();
+ } else {
+ @compileError("TODO: implement copyfile");
+ }
+ }
+ };
+ };
+
+ pub const FileStore = struct {
+ pathlike: JSC.Node.PathOrFileDescriptor,
+ mime_type: HTTPClient.MimeType = HTTPClient.MimeType.other,
+ is_atty: ?bool = null,
+ mode: JSC.Node.Mode = 0,
+ seekable: ?bool = null,
+ max_size: SizeType = 0,
+
+ pub fn init(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?HTTPClient.MimeType) FileStore {
+ return .{ .pathlike = pathlike, .mime_type = mime_type orelse HTTPClient.MimeType.other };
+ }
+ };
+
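+ // Heap-allocated storage for a Blob's bytes; `len` is the used length and
+ // `cap` the allocated capacity freed by deinit().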
+ pub const ByteStore = struct {
+ ptr: [*]u8 = undefined,
+ len: SizeType = 0,
+ cap: SizeType = 0,
+ allocator: std.mem.Allocator,
+
+ pub fn init(bytes: []u8, allocator: std.mem.Allocator) ByteStore {
+ return .{
+ .ptr = bytes.ptr,
+ .len = @truncate(SizeType, bytes.len),
+ .cap = @truncate(SizeType, bytes.len),
+ .allocator = allocator,
+ };
+ }
+
+ pub fn fromArrayList(list: std.ArrayListUnmanaged(u8), allocator: std.mem.Allocator) !*ByteStore {
+ return ByteStore.init(list.items, allocator);
+ }
+
+ pub fn slice(this: ByteStore) []u8 {
+ return this.ptr[0..this.len];
+ }
+
+ pub fn deinit(this: *ByteStore) void {
+ this.allocator.free(this.ptr[0..this.cap]);
+ }
+
+ pub fn asArrayList(this: ByteStore) std.ArrayListUnmanaged(u8) {
+ return this.asArrayListLeak();
+ }
+
+ pub fn asArrayListLeak(this: ByteStore) std.ArrayListUnmanaged(u8) {
+ return .{
+ .items = this.ptr[0..this.len],
+ .capacity = this.cap,
+ };
+ }
+ };
+
+ pub const Constructor = JSC.NewConstructor(
+ Blob,
+ .{
+ .constructor = .{ .rfn = constructor },
+ },
+ .{},
+ );
+
+ pub const Class = NewClass(
+ Blob,
+ .{ .name = "Blob" },
+ .{ .finalize = finalize, .text = .{
+ .rfn = getText,
+ }, .json = .{
+ .rfn = getJSON,
+ }, .arrayBuffer = .{
+ .rfn = getArrayBuffer,
+ }, .slice = .{
+ .rfn = getSlice,
+ }, .stream = .{
+ .rfn = getStream,
+ } },
+ .{
+ .@"type" = .{
+ .get = getType,
+ .set = setType,
+ },
+ .@"size" = .{
+ .get = getSize,
+ .ro = true,
+ },
+ },
+ );
+
+ pub fn getStream(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) JSC.C.JSValueRef {
+ var recommended_chunk_size: SizeType = 0;
+ if (arguments.len > 0) {
+ if (!JSValue.c(arguments[0]).isNumber() and !JSValue.c(arguments[0]).isUndefinedOrNull()) {
+ JSC.throwInvalidArguments("chunkSize must be a number", .{}, ctx, exception);
+ return null;
+ }
+
+ recommended_chunk_size = @intCast(SizeType, @maximum(0, @truncate(i52, JSValue.c(arguments[0]).toInt64())));
+ }
+ return JSC.WebCore.ReadableStream.fromBlob(
+ ctx.ptr(),
+ this,
+ recommended_chunk_size,
+ ).asObjectRef();
+ }
+
+ fn promisified(
+ value: JSC.JSValue,
+ global: *JSGlobalObject,
+ ) JSC.JSValue {
+ if (value.isError()) {
+ return JSC.JSPromise.rejectedPromiseValue(global, value);
+ }
+
+ if (value.jsType() == .JSPromise)
+ return value;
+
+ return JSC.JSPromise.resolvedPromiseValue(global, value);
+ }
+
+ pub fn getText(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) JSC.C.JSObjectRef {
+ return promisified(this.toString(ctx.ptr(), .clone), ctx.ptr()).asObjectRef();
+ }
+
+ pub fn getTextTransfer(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ ) JSC.C.JSObjectRef {
+ return promisified(this.toString(ctx.ptr(), .transfer), ctx.ptr()).asObjectRef();
+ }
+
+ pub fn getJSON(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) JSC.C.JSObjectRef {
+ return promisified(this.toJSON(ctx.ptr(), .share), ctx.ptr()).asObjectRef();
+ }
+
+ pub fn getArrayBufferTransfer(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ ) JSC.C.JSObjectRef {
+ return promisified(this.toArrayBuffer(ctx.ptr(), .transfer), ctx.ptr()).asObjectRef();
+ }
+
+ pub fn getArrayBuffer(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) JSC.C.JSObjectRef {
+ return promisified(this.toArrayBuffer(ctx.ptr(), .clone), ctx.ptr()).asObjectRef();
+ }
+
+ /// https://w3c.github.io/FileAPI/#slice-method-algo
+ /// The slice() method returns a new Blob object with bytes ranging from the
+ /// optional start parameter up to but not including the optional end
+ /// parameter, and with a type attribute that is the value of the optional
+ /// contentType parameter. It must act as follows:
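+ /// Example (from JS): blob.slice(0, 5, "text/plain")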
+ pub fn getSlice(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ args: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) JSC.C.JSObjectRef {
+ if (this.size == 0) {
+ return constructor(ctx, null, &[_]js.JSValueRef{}, exception);
+ }
+ // If the optional start parameter is not used as a parameter when making this call, let relativeStart be 0.
+ var relativeStart: i64 = 0;
+
+ // If the optional end parameter is not used as a parameter when making this call, let relativeEnd be size.
+ var relativeEnd: i64 = @intCast(i64, this.size);
+
+ var args_iter = JSC.Node.ArgumentsSlice.from(ctx.bunVM(), args);
+ if (args_iter.nextEat()) |start_| {
+ const start = start_.toInt64();
+ if (start < 0) {
+ // If the optional start parameter is negative, let relativeStart be max((size + start), 0).
+ relativeStart = @intCast(i64, @maximum(start + @intCast(i64, this.size), 0));
+ } else {
+ // Otherwise, let relativeStart be min(start, size).
+ relativeStart = @minimum(@intCast(i64, start), @intCast(i64, this.size));
+ }
+ }
+
+ if (args_iter.nextEat()) |end_| {
+ const end = end_.toInt64();
+ if (end < 0) {
+ // If the optional end parameter is negative, let relativeEnd be max((size + end), 0).
+ relativeEnd = @intCast(i64, @maximum(end + @intCast(i64, this.size), 0));
+ } else {
+ // Otherwise, let relativeEnd be min(end, size).
+ relativeEnd = @minimum(@intCast(i64, end), @intCast(i64, this.size));
+ }
+ }
+
+ var content_type: string = "";
+ if (args_iter.nextEat()) |content_type_| {
+ if (content_type_.isString()) {
+ var zig_str = content_type_.getZigString(ctx.ptr());
+ var slicer = zig_str.toSlice(bun.default_allocator);
+ defer slicer.deinit();
+ var slice = slicer.slice();
+ var content_type_buf = getAllocator(ctx).alloc(u8, slice.len) catch unreachable;
+ content_type = strings.copyLowercase(slice, content_type_buf);
+ }
+ }
+
+ const len = @intCast(SizeType, @maximum(relativeEnd - relativeStart, 0));
+
+ // This copies over the is_all_ascii flag
+ // which is okay because this will only be a <= slice
+ var blob = this.dupe();
+ blob.offset = @intCast(SizeType, relativeStart);
+ blob.size = len;
+ blob.content_type = content_type;
+ blob.content_type_allocated = content_type.len > 0;
+
+ var blob_ = getAllocator(ctx).create(Blob) catch unreachable;
+ blob_.* = blob;
+ blob_.allocator = getAllocator(ctx);
+ return Blob.Class.make(ctx, blob_);
+ }
+
+ pub fn getType(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return ZigString.init(this.content_type).toValue(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn setType(
+ this: *Blob,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ value: js.JSValueRef,
+ _: js.ExceptionRef,
+ ) bool {
+ var zig_str = JSValue.fromRef(value).getZigString(ctx.ptr());
+ if (zig_str.is16Bit())
+ return false;
+
+ var slice = zig_str.trimmedSlice();
+ if (strings.eql(slice, this.content_type))
+ return true;
+
+ const prev_content_type = this.content_type;
+ {
+ defer if (this.content_type_allocated) bun.default_allocator.free(prev_content_type);
+ var content_type_buf = getAllocator(ctx).alloc(u8, slice.len) catch unreachable;
+ this.content_type = strings.copyLowercase(slice, content_type_buf);
+ }
+
+ this.content_type_allocated = true;
+ return true;
+ }
+
+ pub fn getSize(
+ this: *Blob,
+ _: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ if (this.size == Blob.max_size) {
+ this.resolveSize();
+ if (this.size == Blob.max_size and this.store != null) {
+ return JSValue.jsNumberFromChar(0).asRef();
+ }
+ }
+
+ if (this.size < std.math.maxInt(i32)) {
+ return JSValue.jsNumber(this.size).asRef();
+ }
+
+ return JSC.JSValue.jsNumberFromUint64(this.size).asRef();
+ }
+
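+ // For byte-backed blobs, clamp the offset and derive the size from the store;
+ // a blob without a store resolves to size 0.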
+ pub fn resolveSize(this: *Blob) void {
+ if (this.store) |store| {
+ if (store.data == .bytes) {
+ const offset = this.offset;
+ const store_size = store.size();
+ if (store_size != Blob.max_size) {
+ this.offset = @minimum(store_size, offset);
+ this.size = store_size - offset;
+ }
+ }
+ } else {
+ this.size = 0;
+ }
+ }
+
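+ // e.g. new Blob() or new Blob(["hello", " ", "world"], { type: "text/plain" })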
+ pub fn constructor(
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ args: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSObjectRef {
+ var blob: Blob = undefined;
+ switch (args.len) {
+ 0 => {
+ var empty: []u8 = &[_]u8{};
+ blob = Blob.init(empty, getAllocator(ctx), ctx.ptr());
+ },
+ else => {
+ blob = fromJS(ctx.ptr(), JSValue.fromRef(args[0]), false, true) catch |err| {
+ if (err == error.InvalidArguments) {
+ JSC.JSError(getAllocator(ctx), "new Blob() expects an Array", .{}, ctx, exception);
+ return null;
+ }
+ JSC.JSError(getAllocator(ctx), "out of memory :(", .{}, ctx, exception);
+ return null;
+ };
+
+ if (args.len > 1) {
+ var options = JSValue.fromRef(args[1]);
+ if (options.isCell()) {
+ // type, the ASCII-encoded string in lower case
+ // representing the media type of the Blob.
+ // Normative conditions for this member are provided
+ // in the § 3.1 Constructors.
+ if (options.get(ctx.ptr(), "type")) |content_type| {
+ if (content_type.isString()) {
+ var content_type_str = content_type.getZigString(ctx.ptr());
+ if (!content_type_str.is16Bit()) {
+ var slice = content_type_str.trimmedSlice();
+ var content_type_buf = getAllocator(ctx).alloc(u8, slice.len) catch unreachable;
+ blob.content_type = strings.copyLowercase(slice, content_type_buf);
+ blob.content_type_allocated = true;
+ }
+ }
+ }
+ }
+ }
+
+ if (blob.content_type.len == 0) {
+ blob.content_type = "";
+ }
+ },
+ }
+
+ var blob_ = getAllocator(ctx).create(Blob) catch unreachable;
+ blob_.* = blob;
+ blob_.allocator = getAllocator(ctx);
+ return Blob.Class.make(ctx, blob_);
+ }
+
+ pub fn finalize(this: *Blob) void {
+ this.deinit();
+ }
+
+ pub fn initWithAllASCII(bytes: []u8, allocator: std.mem.Allocator, globalThis: *JSGlobalObject, is_all_ascii: bool) Blob {
+ // avoid allocating a Blob.Store if the buffer is actually empty
+ var store: ?*Blob.Store = null;
+ if (bytes.len > 0) {
+ store = Blob.Store.init(bytes, allocator) catch unreachable;
+ store.?.is_all_ascii = is_all_ascii;
+ }
+ return Blob{
+ .size = @truncate(SizeType, bytes.len),
+ .store = store,
+ .allocator = null,
+ .content_type = "",
+ .globalThis = globalThis,
+ .is_all_ascii = is_all_ascii,
+ };
+ }
+
+ pub fn init(bytes: []u8, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) Blob {
+ return Blob{
+ .size = @truncate(SizeType, bytes.len),
+ .store = if (bytes.len > 0)
+ Blob.Store.init(bytes, allocator) catch unreachable
+ else
+ null,
+ .allocator = null,
+ .content_type = "",
+ .globalThis = globalThis,
+ };
+ }
+
+ pub fn initWithStore(store: *Blob.Store, globalThis: *JSGlobalObject) Blob {
+ return Blob{
+ .size = store.size(),
+ .store = store,
+ .allocator = null,
+ .content_type = if (store.data == .file)
+ store.data.file.mime_type.value
+ else
+ "",
+ .globalThis = globalThis,
+ };
+ }
+
+ pub fn initEmpty(globalThis: *JSGlobalObject) Blob {
+ return Blob{
+ .size = 0,
+ .store = null,
+ .allocator = null,
+ .content_type = "",
+ .globalThis = globalThis,
+ };
+ }
+
+ // Transferring doesn't change the reference count
+ // It is a move
+ inline fn transfer(this: *Blob) void {
+ this.store = null;
+ }
+
+ pub fn detach(this: *Blob) void {
+ if (this.store != null) this.store.?.deref();
+ this.store = null;
+ }
+
+ /// This does not duplicate the underlying data.
+ /// It creates a new view and increments the reference count.
+ pub fn dupe(this: *const Blob) Blob {
+ if (this.store != null) this.store.?.ref();
+ var duped = this.*;
+ duped.allocator = null;
+ return duped;
+ }
+
+ pub fn deinit(this: *Blob) void {
+ this.detach();
+
+ if (this.allocator) |alloc| {
+ this.allocator = null;
+ alloc.destroy(this);
+ }
+ }
+
+ pub fn sharedView(this: *const Blob) []const u8 {
+ if (this.size == 0 or this.store == null) return "";
+ var slice_ = this.store.?.sharedView();
+ if (slice_.len == 0) return "";
+ slice_ = slice_[this.offset..];
+
+ return slice_[0..@minimum(slice_.len, @as(usize, this.size))];
+ }
+
+ pub fn view(this: *const Blob) []const u8 {
+ if (this.size == 0 or this.store == null) return "";
+ return this.store.?.sharedView()[this.offset..][0..this.size];
+ }
+
+ pub const Lifetime = JSC.WebCore.Lifetime;
+ pub fn setIsASCIIFlag(this: *Blob, is_all_ascii: bool) void {
+ this.is_all_ascii = is_all_ascii;
+ // if this Blob represents the entire binary data
+ // which will be pretty common
+ // we can update the store's is_all_ascii flag
+ // and any other Blob that points to the same store
+ // can skip checking the encoding
+ if (this.size > 0 and this.offset == 0 and this.store.?.data == .bytes) {
+ this.store.?.is_all_ascii = is_all_ascii;
+ }
+ }
+
+ pub fn NewReadFileHandler(comptime Function: anytype, comptime lifetime: Lifetime) type {
+ return struct {
+ context: Blob,
+ promise: *JSPromise,
+ globalThis: *JSGlobalObject,
+ pub fn run(handler: *@This(), bytes_: Blob.Store.ReadFile.ResultType) void {
+ var promise = handler.promise;
+ var blob = handler.context;
+ blob.allocator = null;
+ var globalThis = handler.globalThis;
+ bun.default_allocator.destroy(handler);
+ switch (bytes_) {
+ .result => |result| {
+ const bytes = result.buf;
+ const is_temporary = result.is_temporary;
+ if (blob.size > 0)
+ blob.size = @minimum(@truncate(u32, bytes.len), blob.size);
+ if (!is_temporary) {
+ promise.resolve(globalThis, Function(&blob, globalThis, bytes, comptime lifetime));
+ } else {
+ promise.resolve(globalThis, Function(&blob, globalThis, bytes, .temporary));
+ }
+ },
+ .err => |err| {
+ promise.reject(globalThis, err.toErrorInstance(globalThis));
+ },
+ }
+ }
+ };
+ }
+
+ pub const WriteFilePromise = struct {
+ promise: *JSPromise,
+ globalThis: *JSGlobalObject,
+ pub fn run(handler: *@This(), count: Blob.Store.WriteFile.ResultType) void {
+ var promise = handler.promise;
+ var globalThis = handler.globalThis;
+ bun.default_allocator.destroy(handler);
+ switch (count) {
+ .err => |err| {
+ promise.reject(globalThis, err.toErrorInstance(globalThis));
+ },
+ .result => |wrote| {
+ promise.resolve(globalThis, JSC.JSValue.jsNumberFromUint64(wrote));
+ },
+ }
+ }
+ };
+
+ pub fn NewInternalReadFileHandler(comptime Context: type, comptime Function: anytype) type {
+ return struct {
+ pub fn run(handler: *anyopaque, bytes_: Store.ReadFile.ResultType) void {
+ Function(bun.cast(Context, handler), bytes_);
+ }
+ };
+ }
+
+ pub fn doReadFileInternal(this: *Blob, comptime Handler: type, ctx: Handler, comptime Function: anytype, global: *JSGlobalObject) void {
+ var file_read = Store.ReadFile.createWithCtx(
+ bun.default_allocator,
+ this.store.?,
+ ctx,
+ NewInternalReadFileHandler(Handler, Function).run,
+ this.offset,
+ this.size,
+ ) catch unreachable;
+ var read_file_task = Store.ReadFile.ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch unreachable;
+ read_file_task.schedule();
+ }
+
+ pub fn doReadFile(this: *Blob, comptime Function: anytype, comptime lifetime: Lifetime, global: *JSGlobalObject) JSValue {
+ const Handler = NewReadFileHandler(Function, lifetime);
+ var promise = JSPromise.create(global);
+
+ var handler = Handler{
+ .context = this.*,
+ .promise = promise,
+ .globalThis = global,
+ };
+
+ var ptr = bun.default_allocator.create(Handler) catch unreachable;
+ ptr.* = handler;
+ var file_read = Store.ReadFile.create(
+ bun.default_allocator,
+ this.store.?,
+ this.offset,
+ this.size,
+ *Handler,
+ ptr,
+ Handler.run,
+ ) catch unreachable;
+ var read_file_task = Store.ReadFile.ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch unreachable;
+ read_file_task.schedule();
+ return promise.asValue(global);
+ }
+
+ pub fn needsToReadFile(this: *const Blob) bool {
+ return this.store != null and this.store.?.data == .file;
+ }
+
+ pub fn toStringWithBytes(this: *Blob, global: *JSGlobalObject, buf: []const u8, comptime lifetime: Lifetime) JSValue {
+ // null == unknown
+ // false == can't be
+ const could_be_all_ascii = this.is_all_ascii orelse this.store.?.is_all_ascii;
+
+ if (could_be_all_ascii == null or !could_be_all_ascii.?) {
+ // if toUTF16Alloc returns null, it means there are no non-ASCII characters
+ // instead of erroring, invalid characters will become a U+FFFD replacement character
+ if (strings.toUTF16Alloc(bun.default_allocator, buf, false) catch unreachable) |external| {
+ if (lifetime != .temporary)
+ this.setIsASCIIFlag(false);
+
+ if (lifetime == .transfer) {
+ this.detach();
+ }
+
+ if (lifetime == .temporary) {
+ bun.default_allocator.free(bun.constStrToU8(buf));
+ }
+
+ return ZigString.toExternalU16(external.ptr, external.len, global);
+ }
+
+ if (lifetime != .temporary) this.setIsASCIIFlag(true);
+ }
+
+ switch (comptime lifetime) {
+ // strings are immutable
+ // we don't need to clone
+ .clone => {
+ this.store.?.ref();
+ return ZigString.init(buf).external(global, this.store.?, Store.external);
+ },
+ .transfer => {
+ var store = this.store.?;
+ this.transfer();
+ return ZigString.init(buf).external(global, store, Store.external);
+ },
+ // strings are immutable
+ // sharing isn't really a thing
+ .share => {
+ this.store.?.ref();
+ return ZigString.init(buf).external(global, this.store.?, Store.external);
+ },
+ .temporary => {
+ return ZigString.init(buf).toExternalValue(global);
+ },
+ }
+ }
+
+ pub fn toString(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) JSValue {
+ if (this.needsToReadFile()) {
+ return this.doReadFile(toStringWithBytes, lifetime, global);
+ }
+
+ const view_: []u8 =
+ bun.constStrToU8(this.sharedView());
+
+ if (view_.len == 0)
+ return ZigString.Empty.toValue(global);
+
+ return toStringWithBytes(this, global, view_, lifetime);
+ }
+
+ pub fn toJSON(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) JSValue {
+ if (this.needsToReadFile()) {
+ return this.doReadFile(toJSONWithBytes, lifetime, global);
+ }
+
+ var view_ = this.sharedView();
+
+ if (view_.len == 0)
+ return ZigString.Empty.toValue(global);
+
+ return toJSONWithBytes(this, global, view_, lifetime);
+ }
+
+ pub fn toJSONWithBytes(this: *Blob, global: *JSGlobalObject, buf: []const u8, comptime lifetime: Lifetime) JSValue {
+ // null == unknown
+ // false == can't be
+ const could_be_all_ascii = this.is_all_ascii orelse this.store.?.is_all_ascii;
+
+ if (could_be_all_ascii == null or !could_be_all_ascii.?) {
+ // if toUTF16Alloc returns null, it means there are no non-ASCII characters
+ if (strings.toUTF16Alloc(bun.default_allocator, buf, false) catch null) |external| {
+ if (comptime lifetime != .temporary) this.setIsASCIIFlag(false);
+ return ZigString.toExternalU16(external.ptr, external.len, global).parseJSON(global);
+ }
+
+ if (comptime lifetime != .temporary) this.setIsASCIIFlag(true);
+ }
+
+ if (comptime lifetime == .temporary) {
+ return ZigString.init(buf).toExternalValue(
+ global,
+ ).parseJSON(global);
+ } else {
+ return ZigString.init(buf).toValue(
+ global,
+ ).parseJSON(global);
+ }
+ }
+
+ pub fn toArrayBufferWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime lifetime: Lifetime) JSValue {
+ switch (comptime lifetime) {
+ .clone => {
+ var clone = bun.default_allocator.alloc(u8, buf.len) catch unreachable;
+ @memcpy(clone.ptr, buf.ptr, buf.len);
+
+ return JSC.ArrayBuffer.fromBytes(clone, .ArrayBuffer).toJS(global.ref(), null);
+ },
+ .share => {
+ this.store.?.ref();
+ return JSC.ArrayBuffer.fromBytes(buf, .ArrayBuffer).toJSWithContext(
+ global.ref(),
+ this.store.?,
+ JSC.BlobArrayBuffer_deallocator,
+ null,
+ );
+ },
+ .transfer => {
+ var store = this.store.?;
+ this.transfer();
+ return JSC.ArrayBuffer.fromBytes(buf, .ArrayBuffer).toJSWithContext(
+ global.ref(),
+ store,
+ JSC.BlobArrayBuffer_deallocator,
+ null,
+ );
+ },
+ .temporary => {
+ return JSC.ArrayBuffer.fromBytes(buf, .ArrayBuffer).toJS(
+ global.ref(),
+ null,
+ );
+ },
+ }
+ }
+
+ pub fn toArrayBuffer(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) JSValue {
+ if (this.needsToReadFile()) {
+ return this.doReadFile(toArrayBufferWithBytes, lifetime, global);
+ }
+
+ var view_ = this.sharedView();
+
+ if (view_.len == 0)
+ return JSC.ArrayBuffer.fromBytes(&[_]u8{}, .ArrayBuffer).toJS(global.ref(), null);
+
+ return toArrayBufferWithBytes(this, global, bun.constStrToU8(view_), lifetime);
+ }
+
+ pub inline fn fromJS(
+ global: *JSGlobalObject,
+ arg: JSValue,
+ comptime move: bool,
+ comptime require_array: bool,
+ ) anyerror!Blob {
+ return fromJSMovable(global, arg, move, require_array);
+ }
+
+ pub inline fn fromJSMove(global: *JSGlobalObject, arg: JSValue) anyerror!Blob {
+ return fromJSWithoutDeferGC(global, arg, true, false);
+ }
+
+ pub inline fn fromJSClone(global: *JSGlobalObject, arg: JSValue) anyerror!Blob {
+ return fromJSWithoutDeferGC(global, arg, false, true);
+ }
+
+ pub inline fn fromJSCloneOptionalArray(global: *JSGlobalObject, arg: JSValue) anyerror!Blob {
+ return fromJSWithoutDeferGC(global, arg, false, false);
+ }
+
+ fn fromJSMovable(
+ global: *JSGlobalObject,
+ arg: JSValue,
+ comptime move: bool,
+ comptime require_array: bool,
+ ) anyerror!Blob {
+ const FromJSFunction = if (comptime move and !require_array)
+ fromJSMove
+ else if (!require_array)
+ fromJSCloneOptionalArray
+ else
+ fromJSClone;
+ const DeferCtx = struct {
+ args: std.meta.ArgsTuple(@TypeOf(FromJSFunction)),
+ ret: anyerror!Blob = undefined,
+
+ pub fn run(ctx: ?*anyopaque) callconv(.C) void {
+ var that = bun.cast(*@This(), ctx.?);
+ that.ret = @call(.{}, FromJSFunction, that.args);
+ }
+ };
+ var ctx = DeferCtx{
+ .args = .{
+ global,
+ arg,
+ },
+ .ret = undefined,
+ };
+ global.vm().deferGC(&ctx, DeferCtx.run);
+ return ctx.ret;
+ }
+
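+ // Fast path: a single string, TypedArray/ArrayBuffer, or Blob is converted
+ // directly. Otherwise values are flattened with a small stack and every piece
+ // is joined into one contiguous buffer.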
+ fn fromJSWithoutDeferGC(
+ global: *JSGlobalObject,
+ arg: JSValue,
+ comptime move: bool,
+ comptime require_array: bool,
+ ) anyerror!Blob {
+ var current = arg;
+ if (current.isUndefinedOrNull()) {
+ return Blob{ .globalThis = global };
+ }
+
+ var top_value = current;
+ var might_only_be_one_thing = false;
+ switch (current.jsTypeLoose()) {
+ .Array, .DerivedArray => {
+ var top_iter = JSC.JSArrayIterator.init(current, global);
+ might_only_be_one_thing = top_iter.len == 1;
+ if (top_iter.len == 0) {
+ return Blob{ .globalThis = global };
+ }
+ if (might_only_be_one_thing) {
+ top_value = top_iter.next().?;
+ }
+ },
+ else => {
+ might_only_be_one_thing = true;
+ if (require_array) {
+ return error.InvalidArguments;
+ }
+ },
+ }
+
+ if (might_only_be_one_thing or !move) {
+
+ // Fast path: one item, we don't need to join
+ switch (top_value.jsTypeLoose()) {
+ .Cell,
+ .NumberObject,
+ JSC.JSValue.JSType.String,
+ JSC.JSValue.JSType.StringObject,
+ JSC.JSValue.JSType.DerivedStringObject,
+ => {
+ var sliced = top_value.toSlice(global, bun.default_allocator);
+ const is_all_ascii = !sliced.allocated;
+ if (!sliced.allocated and sliced.len > 0) {
+ sliced.ptr = @ptrCast([*]const u8, (try bun.default_allocator.dupe(u8, sliced.slice())).ptr);
+ sliced.allocated = true;
+ }
+
+ return Blob.initWithAllASCII(bun.constStrToU8(sliced.slice()), bun.default_allocator, global, is_all_ascii);
+ },
+
+ JSC.JSValue.JSType.ArrayBuffer,
+ JSC.JSValue.JSType.Int8Array,
+ JSC.JSValue.JSType.Uint8Array,
+ JSC.JSValue.JSType.Uint8ClampedArray,
+ JSC.JSValue.JSType.Int16Array,
+ JSC.JSValue.JSType.Uint16Array,
+ JSC.JSValue.JSType.Int32Array,
+ JSC.JSValue.JSType.Uint32Array,
+ JSC.JSValue.JSType.Float32Array,
+ JSC.JSValue.JSType.Float64Array,
+ JSC.JSValue.JSType.BigInt64Array,
+ JSC.JSValue.JSType.BigUint64Array,
+ JSC.JSValue.JSType.DataView,
+ => {
+ var buf = try bun.default_allocator.dupe(u8, top_value.asArrayBuffer(global).?.byteSlice());
+
+ return Blob.init(buf, bun.default_allocator, global);
+ },
+
+ else => {
+ if (JSC.C.JSObjectGetPrivate(top_value.asObjectRef())) |priv| {
+ var data = JSC.JSPrivateDataPtr.from(priv);
+ switch (data.tag()) {
+ .Blob => {
+ var blob: *Blob = data.as(Blob);
+ if (comptime move) {
+ var _blob = blob.*;
+ _blob.allocator = null;
+ blob.transfer();
+ return _blob;
+ } else {
+ return blob.dupe();
+ }
+ },
+
+ else => return Blob.initEmpty(global),
+ }
+ }
+ },
+ }
+ }
+
+ var stack_allocator = std.heap.stackFallback(1024, bun.default_allocator);
+ var stack_mem_all = stack_allocator.get();
+ var stack: std.ArrayList(JSValue) = std.ArrayList(JSValue).init(stack_mem_all);
+ var joiner = StringJoiner{ .use_pool = false, .node_allocator = stack_mem_all };
+ var could_have_non_ascii = false;
+
+ defer if (stack_allocator.fixed_buffer_allocator.end_index >= 1024) stack.deinit();
+
+ while (true) {
+ switch (current.jsTypeLoose()) {
+ .NumberObject,
+ JSC.JSValue.JSType.String,
+ JSC.JSValue.JSType.StringObject,
+ JSC.JSValue.JSType.DerivedStringObject,
+ => {
+ var sliced = current.toSlice(global, bun.default_allocator);
+ could_have_non_ascii = could_have_non_ascii or sliced.allocated;
+ joiner.append(
+ sliced.slice(),
+ 0,
+ if (sliced.allocated) sliced.allocator else null,
+ );
+ },
+
+ .Array, .DerivedArray => {
+ var iter = JSC.JSArrayIterator.init(current, global);
+ try stack.ensureUnusedCapacity(iter.len);
+ var any_arrays = false;
+ while (iter.next()) |item| {
+ if (item.isUndefinedOrNull()) continue;
+
+ // When it's a string or ArrayBuffer inside an array, we can avoid the extra push/pop
+ // we only really want this for nested arrays
+ // However, we must preserve the order
+ // That means if there are any arrays
+ // we have to restart the loop
+ if (!any_arrays) {
+ switch (item.jsTypeLoose()) {
+ .NumberObject,
+ .Cell,
+ JSC.JSValue.JSType.String,
+ JSC.JSValue.JSType.StringObject,
+ JSC.JSValue.JSType.DerivedStringObject,
+ => {
+ var sliced = item.toSlice(global, bun.default_allocator);
+ could_have_non_ascii = could_have_non_ascii or sliced.allocated;
+ joiner.append(
+ sliced.slice(),
+ 0,
+ if (sliced.allocated) sliced.allocator else null,
+ );
+ continue;
+ },
+ JSC.JSValue.JSType.ArrayBuffer,
+ JSC.JSValue.JSType.Int8Array,
+ JSC.JSValue.JSType.Uint8Array,
+ JSC.JSValue.JSType.Uint8ClampedArray,
+ JSC.JSValue.JSType.Int16Array,
+ JSC.JSValue.JSType.Uint16Array,
+ JSC.JSValue.JSType.Int32Array,
+ JSC.JSValue.JSType.Uint32Array,
+ JSC.JSValue.JSType.Float32Array,
+ JSC.JSValue.JSType.Float64Array,
+ JSC.JSValue.JSType.BigInt64Array,
+ JSC.JSValue.JSType.BigUint64Array,
+ JSC.JSValue.JSType.DataView,
+ => {
+ could_have_non_ascii = true;
+ var buf = item.asArrayBuffer(global).?;
+ joiner.append(buf.byteSlice(), 0, null);
+ continue;
+ },
+ .Array, .DerivedArray => {
+ any_arrays = true;
+ could_have_non_ascii = true;
+ break;
+ },
+ else => {
+ if (JSC.C.JSObjectGetPrivate(item.asObjectRef())) |priv| {
+ var data = JSC.JSPrivateDataPtr.from(priv);
+ switch (data.tag()) {
+ .Blob => {
+ var blob: *Blob = data.as(Blob);
+ could_have_non_ascii = could_have_non_ascii or !(blob.is_all_ascii orelse false);
+ joiner.append(blob.sharedView(), 0, null);
+ continue;
+ },
+ else => {},
+ }
+ }
+ },
+ }
+ }
+
+ stack.appendAssumeCapacity(item);
+ }
+ },
+
+ JSC.JSValue.JSType.ArrayBuffer,
+ JSC.JSValue.JSType.Int8Array,
+ JSC.JSValue.JSType.Uint8Array,
+ JSC.JSValue.JSType.Uint8ClampedArray,
+ JSC.JSValue.JSType.Int16Array,
+ JSC.JSValue.JSType.Uint16Array,
+ JSC.JSValue.JSType.Int32Array,
+ JSC.JSValue.JSType.Uint32Array,
+ JSC.JSValue.JSType.Float32Array,
+ JSC.JSValue.JSType.Float64Array,
+ JSC.JSValue.JSType.BigInt64Array,
+ JSC.JSValue.JSType.BigUint64Array,
+ JSC.JSValue.JSType.DataView,
+ => {
+ var buf = current.asArrayBuffer(global).?;
+ joiner.append(buf.slice(), 0, null);
+ could_have_non_ascii = true;
+ },
+
+ else => {
+ outer: {
+ if (JSC.C.JSObjectGetPrivate(current.asObjectRef())) |priv| {
+ var data = JSC.JSPrivateDataPtr.from(priv);
+ switch (data.tag()) {
+ .Blob => {
+ var blob: *Blob = data.as(Blob);
+ could_have_non_ascii = could_have_non_ascii or !(blob.is_all_ascii orelse false);
+ joiner.append(blob.sharedView(), 0, null);
+ break :outer;
+ },
+ else => {},
+ }
+ }
+
+ var sliced = current.toSlice(global, bun.default_allocator);
+ could_have_non_ascii = could_have_non_ascii or sliced.allocated;
+ joiner.append(
+ sliced.slice(),
+ 0,
+ if (sliced.allocated) sliced.allocator else null,
+ );
+ }
+ },
+ }
+ current = stack.popOrNull() orelse break;
+ }
+
+ var joined = try joiner.done(bun.default_allocator);
+
+ if (!could_have_non_ascii) {
+ return Blob.initWithAllASCII(joined, bun.default_allocator, global, true);
+ }
+ return Blob.init(joined, bun.default_allocator, global);
+ }
+};
+
+// https://developer.mozilla.org/en-US/docs/Web/API/Body
+pub const Body = struct {
+ init: Init = Init{ .headers = null, .status_code = 200 },
+ value: Value = Value.empty,
+
+ pub inline fn len(this: *const Body) usize {
+ return this.slice().len;
+ }
+
+ pub fn slice(this: *const Body) []const u8 {
+ return this.value.slice();
+ }
+
+ pub fn use(this: *Body) Blob {
+ return this.value.use();
+ }
+
+ pub fn clone(this: Body, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) Body {
+ return Body{
+ .init = this.init.clone(globalThis),
+ .value = this.value.clone(allocator),
+ };
+ }
+
+ pub fn writeFormat(this: *const Body, formatter: *JSC.Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void {
+ const Writer = @TypeOf(writer);
+
+ try formatter.writeIndent(Writer, writer);
+ try writer.writeAll("bodyUsed: ");
+ formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.value == .Used), .BooleanObject, enable_ansi_colors);
+ try formatter.printComma(Writer, writer, enable_ansi_colors);
+ try writer.writeAll("\n");
+
+ // if (this.init.headers) |headers| {
+ // try formatter.writeIndent(Writer, writer);
+ // try writer.writeAll("headers: ");
+ // try headers.leak().writeFormat(formatter, writer, comptime enable_ansi_colors);
+ // try writer.writeAll("\n");
+ // }
+
+ try formatter.writeIndent(Writer, writer);
+ try writer.writeAll("status: ");
+ formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(this.init.status_code), .NumberObject, enable_ansi_colors);
+ }
+
+ pub fn deinit(this: *Body, _: std.mem.Allocator) void {
+ if (this.init.headers) |headers| {
+ headers.deref();
+ this.init.headers = null;
+ }
+ this.value.deinit();
+ }
+
+ pub const Init = struct {
+ headers: ?*FetchHeaders = null,
+ status_code: u16,
+ method: Method = Method.GET,
+
+ pub fn clone(this: Init, _: *JSGlobalObject) Init {
+ var that = this;
+ var headers = this.headers;
+ if (headers) |head| {
+ that.headers = head.cloneThis();
+ }
+
+ return that;
+ }
+
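+ // Reads "headers", "status", and "method" from the init object.
+ // Property names are dispatched on their length first, so "status" and
+ // "method" (both 6 characters) share the same switch prong.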
+ pub fn init(_: std.mem.Allocator, ctx: js.JSContextRef, init_ref: js.JSValueRef) !?Init {
+ var result = Init{ .status_code = 200 };
+ var array = js.JSObjectCopyPropertyNames(ctx, init_ref);
+ defer js.JSPropertyNameArrayRelease(array);
+ const count = js.JSPropertyNameArrayGetCount(array);
+
+ var i: usize = 0;
+ while (i < count) : (i += 1) {
+ var property_name_ref = js.JSPropertyNameArrayGetNameAtIndex(array, i);
+ switch (js.JSStringGetLength(property_name_ref)) {
+ "headers".len => {
+ if (js.JSStringIsEqualToUTF8CString(property_name_ref, "headers")) {
+ // only support headers as an object for now.
+ if (js.JSObjectGetProperty(ctx, init_ref, property_name_ref, null)) |header_prop| {
+ const header_val = JSValue.fromRef(header_prop);
+ if (header_val.as(FetchHeaders)) |orig| {
+ result.headers = orig.cloneThis();
+ } else {
+ result.headers = FetchHeaders.createFromJS(ctx.ptr(), header_val);
+ }
+ }
+ }
+ },
+
+ "method".len => {
+ if (js.JSStringIsEqualToUTF8CString(property_name_ref, "status")) {
+ var value_ref = js.JSObjectGetProperty(ctx, init_ref, property_name_ref, null);
+ var exception: js.JSValueRef = null;
+ const number = js.JSValueToNumber(ctx, value_ref, &exception);
+ if (exception != null or !std.math.isFinite(number)) continue;
+ result.status_code = @truncate(u16, @floatToInt(u64, number));
+ } else if (js.JSStringIsEqualToUTF8CString(property_name_ref, "method")) {
+ result.method = Method.which(
+ JSC.JSValue.fromRef(init_ref).get(ctx.ptr(), "method").?.getZigString(ctx.ptr()).slice(),
+ ) orelse Method.GET;
+ }
+ },
+ else => {},
+ }
+ }
+
+ if (result.headers == null and result.status_code < 200) return null;
+ return result;
+ }
+ };
+
+ pub const PendingValue = struct {
+ promise: ?JSValue = null,
+ readable: ?JSC.WebCore.ReadableStream = null,
+ // writable: JSC.WebCore.Sink
+
+ global: *JSGlobalObject,
+ task: ?*anyopaque = null,
+ /// runs after the data is available.
+ callback: ?fn (ctx: *anyopaque, value: *Value) void = null,
+ /// Runs when the data is first requested; the HTTP server uses this to avoid
+ /// reading request bodies unless they are asked for.
+ onPull: ?fn (ctx: *anyopaque) void = null,
+ deinit: bool = false,
+ action: Action = Action.none,
+
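+ // If a ReadableStream is attached, convert it directly with the matching
+ // readableStreamTo* helper; otherwise create a pending promise that resolve()
+ // or toErrorInstance() settles once the body data arrives.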
+ pub fn setPromise(value: *PendingValue, globalThis: *JSC.JSGlobalObject, action: Action) JSValue {
+ value.action = action;
+
+ if (value.readable) |*readable| {
+ // switch (readable.ptr) {
+ // .JavaScript
+ // }
+ switch (action) {
+ .getText, .getJSON, .getBlob, .getArrayBuffer => {
+ switch (readable.ptr) {
+ .Blob => unreachable,
+ else => {},
+ }
+ value.promise = switch (action) {
+ .getJSON => globalThis.readableStreamToJSON(readable.value),
+ .getArrayBuffer => globalThis.readableStreamToArrayBuffer(readable.value),
+ .getText => globalThis.readableStreamToText(readable.value),
+ .getBlob => globalThis.readableStreamToBlob(readable.value),
+ else => unreachable,
+ };
+ value.promise.?.ensureStillAlive();
+ readable.value.unprotect();
+
+ // js now owns the memory
+ value.readable = null;
+
+ return value.promise.?;
+ },
+ .none => {},
+ }
+ }
+
+ {
+ var promise = JSC.JSPromise.create(globalThis);
+ const promise_value = promise.asValue(globalThis);
+ value.promise = promise_value;
+
+ if (value.onPull) |onPull| {
+ value.onPull = null;
+ onPull(value.task.?);
+ }
+ return promise_value;
+ }
+ }
+
+ pub const Action = enum {
+ none,
+ getText,
+ getJSON,
+ getArrayBuffer,
+ getBlob,
+ };
+ };
+
+ pub const Value = union(Tag) {
+ Blob: Blob,
+ Locked: PendingValue,
+ Used: void,
+ Empty: void,
+ Error: JSValue,
+
+ pub const Tag = enum {
+ Blob,
+ Locked,
+ Used,
+ Empty,
+ Error,
+ };
+
+ pub const empty = Value{ .Empty = .{} };
+
+ pub fn fromReadableStream(readable: JSC.WebCore.ReadableStream, globalThis: *JSGlobalObject) Value {
+ if (readable.isLocked(globalThis)) {
+ return .{ .Error = ZigString.init("Cannot use a locked ReadableStream").toErrorInstance(globalThis) };
+ }
+
+ readable.value.protect();
+ return .{
+ .Locked = .{
+ .readable = readable,
+ .global = globalThis,
+ },
+ };
+ }
+
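+ // Called when the body data becomes available: closes the readable stream (if
+ // any), notifies the registered callback, and settles the pending promise
+ // according to the requested Action.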
+ pub fn resolve(this: *Value, new: *Value, global: *JSGlobalObject) void {
+ if (this.* == .Locked) {
+ var locked = &this.Locked;
+ if (locked.readable) |readable| {
+ readable.done();
+ locked.readable = null;
+ }
+
+ if (locked.callback) |callback| {
+ locked.callback = null;
+ callback(locked.task.?, new);
+ }
+
+ if (locked.promise) |promise| {
+ locked.promise = null;
+ var blob = new.use();
+
+ switch (locked.action) {
+ .getText => {
+ promise.asPromise().?.resolve(global, JSValue.fromRef(blob.getTextTransfer(global.ref())));
+ },
+ .getJSON => {
+ promise.asPromise().?.resolve(global, blob.toJSON(global, .share));
+ blob.detach();
+ },
+ .getArrayBuffer => {
+ promise.asPromise().?.resolve(global, JSValue.fromRef(blob.getArrayBufferTransfer(global.ref())));
+ },
+ .getBlob => {
+ var ptr = bun.default_allocator.create(Blob) catch unreachable;
+ ptr.* = blob;
+ ptr.allocator = bun.default_allocator;
+ promise.asPromise().?.resolve(global, JSC.JSValue.fromRef(Blob.Class.make(global.ref(), ptr)));
+ },
+ else => {
+ var ptr = bun.default_allocator.create(Blob) catch unreachable;
+ ptr.* = blob;
+ ptr.allocator = bun.default_allocator;
+ promise.asInternalPromise().?.resolve(global, JSC.JSValue.fromRef(Blob.Class.make(global.ref(), ptr)));
+ },
+ }
+ JSC.C.JSValueUnprotect(global.ref(), promise.asObjectRef());
+ }
+ }
+ }
+ pub fn slice(this: Value) []const u8 {
+ return switch (this) {
+ .Blob => this.Blob.sharedView(),
+ else => "",
+ };
+ }
+
+ pub fn use(this: *Value) Blob {
+ switch (this.*) {
+ .Blob => {
+ var new_blob = this.Blob;
+ std.debug.assert(new_blob.allocator == null); // owned by Body
+ this.* = .{ .Used = .{} };
+ return new_blob;
+ },
+ else => {
+ return Blob.initEmpty(undefined);
+ },
+ }
+ }
+
+ pub fn toErrorInstance(this: *Value, error_instance: JSC.JSValue, global: *JSGlobalObject) void {
+ if (this.* == .Locked) {
+ var locked = this.Locked;
+ locked.deinit = true;
+ if (locked.promise) |promise| {
+ if (promise.asInternalPromise()) |internal| {
+ internal.reject(global, error_instance);
+ } else if (promise.asPromise()) |internal| {
+ internal.reject(global, error_instance);
+ }
+ JSC.C.JSValueUnprotect(global.ref(), promise.asObjectRef());
+ locked.promise = null;
+ }
+
+ if (locked.readable) |readable| {
+ readable.done();
+ locked.readable = null;
+ }
+
+ this.* = .{ .Error = error_instance };
+ if (locked.callback) |callback| {
+ locked.callback = null;
+ callback(locked.task.?, this);
+ }
+ return;
+ }
+
+ this.* = .{ .Error = error_instance };
+ }
+
+ pub fn toErrorString(this: *Value, comptime err: string, global: *JSGlobalObject) void {
+ var error_str = ZigString.init(err);
+ var error_instance = error_str.toErrorInstance(global);
+ return this.toErrorInstance(error_instance, global);
+ }
+
+ pub fn toError(this: *Value, err: anyerror, global: *JSGlobalObject) void {
+ var error_str = ZigString.init(std.fmt.allocPrint(
+ bun.default_allocator,
+ "Error reading file {s}",
+ .{@errorName(err)},
+ ) catch unreachable);
+ error_str.mark();
+ var error_instance = error_str.toErrorInstance(global);
+ return this.toErrorInstance(error_instance, global);
+ }
+
+ pub fn deinit(this: *Value) void {
+ const tag = @as(Tag, this.*);
+ if (tag == .Locked) {
+ if (this.Locked.readable) |*readable| {
+ readable.done();
+ }
+
+ this.Locked.deinit = true;
+ return;
+ }
+
+ if (tag == .Blob) {
+ this.Blob.deinit();
+ this.* = Value.empty;
+ }
+
+ if (tag == .Error) {
+ JSC.C.JSValueUnprotect(VirtualMachine.vm.global.ref(), this.Error.asObjectRef());
+ }
+ }
+
+ pub fn clone(this: Value, _: std.mem.Allocator) Value {
+ if (this == .Blob) {
+ return Value{ .Blob = this.Blob.dupe() };
+ }
+
+ return Value{ .Empty = .{} };
+ }
+ };
+
+ pub fn @"404"(_: js.JSContextRef) Body {
+ return Body{
+ .init = Init{
+ .headers = null,
+ .status_code = 404,
+ },
+ .value = Value.empty,
+ };
+ }
+
+ pub fn @"200"(_: js.JSContextRef) Body {
+ return Body{
+ .init = Init{
+ .status_code = 200,
+ },
+ .value = Value.empty,
+ };
+ }
+
+ pub fn extract(ctx: js.JSContextRef, body_ref: js.JSObjectRef, exception: js.ExceptionRef) Body {
+ return extractBody(
+ ctx,
+ body_ref,
+ false,
+ null,
+ exception,
+ );
+ }
+
+ pub fn extractWithInit(ctx: js.JSContextRef, body_ref: js.JSObjectRef, init_ref: js.JSValueRef, exception: js.ExceptionRef) Body {
+ return extractBody(
+ ctx,
+ body_ref,
+ true,
+ init_ref,
+ exception,
+ );
+ }
+
+ // https://github.com/WebKit/webkit/blob/main/Source/WebCore/Modules/fetch/FetchBody.cpp#L45
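+ // Blob-backed native ReadableStreams are unwrapped into a Blob directly;
+ // other streams become a .Locked value, and anything else goes through
+ // Blob.fromJS.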
+ inline fn extractBody(
+ ctx: js.JSContextRef,
+ body_ref: js.JSObjectRef,
+ comptime has_init: bool,
+ init_ref: js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) Body {
+ var body = Body{
+ .init = Init{ .headers = null, .status_code = 200 },
+ };
+ const value = JSC.JSValue.fromRef(body_ref);
+ var allocator = getAllocator(ctx);
+
+ if (comptime has_init) {
+ if (Init.init(allocator, ctx, init_ref.?)) |maybeInit| {
+ if (maybeInit) |init_| {
+ body.init = init_;
+ }
+ } else |_| {}
+ }
+
+ if (JSC.WebCore.ReadableStream.fromJS(value, ctx)) |readable| {
+ switch (readable.ptr) {
+ .Blob => |blob| {
+ body.value = .{
+ .Blob = Blob.initWithStore(blob.store, ctx),
+ };
+ blob.store.ref();
+
+ readable.done();
+
+ if (!blob.done) {
+ blob.done = true;
+ blob.deinit();
+ }
+ return body;
+ },
+ else => {},
+ }
+
+ body.value = Body.Value.fromReadableStream(readable, ctx);
+ return body;
+ }
+
+ body.value = .{
+ .Blob = Blob.fromJS(ctx.ptr(), value, true, false) catch |err| {
+ if (err == error.InvalidArguments) {
+ JSC.JSError(allocator, "Expected an Array", .{}, ctx, exception);
+ return body;
+ }
+
+ JSC.JSError(allocator, "Out of memory", .{}, ctx, exception);
+ return body;
+ },
+ };
+
+ std.debug.assert(body.value.Blob.allocator == null); // owned by Body
+
+ return body;
+ }
+};
+
+// https://developer.mozilla.org/en-US/docs/Web/API/Request
+pub const Request = struct {
+ url: ZigString = ZigString.Empty,
+ headers: ?*FetchHeaders = null,
+ body: Body.Value = Body.Value{ .Empty = .{} },
+ method: Method = Method.GET,
+ uws_request: ?*uws.Request = null,
+
+ pub fn fromRequestContext(ctx: *RequestContext, global: *JSGlobalObject) !Request {
+ var req = Request{
+ .url = ZigString.init(std.mem.span(ctx.getFullURL())),
+ .body = Body.Value.empty,
+ .method = ctx.method,
+ .headers = FetchHeaders.createFromPicoHeaders(global, ctx.request.headers),
+ };
+ req.url.mark();
+ return req;
+ }
+
+ pub fn mimeType(this: *const Request) string {
+ if (this.headers) |headers| {
+ // Header names are stored lowercased, so this lookup matches
+ // "Content-Type" regardless of the original casing.
+ if (headers.get("content-type")) |content_type| {
+ return content_type;
+ }
+ }
+
+ switch (this.body) {
+ .Blob => |blob| {
+ if (blob.content_type.len > 0) {
+ return blob.content_type;
+ }
+
+ return MimeType.other.value;
+ },
+ .Error, .Used, .Locked, .Empty => return MimeType.other.value,
+ }
+ }
+
+ pub const Constructor = JSC.NewConstructor(
+ Request,
+ .{
+ .constructor = .{ .rfn = constructor },
+ },
+ .{},
+ );
+
+ pub const Class = NewClass(
+ Request,
+ .{
+ .name = "Request",
+ .read_only = true,
+ },
+ .{
+ .finalize = finalize,
+ .text = .{
+ .rfn = Request.getText,
+ },
+ .json = .{
+ .rfn = Request.getJSON,
+ },
+ .arrayBuffer = .{
+ .rfn = Request.getArrayBuffer,
+ },
+ .blob = .{
+ .rfn = Request.getBlob,
+ },
+ .clone = .{
+ .rfn = Request.doClone,
+ },
+ },
+ .{
+ .@"cache" = .{
+ .@"get" = getCache,
+ .@"ro" = true,
+ },
+ .@"credentials" = .{
+ .@"get" = getCredentials,
+ .@"ro" = true,
+ },
+ .@"destination" = .{
+ .@"get" = getDestination,
+ .@"ro" = true,
+ },
+ .@"headers" = .{
+ .@"get" = getHeaders,
+ .@"ro" = true,
+ },
+ .@"integrity" = .{
+ .@"get" = getIntegrity,
+ .@"ro" = true,
+ },
+ .@"method" = .{
+ .@"get" = getMethod,
+ .@"ro" = true,
+ },
+ .@"mode" = .{
+ .@"get" = getMode,
+ .@"ro" = true,
+ },
+ .@"redirect" = .{
+ .@"get" = getRedirect,
+ .@"ro" = true,
+ },
+ .@"referrer" = .{
+ .@"get" = getReferrer,
+ .@"ro" = true,
+ },
+ .@"referrerPolicy" = .{
+ .@"get" = getReferrerPolicy,
+ .@"ro" = true,
+ },
+ .@"url" = .{
+ .@"get" = getUrl,
+ .@"ro" = true,
+ },
+ .@"bodyUsed" = .{
+ .@"get" = getBodyUsed,
+ .@"ro" = true,
+ },
+ },
+ );
+
+ pub fn getCache(
+ _: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return js.JSValueMakeString(ctx, ZigString.init(Properties.UTF8.default).toValueGC(ctx.ptr()).asRef());
+ }
+ pub fn getCredentials(
+ _: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return js.JSValueMakeString(ctx, ZigString.init(Properties.UTF8.include).toValueGC(ctx.ptr()).asRef());
+ }
+ pub fn getDestination(
+ _: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return js.JSValueMakeString(ctx, ZigString.init("").toValueGC(ctx.ptr()).asRef());
+ }
+
+ pub fn getIntegrity(
+ _: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return ZigString.Empty.toValueGC(ctx.ptr()).asRef();
+ }
+ pub fn getMethod(
+ this: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ const string_contents: string = switch (this.method) {
+ .GET => Properties.UTF8.GET,
+ .HEAD => Properties.UTF8.HEAD,
+ .PATCH => Properties.UTF8.PATCH,
+ .PUT => Properties.UTF8.PUT,
+ .POST => Properties.UTF8.POST,
+ .OPTIONS => Properties.UTF8.OPTIONS,
+ else => "",
+ };
+
+ return ZigString.init(string_contents).toValue(ctx.ptr()).asRef();
+ }
+
+ pub fn getMode(
+ _: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return ZigString.init(Properties.UTF8.navigate).toValue(ctx.ptr()).asRef();
+ }
+
+ pub fn finalize(this: *Request) void {
+ if (this.headers) |headers| {
+ headers.deref();
+ this.headers = null;
+ }
+
+ if (this.url.isGloballyAllocated()) {
+ bun.default_allocator.free(bun.constStrToU8(this.url.slice()));
+ }
+
+ bun.default_allocator.destroy(this);
+ }
+
+ pub fn getRedirect(
+ _: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return ZigString.init(Properties.UTF8.follow).toValueGC(ctx.ptr()).asRef();
+ }
+ pub fn getReferrer(
+ this: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ if (this.headers) |headers_ref| {
+ if (headers_ref.get("referrer")) |referrer| {
+ return ZigString.init(referrer).toValueGC(ctx.ptr()).asRef();
+ }
+ }
+
+ return ZigString.init("").toValueGC(ctx.ptr()).asRef();
+ }
+ pub fn getReferrerPolicy(
+ _: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return ZigString.init("").toValueGC(ctx.ptr()).asRef();
+ }
+ pub fn getUrl(
+ this: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return this.url.toValueGC(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn constructor(
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSObjectRef {
+ var request = Request{};
+
+ switch (arguments.len) {
+ 0 => {},
+ 1 => {
+ request.url = JSC.JSValue.fromRef(arguments[0]).getZigString(ctx.ptr());
+ },
+ else => {
+ request.url = JSC.JSValue.fromRef(arguments[0]).getZigString(ctx.ptr());
+
+ if (Body.Init.init(getAllocator(ctx), ctx, arguments[1]) catch null) |req_init| {
+ request.headers = req_init.headers;
+ request.method = req_init.method;
+ }
+
+ if (JSC.JSValue.fromRef(arguments[1]).get(ctx.ptr(), "body")) |body_| {
+ if (Blob.fromJS(ctx.ptr(), body_, true, false)) |blob| {
+ if (blob.size > 0) {
+ request.body = Body.Value{ .Blob = blob };
+ }
+ } else |err| {
+ if (err == error.InvalidArguments) {
+ JSC.JSError(getAllocator(ctx), "Expected an Array", .{}, ctx, exception);
+ return null;
+ }
+
+ JSC.JSError(getAllocator(ctx), "Invalid Body", .{}, ctx, exception);
+ return null;
+ }
+ }
+ },
+ }
+
+ var request_ = getAllocator(ctx).create(Request) catch unreachable;
+ request_.* = request;
+ return Request.Class.make(
+ ctx,
+ request_,
+ );
+ }
+
+ pub fn getBodyValue(
+ this: *Request,
+ ) *Body.Value {
+ return &this.body;
+ }
+
+ pub fn getBodyUsed(
+ this: *Request,
+ _: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return JSC.JSValue.jsBoolean(this.body == .Used).asRef();
+ }
+
+ pub usingnamespace BlobInterface(@This());
+
+ pub fn doClone(
+ this: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ var cloned = this.clone(getAllocator(ctx), ctx.ptr());
+ return Request.Class.make(ctx, cloned);
+ }
+
+ pub fn getHeaders(
+ this: *Request,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ if (this.headers == null) {
+ if (this.uws_request) |req| {
+ this.headers = FetchHeaders.createFromUWS(ctx.ptr(), req);
+ } else {
+ this.headers = FetchHeaders.createEmpty();
+ }
+ }
+
+ return this.headers.?.toJS(ctx.ptr()).asObjectRef();
+ }
+
+ pub fn cloneInto(
+ this: *const Request,
+ req: *Request,
+ allocator: std.mem.Allocator,
+ globalThis: *JSGlobalObject,
+ ) void {
+ req.* = Request{
+ .body = this.body.clone(allocator),
+ .url = ZigString.init(allocator.dupe(u8, this.url.slice()) catch unreachable),
+ .method = this.method,
+ };
+ if (this.headers) |head| {
+ req.headers = head.cloneThis();
+ } else if (this.uws_request) |uws_req| {
+ req.headers = FetchHeaders.createFromUWS(globalThis, uws_req);
+ }
+ }
+
+ pub fn clone(this: *const Request, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) *Request {
+ var req = allocator.create(Request) catch unreachable;
+ this.cloneInto(req, allocator, globalThis);
+ return req;
+ }
+};
+
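+// Comptime mixin that gives any type with getBodyValue() the
+// text()/json()/arrayBuffer()/blob() methods. A .Locked body returns a
+// promise via setPromise(); otherwise the Blob is consumed immediately.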
+fn BlobInterface(comptime Type: type) type {
+ return struct {
+ pub fn getText(
+ this: *Type,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ var value: *Body.Value = this.getBodyValue();
+ if (value.* == .Locked) {
+ return value.Locked.setPromise(ctx.ptr(), .getText).asObjectRef();
+ }
+
+ var blob = this.body.use();
+ return blob.getTextTransfer(ctx);
+ }
+
+ pub fn getJSON(
+ this: *Type,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ var value: *Body.Value = this.getBodyValue();
+ if (value.* == .Locked) {
+ return value.Locked.setPromise(ctx.ptr(), .getJSON).asObjectRef();
+ }
+
+ var blob = this.body.use();
+ return blob.getJSON(ctx, null, null, &.{}, exception);
+ }
+ pub fn getArrayBuffer(
+ this: *Type,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ var value: *Body.Value = this.getBodyValue();
+
+ if (value.* == .Locked) {
+ return value.Locked.setPromise(ctx.ptr(), .getArrayBuffer).asObjectRef();
+ }
+
+ var blob = this.body.use();
+ return blob.getArrayBufferTransfer(ctx);
+ }
+
+ pub fn getBlob(
+ this: *Type,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ var value: *Body.Value = this.getBodyValue();
+
+ if (value.* == .Locked) {
+ return value.Locked.setPromise(ctx.ptr(), .getBlob).asObjectRef();
+ }
+
+ var blob = this.body.use();
+ var ptr = getAllocator(ctx).create(Blob) catch unreachable;
+ ptr.* = blob;
+ blob.allocator = getAllocator(ctx);
+ return JSC.JSPromise.resolvedPromiseValue(ctx.ptr(), JSValue.fromRef(Blob.Class.make(ctx, ptr))).asObjectRef();
+ }
+
+ // pub fn getBody(
+ // this: *Type,
+ // ctx: js.JSContextRef,
+ // _: js.JSObjectRef,
+ // _: js.JSObjectRef,
+ // _: []const js.JSValueRef,
+ // _: js.ExceptionRef,
+ // ) js.JSValueRef {
+ // var value: *Body.Value = this.getBodyValue();
+
+ // switch (value.*) {
+ // .Empty => {},
+ // }
+ // }
+ };
+}
+
+// https://github.com/WebKit/WebKit/blob/main/Source/WebCore/workers/service/FetchEvent.h
+pub const FetchEvent = struct {
+ started_waiting_at: u64 = 0,
+ response: ?*Response = null,
+ request_context: ?*RequestContext = null,
+ request: Request,
+ pending_promise: ?*JSInternalPromise = null,
+
+ onPromiseRejectionCtx: *anyopaque = undefined,
+ onPromiseRejectionHandler: ?fn (ctx: *anyopaque, err: anyerror, fetch_event: *FetchEvent, value: JSValue) void = null,
+ rejected: bool = false,
+
+ pub const Class = NewClass(
+ FetchEvent,
+ .{
+ .name = "FetchEvent",
+ .read_only = true,
+ .ts = .{ .class = d.ts.class{ .interface = true } },
+ },
+ .{
+ .@"respondWith" = .{
+ .rfn = respondWith,
+ .ts = d.ts{
+ .tsdoc = "Render the response in the active HTTP request",
+ .@"return" = "void",
+ .args = &[_]d.ts.arg{
+ .{ .name = "response", .@"return" = "Response" },
+ },
+ },
+ },
+ .@"waitUntil" = waitUntil,
+ .finalize = finalize,
+ },
+ .{
+ .@"client" = .{
+ .@"get" = getClient,
+ .ro = true,
+ .ts = d.ts{
+ .tsdoc = "HTTP client metadata. This is not implemented yet, do not use.",
+ .@"return" = "undefined",
+ },
+ },
+ .@"request" = .{
+ .@"get" = getRequest,
+ .ro = true,
+ .ts = d.ts{
+ .tsdoc = "HTTP request",
+ .@"return" = "InstanceType<Request>",
+ },
+ },
+ },
+ );
+
+ pub fn finalize(
+ this: *FetchEvent,
+ ) void {
+ VirtualMachine.vm.allocator.destroy(this);
+ }
+
+ pub fn getClient(
+ _: *FetchEvent,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ Output.prettyErrorln("FetchEvent.client is not implemented yet - sorry!!", .{});
+ Output.flush();
+ return js.JSValueMakeUndefined(ctx);
+ }
+ pub fn getRequest(
+ this: *FetchEvent,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSStringRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ var req = bun.default_allocator.create(Request) catch unreachable;
+ req.* = this.request;
+
+ return Request.Class.make(
+ ctx,
+ req,
+ );
+ }
+
+ // https://developer.mozilla.org/en-US/docs/Web/API/FetchEvent/respondWith
+ pub fn respondWith(
+ this: *FetchEvent,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ arguments: []const js.JSValueRef,
+ exception: js.ExceptionRef,
+ ) js.JSValueRef {
+ var request_context = this.request_context orelse return js.JSValueMakeUndefined(ctx);
+ if (request_context.has_called_done) return js.JSValueMakeUndefined(ctx);
+ var globalThis = ctx.ptr();
+
+ // A Response or a Promise that resolves to a Response. Otherwise, a network error is returned to Fetch.
+ if (arguments.len == 0 or !Response.Class.isLoaded() or !js.JSValueIsObject(ctx, arguments[0])) {
+ JSError(getAllocator(ctx), "event.respondWith() must be a Response or a Promise<Response>.", .{}, ctx, exception);
+ request_context.sendInternalError(error.respondWithWasEmpty) catch {};
+ return js.JSValueMakeUndefined(ctx);
+ }
+
+ var arg = arguments[0];
+
+ if (JSValue.fromRef(arg).as(Response) == null) {
+ this.pending_promise = this.pending_promise orelse JSInternalPromise.resolvedPromise(globalThis, JSValue.fromRef(arguments[0]));
+ }
+
+ if (this.pending_promise) |promise| {
+ VirtualMachine.vm.event_loop.waitForPromise(promise);
+
+ switch (promise.status(ctx.ptr().vm())) {
+ .Fulfilled => {},
+ else => {
+ this.rejected = true;
+ this.pending_promise = null;
+ this.onPromiseRejectionHandler.?(
+ this.onPromiseRejectionCtx,
+ error.PromiseRejection,
+ this,
+ promise.result(globalThis.vm()),
+ );
+ return js.JSValueMakeUndefined(ctx);
+ },
+ }
+
+ arg = promise.result(ctx.ptr().vm()).asRef();
+ }
+
+ var response: *Response = GetJSPrivateData(Response, arg) orelse {
+ this.rejected = true;
+ this.pending_promise = null;
+ JSError(getAllocator(ctx), "event.respondWith() expects Response or Promise<Response>", .{}, ctx, exception);
+ this.onPromiseRejectionHandler.?(this.onPromiseRejectionCtx, error.RespondWithInvalidTypeInternal, this, JSValue.fromRef(exception.*));
+ return js.JSValueMakeUndefined(ctx);
+ };
+
+ defer {
+ if (!VirtualMachine.vm.had_errors) {
+ Output.printElapsed(@intToFloat(f64, (request_context.timer.lap())) / std.time.ns_per_ms);
+
+ Output.prettyError(
+ " <b>{s}<r><d> - <b>{d}<r> <d>transpiled, <d><b>{d}<r> <d>imports<r>\n",
+ .{
+ request_context.matched_route.?.name,
+ VirtualMachine.vm.transpiled_count,
+ VirtualMachine.vm.resolved_count,
+ },
+ );
+ }
+ }
+
+ defer this.pending_promise = null;
+ var needs_mime_type = true;
+ var content_length: ?usize = null;
+
+ if (response.body.init.headers) |headers_ref| {
+ var headers = Headers.from(headers_ref, request_context.allocator) catch unreachable;
+
+ var i: usize = 0;
+ while (i < headers.entries.len) : (i += 1) {
+ var header = headers.entries.get(i);
+ const name = headers.asStr(header.name);
+ if (strings.eqlComptime(name, "content-type") and headers.asStr(header.value).len > 0) {
+ needs_mime_type = false;
+ }
+
+ if (strings.eqlComptime(name, "content-length")) {
+ content_length = std.fmt.parseInt(usize, headers.asStr(header.value), 10) catch null;
+ continue;
+ }
+
+ // Some headers need to be managed by bun
+ if (strings.eqlComptime(name, "transfer-encoding") or
+ strings.eqlComptime(name, "content-encoding") or
+ strings.eqlComptime(name, "strict-transport-security") or
+ strings.eqlComptime(name, "content-security-policy"))
+ {
+ continue;
+ }
+
+ request_context.appendHeaderSlow(
+ name,
+ headers.asStr(header.value),
+ ) catch unreachable;
+ }
+ }
+
+ if (needs_mime_type) {
+ request_context.appendHeader("Content-Type", response.mimeTypeWithDefault(MimeType.html, request_context));
+ }
+
+ var blob = response.body.value.use();
+ defer blob.deinit();
+
+ const content_length_ = content_length orelse blob.size;
+
+ if (content_length_ == 0) {
+ request_context.sendNoContent() catch return js.JSValueMakeUndefined(ctx);
+ return js.JSValueMakeUndefined(ctx);
+ }
+
+ if (FeatureFlags.strong_etags_for_built_files) {
+ const did_send = request_context.writeETag(blob.sharedView()) catch false;
+ if (did_send) {
+ // defer getAllocator(ctx).destroy(str.ptr);
+ return js.JSValueMakeUndefined(ctx);
+ }
+ }
+
+ defer request_context.done();
+
+ request_context.writeStatusSlow(response.body.init.status_code) catch return js.JSValueMakeUndefined(ctx);
+ request_context.prepareToSendBody(content_length_, false) catch return js.JSValueMakeUndefined(ctx);
+
+ request_context.writeBodyBuf(blob.sharedView()) catch return js.JSValueMakeUndefined(ctx);
+
+ return js.JSValueMakeUndefined(ctx);
+ }
+
+ // our implementation of the event listener already does this
+ // so this is a no-op for us
+ pub fn waitUntil(
+ _: *FetchEvent,
+ ctx: js.JSContextRef,
+ _: js.JSObjectRef,
+ _: js.JSObjectRef,
+ _: []const js.JSValueRef,
+ _: js.ExceptionRef,
+ ) js.JSValueRef {
+ return js.JSValueMakeUndefined(ctx);
+ }
+};
diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig
new file mode 100644
index 000000000..f07cc7c26
--- /dev/null
+++ b/src/bun.js/webcore/streams.zig
@@ -0,0 +1,2208 @@
+const std = @import("std");
+const Api = @import("../../api/schema.zig").Api;
+const bun = @import("../../global.zig");
+const RequestContext = @import("../../http.zig").RequestContext;
+const MimeType = @import("../../http.zig").MimeType;
+const ZigURL = @import("../../url.zig").URL;
+const HTTPClient = @import("http");
+const NetworkThread = HTTPClient.NetworkThread;
+const AsyncIO = NetworkThread.AsyncIO;
+const JSC = @import("javascript_core");
+const js = JSC.C;
+
+const Method = @import("../../http/method.zig").Method;
+const FetchHeaders = JSC.FetchHeaders;
+const ObjectPool = @import("../../pool.zig").ObjectPool;
+const SystemError = JSC.SystemError;
+const Output = @import("../../global.zig").Output;
+const MutableString = @import("../../global.zig").MutableString;
+const strings = @import("../../global.zig").strings;
+const string = @import("../../global.zig").string;
+const default_allocator = @import("../../global.zig").default_allocator;
+const FeatureFlags = @import("../../global.zig").FeatureFlags;
+const ArrayBuffer = @import("../base.zig").ArrayBuffer;
+const Properties = @import("../base.zig").Properties;
+const NewClass = @import("../base.zig").NewClass;
+const d = @import("../base.zig").d;
+const castObj = @import("../base.zig").castObj;
+const getAllocator = @import("../base.zig").getAllocator;
+const JSPrivateDataPtr = @import("../base.zig").JSPrivateDataPtr;
+const GetJSPrivateData = @import("../base.zig").GetJSPrivateData;
+const Environment = @import("../../env.zig");
+const ZigString = JSC.ZigString;
+const IdentityContext = @import("../../identity_context.zig").IdentityContext;
+const JSInternalPromise = JSC.JSInternalPromise;
+const JSPromise = JSC.JSPromise;
+const JSValue = JSC.JSValue;
+const JSError = JSC.JSError;
+const JSGlobalObject = JSC.JSGlobalObject;
+
+const VirtualMachine = @import("../javascript.zig").VirtualMachine;
+const Task = JSC.Task;
+const JSPrinter = @import("../../js_printer.zig");
+const picohttp = @import("picohttp");
+const StringJoiner = @import("../../string_joiner.zig");
+const uws = @import("uws");
+const Blob = JSC.WebCore.Blob;
+const Response = JSC.WebCore.Response;
+const Request = JSC.WebCore.Request;
+
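+// Thin handle over a JS ReadableStream value plus a tag describing the
+// native source backing it (plain JavaScript, a byte Blob, or a file).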
+pub const ReadableStream = struct {
+ value: JSValue,
+ ptr: Source,
+
+ pub fn done(this: *const ReadableStream) void {
+ this.value.unprotect();
+ }
+
+ pub const Tag = enum(i32) {
+ Invalid = -1,
+
+ JavaScript = 0,
+ Blob = 1,
+ File = 2,
+ HTTPRequest = 3,
+ HTTPSRequest = 4,
+ HTTPResponse = 5,
+ HTTPSResponse = 6,
+ };
+ pub const Source = union(Tag) {
+ Invalid: void,
+ JavaScript: void,
+ Blob: *ByteBlobLoader,
+ File: *FileBlobLoader,
+ // HTTPRequest: *HTTPRequest,
+ HTTPRequest: void,
+ // HTTPSRequest: *HTTPSRequest,
+ HTTPSRequest: void,
+ // HTTPResponse: *HTTPResponse,
+ HTTPResponse: void,
+ // HTTPSResponse: *HTTPSResponse,
+ HTTPSResponse: void,
+ };
+
+ extern fn ReadableStreamTag__tagged(globalObject: *JSGlobalObject, possibleReadableStream: JSValue, ptr: *JSValue) Tag;
+ extern fn ReadableStream__isDisturbed(possibleReadableStream: JSValue, globalObject: *JSGlobalObject) bool;
+ extern fn ReadableStream__isLocked(possibleReadableStream: JSValue, globalObject: *JSGlobalObject) bool;
+ extern fn ReadableStream__empty(*JSGlobalObject) JSC.JSValue;
+ extern fn ReadableStream__fromBlob(
+ *JSGlobalObject,
+ store: *anyopaque,
+ offset: usize,
+ length: usize,
+ ) JSC.JSValue;
+
+ pub fn isDisturbed(this: *const ReadableStream, globalObject: *JSGlobalObject) bool {
+ JSC.markBinding();
+ return ReadableStream__isDisturbed(this.value, globalObject);
+ }
+
+ pub fn isLocked(this: *const ReadableStream, globalObject: *JSGlobalObject) bool {
+ JSC.markBinding();
+ return ReadableStream__isLocked(this.value, globalObject);
+ }
+
+ pub fn fromJS(value: JSValue, globalThis: *JSGlobalObject) ?ReadableStream {
+ JSC.markBinding();
+ var ptr = JSValue.zero;
+ return switch (ReadableStreamTag__tagged(globalThis, value, &ptr)) {
+ .JavaScript => ReadableStream{
+ .value = value,
+ .ptr = .{
+ .JavaScript = {},
+ },
+ },
+ .Blob => ReadableStream{
+ .value = value,
+ .ptr = .{
+ .Blob = ptr.asPtr(ByteBlobLoader),
+ },
+ },
+ .File => ReadableStream{
+ .value = value,
+ .ptr = .{
+ .File = ptr.asPtr(FileBlobLoader),
+ },
+ },
+
+ // .HTTPRequest => ReadableStream{
+ // .value = value,
+ // .ptr = .{
+ // .HTTPRequest = ptr.asPtr(HTTPRequest),
+ // },
+ // },
+ // .HTTPSRequest => ReadableStream{
+ // .value = value,
+ // .ptr = .{
+ // .HTTPSRequest = ptr.asPtr(HTTPSRequest),
+ // },
+ // },
+ else => null,
+ };
+ }
+
+ extern fn ZigGlobalObject__createNativeReadableStream(*JSGlobalObject, nativePtr: JSValue, nativeType: JSValue) JSValue;
+
+ pub fn fromNative(globalThis: *JSGlobalObject, id: Tag, ptr: *anyopaque) JSC.JSValue {
+ return ZigGlobalObject__createNativeReadableStream(globalThis, JSValue.fromPtr(ptr), JSValue.jsNumber(@enumToInt(id)));
+ }
+ pub fn fromBlob(globalThis: *JSGlobalObject, blob: *const Blob, recommended_chunk_size: Blob.SizeType) JSC.JSValue {
+ if (comptime JSC.is_bindgen)
+ unreachable;
+ var store = blob.store orelse {
+ return ReadableStream.empty(globalThis);
+ };
+ switch (store.data) {
+ .bytes => {
+ var reader = bun.default_allocator.create(ByteBlobLoader.Source) catch unreachable;
+ reader.* = .{
+ .context = undefined,
+ };
+ reader.context.setup(blob, recommended_chunk_size);
+ return reader.toJS(globalThis);
+ },
+ .file => {
+ var reader = bun.default_allocator.create(FileBlobLoader.Source) catch unreachable;
+ reader.* = .{
+ .context = undefined,
+ };
+ reader.context.setup(store, recommended_chunk_size);
+ return reader.toJS(globalThis);
+ },
+ }
+ }
+
+ pub fn empty(globalThis: *JSGlobalObject) JSC.JSValue {
+ if (comptime JSC.is_bindgen)
+ unreachable;
+
+ return ReadableStream__empty(globalThis);
+ }
+
+ const Base = @import("../../ast/base.zig");
+ pub const StreamTag = enum(usize) {
+ invalid = 0,
+ _,
+
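+ // Packs a file descriptor into the enum's u64 payload: byte 0 is a
+ // marker (1) and the remaining bytes carry the truncated descriptor;
+ // fd() reverses the packing and returns maxInt when the marker is absent.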
+ pub fn init(filedes: JSC.Node.FileDescriptor) StreamTag {
+ var bytes = [8]u8{ 1, 0, 0, 0, 0, 0, 0, 0 };
+ const filedes_ = @bitCast([8]u8, @as(usize, @truncate(u56, @intCast(usize, filedes))));
+ bytes[1..8].* = filedes_[0..7].*;
+
+ return @intToEnum(StreamTag, @bitCast(u64, bytes));
+ }
+
+ pub fn fd(this: StreamTag) JSC.Node.FileDescriptor {
+ var bytes = @bitCast([8]u8, @enumToInt(this));
+ if (bytes[0] != 1) {
+ return std.math.maxInt(JSC.Node.FileDescriptor);
+ }
+ var out: u64 = 0;
+ @bitCast([8]u8, out)[0..7].* = bytes[1..8].*;
+ return @intCast(JSC.Node.FileDescriptor, out);
+ }
+ };
+};
+
+pub const StreamStart = union(Tag) {
+ empty: void,
+ err: JSC.Node.Syscall.Error,
+ chunk_size: Blob.SizeType,
+ ArrayBufferSink: struct {
+ chunk_size: Blob.SizeType,
+ as_uint8array: bool,
+ stream: bool,
+ },
+ ready: void,
+
+ pub const Tag = enum {
+ empty,
+ err,
+ chunk_size,
+ ArrayBufferSink,
+ ready,
+ };
+
+ pub fn toJS(this: StreamStart, globalThis: *JSGlobalObject) JSC.JSValue {
+ switch (this) {
+ .empty, .ready => {
+ return JSC.JSValue.jsUndefined();
+ },
+ .chunk_size => |chunk| {
+ return JSC.JSValue.jsNumber(@intCast(Blob.SizeType, chunk));
+ },
+ .err => |err| {
+ globalThis.vm().throwError(globalThis, err.toJSC(globalThis));
+ return JSC.JSValue.jsUndefined();
+ },
+ else => {
+ return JSC.JSValue.jsUndefined();
+ },
+ }
+ }
+
+ pub fn fromJS(globalThis: *JSGlobalObject, value: JSValue) StreamStart {
+ if (value.isEmptyOrUndefinedOrNull() or !value.isObject()) {
+ return .{ .empty = {} };
+ }
+
+ if (value.get(globalThis, "chunkSize")) |chunkSize| {
+ return .{ .chunk_size = @intCast(Blob.SizeType, @truncate(i52, chunkSize.toInt64())) };
+ }
+
+ return .{ .empty = {} };
+ }
+
+ pub fn fromJSWithTag(
+ globalThis: *JSGlobalObject,
+ value: JSValue,
+ comptime tag: Tag,
+ ) StreamStart {
+ if (value.isEmptyOrUndefinedOrNull() or !value.isObject()) {
+ return .{ .empty = {} };
+ }
+
+ switch (comptime tag) {
+ .ArrayBufferSink => {
+ var as_uint8array = false;
+ var stream = false;
+ var chunk_size: JSC.WebCore.Blob.SizeType = 0;
+ var empty = true;
+
+ if (value.get(globalThis, "asUint8Array")) |as_array| {
+ as_uint8array = as_array.toBoolean();
+ empty = false;
+ }
+
+ if (value.get(globalThis, "stream")) |as_array| {
+ stream = as_array.toBoolean();
+ empty = false;
+ }
+
+ if (value.get(globalThis, "highWaterMark")) |chunkSize| {
+ empty = false;
+ chunk_size = @intCast(JSC.WebCore.Blob.SizeType, @maximum(0, @truncate(i51, chunkSize.toInt64())));
+ }
+
+ if (!empty) {
+ return .{
+ .ArrayBufferSink = .{
+ .chunk_size = chunk_size,
+ .as_uint8array = as_uint8array,
+ .stream = stream,
+ },
+ };
+ }
+ },
+ else => @compileError("Unsupported tag"),
+ }
+
+ return .{ .empty = {} };
+ }
+};
+
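+// One chunk's result: whether the bytes are owned by the receiver or only
+// temporary, whether the stream finished with this chunk (*_and_done),
+// whether data landed in a caller-provided array, or whether the result is
+// still pending or errored.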
+pub const StreamResult = union(Tag) {
+ owned: bun.ByteList,
+ owned_and_done: bun.ByteList,
+ temporary_and_done: bun.ByteList,
+ temporary: bun.ByteList,
+ into_array: IntoArray,
+ into_array_and_done: IntoArray,
+ pending: *Pending,
+ err: JSC.Node.Syscall.Error,
+ done: void,
+
+ pub const Tag = enum {
+ owned,
+ owned_and_done,
+ temporary_and_done,
+ temporary,
+ into_array,
+ into_array_and_done,
+ pending,
+ err,
+ done,
+ };
+
+ pub fn slice(this: *const StreamResult) []const u8 {
+ return switch (this.*) {
+ .owned => |owned| owned.slice(),
+ .owned_and_done => |owned_and_done| owned_and_done.slice(),
+ .temporary_and_done => |temporary_and_done| temporary_and_done.slice(),
+ .temporary => |temporary| temporary.slice(),
+ else => "",
+ };
+ }
+
+ pub const Writable = union(StreamResult.Tag) {
+ pending: *Writable.Pending,
+
+ err: JSC.Node.Syscall.Error,
+ done: void,
+
+ owned: Blob.SizeType,
+ owned_and_done: Blob.SizeType,
+ temporary_and_done: Blob.SizeType,
+ temporary: Blob.SizeType,
+ into_array: Blob.SizeType,
+ into_array_and_done: Blob.SizeType,
+
+ pub const Pending = struct {
+ frame: anyframe,
+ result: Writable,
+ consumed: Blob.SizeType = 0,
+ used: bool = false,
+ };
+
+ pub fn toPromised(globalThis: *JSGlobalObject, promise: *JSPromise, pending: *Writable.Pending) void {
+ var frame = bun.default_allocator.create(@Frame(Writable.toPromisedWrap)) catch unreachable;
+ frame.* = async Writable.toPromisedWrap(globalThis, promise, pending);
+ pending.frame = frame;
+ }
+
+ pub fn isDone(this: *const Writable) bool {
+ return switch (this.*) {
+ .owned_and_done, .temporary_and_done, .into_array_and_done, .done, .err => true,
+ else => false,
+ };
+ }
+ fn toPromisedWrap(globalThis: *JSGlobalObject, promise: *JSPromise, pending: *Writable.Pending) void {
+ suspend {}
+
+ pending.used = true;
+ const result: Writable = pending.result;
+
+ switch (result) {
+ .err => |err| {
+ promise.reject(globalThis, err.toJSC(globalThis));
+ },
+ .done => {
+ promise.resolve(globalThis, JSValue.jsBoolean(false));
+ },
+ else => {
+ promise.resolve(globalThis, result.toJS(globalThis));
+ },
+ }
+ }
+
+ pub fn toJS(this: Writable, globalThis: *JSGlobalObject) JSValue {
+ return switch (this) {
+ .err => |err| JSC.JSPromise.rejectedPromise(globalThis, JSValue.c(err.toJS(globalThis.ref()))).asValue(globalThis),
+
+ .owned => |len| JSC.JSValue.jsNumber(len),
+ .owned_and_done => |len| JSC.JSValue.jsNumber(len),
+ .temporary_and_done => |len| JSC.JSValue.jsNumber(len),
+ .temporary => |len| JSC.JSValue.jsNumber(len),
+ .into_array => |len| JSC.JSValue.jsNumber(len),
+ .into_array_and_done => |len| JSC.JSValue.jsNumber(len),
+
+ // false == controller.close()
+ // undefined == noop, but we probably won't send it
+ .done => JSC.JSValue.jsBoolean(true),
+
+ .pending => |pending| brk: {
+ var promise = JSC.JSPromise.create(globalThis);
+ Writable.toPromised(globalThis, promise, pending);
+ break :brk promise.asValue(globalThis);
+ },
+ };
+ }
+ };
+
+ pub const IntoArray = struct {
+ value: JSValue = JSValue.zero,
+ len: Blob.SizeType = std.math.maxInt(Blob.SizeType),
+ };
+
+ pub const Pending = struct {
+ frame: anyframe,
+ result: StreamResult,
+ used: bool = false,
+ };
+
+ pub fn isDone(this: *const StreamResult) bool {
+ return switch (this.*) {
+ .owned_and_done, .temporary_and_done, .into_array_and_done, .done, .err => true,
+ else => false,
+ };
+ }
+
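+ // A pending result is bridged to a JSPromise by suspending an async
+ // frame; the producer resumes the frame once pending.result is set, and
+ // the wrapper then settles the promise from it.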
+ fn toPromisedWrap(globalThis: *JSGlobalObject, promise: *JSPromise, pending: *Pending) void {
+ suspend {}
+
+ pending.used = true;
+ const result: StreamResult = pending.result;
+
+ switch (result) {
+ .err => |err| {
+ promise.reject(globalThis, err.toJSC(globalThis));
+ },
+ .done => {
+ promise.resolve(globalThis, JSValue.jsBoolean(false));
+ },
+ else => {
+ promise.resolve(globalThis, result.toJS(globalThis));
+ },
+ }
+ }
+
+ pub fn toPromised(globalThis: *JSGlobalObject, promise: *JSPromise, pending: *Pending) void {
+ var frame = bun.default_allocator.create(@Frame(toPromisedWrap)) catch unreachable;
+ frame.* = async toPromisedWrap(globalThis, promise, pending);
+ pending.frame = frame;
+ }
+
+ pub fn toJS(this: *const StreamResult, globalThis: *JSGlobalObject) JSValue {
+ switch (this.*) {
+ .owned => |list| {
+ return JSC.ArrayBuffer.fromBytes(list.slice(), .Uint8Array).toJS(globalThis.ref(), null);
+ },
+ .owned_and_done => |list| {
+ return JSC.ArrayBuffer.fromBytes(list.slice(), .Uint8Array).toJS(globalThis.ref(), null);
+ },
+ .temporary => |temp| {
+ var array = JSC.JSValue.createUninitializedUint8Array(globalThis, temp.len);
+ var slice_ = array.asArrayBuffer(globalThis).?.slice();
+ @memcpy(slice_.ptr, temp.ptr, temp.len);
+ return array;
+ },
+ .temporary_and_done => |temp| {
+ var array = JSC.JSValue.createUninitializedUint8Array(globalThis, temp.len);
+ var slice_ = array.asArrayBuffer(globalThis).?.slice();
+ @memcpy(slice_.ptr, temp.ptr, temp.len);
+ return array;
+ },
+ .into_array => |array| {
+ return JSC.JSValue.jsNumberFromInt64(array.len);
+ },
+ .into_array_and_done => |array| {
+ return JSC.JSValue.jsNumberFromInt64(array.len);
+ },
+ .pending => |pending| {
+ var promise = JSC.JSPromise.create(globalThis);
+ toPromised(globalThis, promise, pending);
+ return promise.asValue(globalThis);
+ },
+
+ .err => |err| {
+ return JSC.JSPromise.rejectedPromise(globalThis, JSValue.c(err.toJS(globalThis.ref()))).asValue(globalThis);
+ },
+
+ // false == controller.close()
+ // undefined == noop, but we probably won't send it
+ .done => {
+ return JSC.JSValue.jsBoolean(false);
+ },
+ }
+ }
+};
+
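+// Type-erased notification handle used by sinks; the default value points
+// at a sentinel and isDead() short-circuits close/ready/start into no-ops.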
+pub const Signal = struct {
+ ptr: *anyopaque = @intToPtr(*anyopaque, 0xaaaaaaaa),
+ vtable: VTable = VTable.Dead,
+
+ pub fn isDead(this: Signal) bool {
+ return this.ptr == @intToPtr(*anyopaque, 0xaaaaaaaa);
+ }
+
+ pub fn initWithType(comptime Type: type, handler: *Type) Signal {
+ return .{
+ .ptr = handler,
+ .vtable = VTable.wrap(Type),
+ };
+ }
+
+ pub fn init(handler: anytype) Signal {
+ return initWithType(std.meta.Child(@TypeOf(handler)), handler);
+ }
+
+ pub fn close(this: Signal, err: ?JSC.Node.Syscall.Error) void {
+ if (this.isDead())
+ return;
+ this.vtable.close(this.ptr, err);
+ }
+ pub fn ready(this: Signal, amount: ?Blob.SizeType, offset: ?Blob.SizeType) void {
+ if (this.isDead())
+ return;
+ this.vtable.ready(this.ptr, amount, offset);
+ }
+ pub fn start(this: Signal) void {
+ if (this.isDead())
+ return;
+ this.vtable.start(this.ptr);
+ }
+
+ pub const VTable = struct {
+ pub const OnCloseFn = fn (this: *anyopaque, err: ?JSC.Node.Syscall.Error) void;
+ pub const OnReadyFn = fn (this: *anyopaque, amount: ?Blob.SizeType, offset: ?Blob.SizeType) void;
+ pub const OnStartFn = fn (this: *anyopaque) void;
+ close: OnCloseFn,
+ ready: OnReadyFn,
+ start: OnStartFn,
+
+ const DeadFns = struct {
+ pub fn close(_: *anyopaque, _: ?JSC.Node.Syscall.Error) void {
+ unreachable;
+ }
+ pub fn ready(_: *anyopaque, _: ?Blob.SizeType, _: ?Blob.SizeType) void {
+ unreachable;
+ }
+
+ pub fn start(_: *anyopaque) void {
+ unreachable;
+ }
+ };
+
+ pub const Dead = VTable{ .close = DeadFns.close, .ready = DeadFns.ready, .start = DeadFns.start };
+
+ pub fn wrap(
+ comptime Wrapped: type,
+ ) VTable {
+ const Functions = struct {
+ fn onClose(this: *anyopaque, err: ?JSC.Node.Syscall.Error) void {
+ Wrapped.close(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)), err);
+ }
+ fn onReady(this: *anyopaque, amount: ?Blob.SizeType, offset: ?Blob.SizeType) void {
+ Wrapped.ready(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)), amount, offset);
+ }
+ fn onStart(this: *anyopaque) void {
+ Wrapped.start(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)));
+ }
+ };
+
+ return VTable{
+ .close = Functions.onClose,
+ .ready = Functions.onReady,
+ .start = Functions.onStart,
+ };
+ }
+ };
+};
+
+pub const Sink = struct {
+ ptr: *anyopaque,
+ vtable: VTable,
+ status: Status = Status.closed,
+ used: bool = false,
+
+ pub const Status = enum {
+ ready,
+ closed,
+ };
+
+ pub const Data = union(enum) {
+ utf16: StreamResult,
+ latin1: StreamResult,
+ bytes: StreamResult,
+ };
+
+ pub fn initWithType(comptime Type: type, handler: *Type) Sink {
+ return .{
+ .ptr = handler,
+ .vtable = VTable.wrap(Type),
+ .status = .ready,
+ .used = false,
+ };
+ }
+
+ pub fn init(handler: anytype) Sink {
+ return initWithType(std.meta.Child(@TypeOf(handler)), handler);
+ }
+
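+ // Converts Latin-1 and UTF-16 chunks to UTF-8 before handing them to the
+ // sink's byte writer: inputs that fit are transcoded on a fixed stack
+ // buffer and passed as temporary, larger ones are heap-allocated and
+ // passed as owned.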
+ pub const UTF8Fallback = struct {
+ const stack_size = 1024;
+ pub fn writeLatin1(comptime Ctx: type, ctx: *Ctx, input: StreamResult, comptime writeFn: anytype) StreamResult.Writable {
+ var str = input.slice();
+ if (strings.isAllASCII(str)) {
+ return writeFn(
+ ctx,
+ input,
+ );
+ }
+
+ if (stack_size >= str.len) {
+ var buf: [stack_size]u8 = undefined;
+ @memcpy(&buf, str.ptr, str.len);
+ strings.replaceLatin1WithUTF8(buf[0..str.len]);
+ if (input.isDone()) {
+ const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..str.len]) });
+ return result;
+ } else {
+ const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..str.len]) });
+ return result;
+ }
+ }
+
+ {
+ var slice = bun.default_allocator.alloc(u8, str.len) catch return .{ .err = JSC.Node.Syscall.Error.oom };
+ @memcpy(slice.ptr, str.ptr, str.len);
+ strings.replaceLatin1WithUTF8(slice[0..str.len]);
+ if (input.isDone()) {
+ return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(slice) });
+ } else {
+ return writeFn(ctx, .{ .owned = bun.ByteList.init(slice) });
+ }
+ }
+ }
+
+ pub fn writeUTF16(comptime Ctx: type, ctx: *Ctx, input: StreamResult, comptime writeFn: anytype) StreamResult.Writable {
+ var str: []const u16 = std.mem.bytesAsSlice(u16, input.slice());
+
+ if (stack_size >= str.len * 2) {
+ var buf: [stack_size]u8 = undefined;
+ const copied = strings.copyUTF16IntoUTF8(&buf, []const u16, str);
+ std.debug.assert(copied.written <= stack_size);
+ std.debug.assert(copied.read <= stack_size);
+ if (input.isDone()) {
+ const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..copied.written]) });
+ return result;
+ } else {
+ const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..copied.written]) });
+ return result;
+ }
+ }
+
+ {
+ var allocated = strings.toUTF8Alloc(bun.default_allocator, str) catch return .{ .err = JSC.Node.Syscall.Error.oom };
+ if (input.isDone()) {
+ return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(allocated) });
+ } else {
+ return writeFn(ctx, .{ .owned = bun.ByteList.init(allocated) });
+ }
+ }
+ }
+ };
+
+ pub const VTable = struct {
+ pub const WriteUTF16Fn = fn (this: *anyopaque, data: StreamResult) StreamResult.Writable;
+ pub const WriteUTF8Fn = fn (this: *anyopaque, data: StreamResult) StreamResult.Writable;
+ pub const WriteLatin1Fn = fn (this: *anyopaque, data: StreamResult) StreamResult.Writable;
+ pub const EndFn = fn (this: *anyopaque, err: ?JSC.Node.Syscall.Error) JSC.Node.Maybe(void);
+ pub const ConnectFn = fn (this: *anyopaque, signal: Signal) JSC.Node.Maybe(void);
+
+ connect: ConnectFn,
+ write: WriteUTF8Fn,
+ writeLatin1: WriteLatin1Fn,
+ writeUTF16: WriteUTF16Fn,
+ end: EndFn,
+
+ pub fn wrap(
+ comptime Wrapped: type,
+ ) VTable {
+ const Functions = struct {
+ pub fn onWrite(this: *anyopaque, data: StreamResult) StreamResult.Writable {
+ return Wrapped.write(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)), data);
+ }
+ pub fn onConnect(this: *anyopaque, signal: Signal) JSC.Node.Maybe(void) {
+ return Wrapped.connect(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)), signal);
+ }
+ pub fn onWriteLatin1(this: *anyopaque, data: StreamResult) StreamResult.Writable {
+ return Wrapped.writeLatin1(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)), data);
+ }
+ pub fn onWriteUTF16(this: *anyopaque, data: StreamResult) StreamResult.Writable {
+ return Wrapped.writeUTF16(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)), data);
+ }
+ pub fn onEnd(this: *anyopaque, err: ?JSC.Node.Syscall.Error) JSC.Node.Maybe(void) {
+ return Wrapped.end(@ptrCast(*Wrapped, @alignCast(std.meta.alignment(Wrapped), this)), err);
+ }
+ };
+
+ return VTable{
+ .write = Functions.onWrite,
+ .writeLatin1 = Functions.onWriteLatin1,
+ .writeUTF16 = Functions.onWriteUTF16,
+ .end = Functions.onEnd,
+ .connect = Functions.onConnect,
+ };
+ }
+ };
+
+ pub fn end(this: *Sink, err: ?JSC.Node.Syscall.Error) JSC.Node.Maybe(void) {
+ if (this.status == .closed) {
+ return .{ .result = {} };
+ }
+
+ this.status = .closed;
+ return this.vtable.end(this.ptr, err);
+ }
+
+ pub fn writeLatin1(this: *Sink, data: StreamResult) StreamResult.Writable {
+ if (this.status == .closed) {
+ return .{ .done = {} };
+ }
+
+ const res = this.vtable.writeLatin1(this.ptr, data);
+ this.status = if ((res.isDone()) or this.status == .closed)
+ Status.closed
+ else
+ Status.ready;
+ this.used = true;
+ return res;
+ }
+
+ pub fn writeBytes(this: *Sink, data: StreamResult) StreamResult.Writable {
+ if (this.status == .closed) {
+ return .{ .done = {} };
+ }
+
+ const res = this.vtable.write(this.ptr, data);
+ this.status = if ((res.isDone()) or this.status == .closed)
+ Status.closed
+ else
+ Status.ready;
+ this.used = true;
+ return res;
+ }
+
+ pub fn writeUTF16(this: *Sink, data: StreamResult) StreamResult.Writable {
+ if (this.status == .closed) {
+ return .{ .done = {} };
+ }
+
+ const res = this.vtable.writeUTF16(this.ptr, data);
+ this.status = if ((res.isDone()) or this.status == .closed)
+ Status.closed
+ else
+ Status.ready;
+ this.used = true;
+ return res;
+ }
+
+ pub fn write(this: *Sink, data: Data) StreamResult.Writable {
+ switch (data) {
+ .utf16 => |str| {
+ return this.writeUTF16(str);
+ },
+ .latin1 => |str| {
+ return this.writeLatin1(str);
+ },
+ .bytes => |bytes| {
+ return this.writeBytes(bytes);
+ },
+ }
+ }
+};
+
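+// Buffers written bytes in memory. In streaming mode, drainFromJS/toJS hand
+// back the accumulated chunk as a Uint8Array or ArrayBuffer and reset the
+// buffer; otherwise endFromJS returns the whole buffer as one ArrayBuffer.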
+pub const ArrayBufferSink = struct {
+ bytes: bun.ByteList,
+ allocator: std.mem.Allocator,
+ done: bool = false,
+ signal: Signal = .{},
+ next: ?Sink = null,
+ streaming: bool = false,
+ as_uint8array: bool = false,
+
+ pub fn connect(this: *ArrayBufferSink, signal: Signal) void {
+ std.debug.assert(this.reader == null);
+ this.signal = signal;
+ }
+
+ pub fn start(this: *ArrayBufferSink, stream_start: StreamStart) JSC.Node.Maybe(void) {
+ var list = this.bytes.listManaged(this.allocator);
+ list.clearAndFree();
+
+ switch (stream_start) {
+ .ArrayBufferSink => |config| {
+ if (config.chunk_size > 0) {
+ list.ensureTotalCapacityPrecise(config.chunk_size) catch return .{ .err = JSC.Node.Syscall.Error.oom };
+ this.bytes.update(list);
+ }
+
+ this.as_uint8array = config.as_uint8array;
+ this.streaming = config.stream;
+ },
+ else => {},
+ }
+
+ this.done = false;
+
+ this.signal.start();
+ return .{ .result = {} };
+ }
+
+ pub fn drain(_: *ArrayBufferSink) JSC.Node.Maybe(void) {
+ return .{ .result = {} };
+ }
+
+ pub fn drainFromJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject) JSC.Node.Maybe(JSValue) {
+ if (this.streaming) {
+ const value: JSValue = switch (this.as_uint8array) {
+ true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array),
+ false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer),
+ };
+ this.bytes.len = 0;
+ return .{ .result = value };
+ }
+
+ return .{ .result = JSValue.jsUndefined() };
+ }
+
+ pub fn finalize(this: *ArrayBufferSink) void {
+ if (this.bytes.len > 0) {
+ this.bytes.listManaged(this.allocator).deinit();
+ this.bytes = bun.ByteList.init("");
+ this.done = true;
+ }
+ }
+
+ pub fn init(allocator: std.mem.Allocator, next: ?Sink) !*ArrayBufferSink {
+ var this = try allocator.create(ArrayBufferSink);
+ this.* = ArrayBufferSink{
+ .bytes = bun.ByteList.init(&.{}),
+ .allocator = allocator,
+ .next = next,
+ };
+ return this;
+ }
+
+ pub fn construct(
+ this: *ArrayBufferSink,
+ allocator: std.mem.Allocator,
+ ) void {
+ this.* = ArrayBufferSink{
+ .bytes = bun.ByteList.init(&.{}),
+ .allocator = allocator,
+ .next = null,
+ };
+ }
+
+ pub fn write(this: *@This(), data: StreamResult) StreamResult.Writable {
+ if (this.next) |*next| {
+ return next.writeBytes(data);
+ }
+
+ const len = this.bytes.write(this.allocator, data.slice()) catch {
+ return .{ .err = JSC.Node.Syscall.Error.oom };
+ };
+ this.signal.ready(null, null);
+ return .{ .owned = len };
+ }
+ pub const writeBytes = write;
+ pub fn writeLatin1(this: *@This(), data: StreamResult) StreamResult.Writable {
+ if (this.next) |*next| {
+ return next.writeLatin1(data);
+ }
+ const len = this.bytes.writeLatin1(this.allocator, data.slice()) catch {
+ return .{ .err = JSC.Node.Syscall.Error.oom };
+ };
+ this.signal.ready(null, null);
+ return .{ .owned = len };
+ }
+ pub fn writeUTF16(this: *@This(), data: StreamResult) StreamResult.Writable {
+ if (this.next) |*next| {
+ return next.writeUTF16(data);
+ }
+ const len = this.bytes.writeUTF16(this.allocator, @ptrCast([*]const u16, @alignCast(@alignOf(u16), data.slice().ptr))[0..std.mem.bytesAsSlice(u16, data.slice()).len]) catch {
+ return .{ .err = JSC.Node.Syscall.Error.oom };
+ };
+ this.signal.ready(null, null);
+ return .{ .owned = len };
+ }
+
+ pub fn end(this: *ArrayBufferSink, err: ?JSC.Node.Syscall.Error) JSC.Node.Maybe(void) {
+ if (this.next) |*next| {
+ return next.end(err);
+ }
+ this.signal.close(err);
+ return .{ .result = {} };
+ }
+
+ pub fn toJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, as_uint8array: bool) JSValue {
+ if (this.streaming) {
+ const value: JSValue = switch (as_uint8array) {
+ true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array),
+ false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer),
+ };
+ this.bytes.len = 0;
+ return value;
+ }
+
+ var list = this.bytes.listManaged(this.allocator);
+ this.bytes = bun.ByteList.init("");
+ return ArrayBuffer.fromBytes(
+ list.toOwnedSlice(),
+ if (as_uint8array)
+ .Uint8Array
+ else
+ .ArrayBuffer,
+ ).toJS(globalThis, null);
+ }
+
+ pub fn endFromJS(this: *ArrayBufferSink, _: *JSGlobalObject) JSC.Node.Maybe(ArrayBuffer) {
+ if (this.done) {
+ return .{ .result = ArrayBuffer.fromBytes(&[_]u8{}, .ArrayBuffer) };
+ }
+
+ std.debug.assert(this.next == null);
+ var list = this.bytes.listManaged(this.allocator);
+ this.bytes = bun.ByteList.init("");
+ this.done = true;
+ this.signal.close(null);
+ return .{ .result = ArrayBuffer.fromBytes(
+ list.toOwnedSlice(),
+ if (this.as_uint8array)
+ .Uint8Array
+ else
+ .ArrayBuffer,
+ ) };
+ }
+
+ pub fn sink(this: *ArrayBufferSink) Sink {
+ return Sink.init(this);
+ }
+
+ pub const JSSink = NewJSSink(@This(), "ArrayBufferSink");
+};
+
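+// Generates the JS-facing wrapper for a native sink type: construct, write,
+// close, drain, start, and end are exported to the C++ side (see Export
+// below), and each call recovers the native pointer with fromJS() before
+// dispatching to SinkType.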
+pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type {
+ return struct {
+ sink: SinkType,
+
+ const ThisSink = @This();
+
+ pub const shim = JSC.Shimmer("", std.mem.span(name_), @This());
+ pub const name = std.fmt.comptimePrint("{s}", .{std.mem.span(name_)});
+
+ pub fn createObject(globalThis: *JSGlobalObject, object: *anyopaque) callconv(.C) JSValue {
+ JSC.markBinding();
+
+ return shim.cppFn("createObject", .{ globalThis, object });
+ }
+
+ pub fn fromJS(globalThis: *JSGlobalObject, value: JSValue) ?*anyopaque {
+ JSC.markBinding();
+
+ return shim.cppFn("fromJS", .{ globalThis, value });
+ }
+
+ pub fn construct(globalThis: *JSGlobalObject, _: *JSC.CallFrame) callconv(.C) JSValue {
+ JSC.markBinding();
+ var allocator = globalThis.bunVM().allocator;
+ var this = allocator.create(ThisSink) catch {
+ globalThis.vm().throwError(globalThis, JSC.Node.Syscall.Error.oom.toJSC(
+ globalThis,
+ ));
+ return JSC.JSValue.jsUndefined();
+ };
+ this.sink.construct(allocator);
+ return createObject(globalThis, this);
+ }
+
+ pub fn finalize(ptr: *anyopaque) callconv(.C) void {
+ var this = @ptrCast(*ThisSink, @alignCast(std.meta.alignment(ThisSink), ptr));
+
+ this.sink.finalize();
+ }
+
+ pub fn write(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSValue {
+ JSC.markBinding();
+ var this = @ptrCast(*ThisSink, @alignCast(std.meta.alignment(ThisSink), fromJS(globalThis, callframe.this()) orelse {
+ const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Sink", .{}, globalThis);
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }));
+
+ if (comptime @hasDecl(SinkType, "getPendingError")) {
+ if (this.sink.getPendingError()) |err| {
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+ }
+
+ const args = callframe.arguments();
+ if (args.len == 0 or args[0].isEmptyOrUndefinedOrNull() or args[0].isNumber()) {
+ const err = JSC.toTypeError(
+ if (args.len == 0) JSC.Node.ErrorCode.ERR_MISSING_ARGS else JSC.Node.ErrorCode.ERR_INVALID_ARG_TYPE,
+ "write() expects a string, ArrayBufferView, or ArrayBuffer",
+ .{},
+ globalThis,
+ );
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+
+ const arg = args[0];
+ if (arg.asArrayBuffer(globalThis)) |buffer| {
+ const slice = buffer.slice();
+ if (slice.len == 0) {
+ return JSC.JSValue.jsNumber(0);
+ }
+
+ return this.sink.writeBytes(.{ .temporary = bun.ByteList.init(slice) }).toJS(globalThis);
+ }
+
+ const str = arg.getZigString(globalThis);
+ if (str.len == 0) {
+ return JSC.JSValue.jsNumber(0);
+ }
+
+ if (str.is16Bit()) {
+ return this.sink.writeUTF16(.{ .temporary = bun.ByteList.init(std.mem.sliceAsBytes(str.utf16SliceAligned())) }).toJS(globalThis);
+ }
+
+ return this.sink.writeLatin1(.{ .temporary = bun.ByteList.init(str.slice()) }).toJS(globalThis);
+ }
+
+ pub fn writeString(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSValue {
+ JSC.markBinding();
+
+ var this = @ptrCast(*ThisSink, @alignCast(std.meta.alignment(ThisSink), fromJS(globalThis, callframe.this()) orelse {
+ const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Sink", .{}, globalThis);
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }));
+
+ if (comptime @hasDecl(SinkType, "getPendingError")) {
+ if (this.sink.getPendingError()) |err| {
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+ }
+
+ const args = callframe.arguments();
+ if (args.len == 0 or args[0].isEmptyOrUndefinedOrNull() or args[0].isNumber()) {
+ const err = JSC.toTypeError(
+ if (args.len == 0) JSC.Node.ErrorCode.ERR_MISSING_ARGS else JSC.Node.ErrorCode.ERR_INVALID_ARG_TYPE,
+ "write() expects a string, ArrayBufferView, or ArrayBuffer",
+ .{},
+ globalThis,
+ );
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+
+ const arg = args[0];
+
+ const str = arg.getZigString(globalThis);
+ if (str.len == 0) {
+ return JSC.JSValue.jsNumber(0);
+ }
+
+ if (str.is16Bit()) {
+ return this.sink.writeUTF16(.{ .temporary = bun.ByteList.init(std.mem.sliceAsBytes(str.utf16SliceAligned())) }).toJS(globalThis);
+ }
+
+ return this.sink.writeLatin1(.{ .temporary = bun.ByteList.init(str.slice()) }).toJS(globalThis);
+ }
+
+ pub fn close(globalThis: *JSGlobalObject, sink_ptr: ?*anyopaque) callconv(.C) JSValue {
+ JSC.markBinding();
+ var this = @ptrCast(*ThisSink, @alignCast(std.meta.alignment(ThisSink), sink_ptr) orelse {
+ const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Sink", .{}, globalThis);
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ });
+
+ if (comptime @hasDecl(SinkType, "getPendingError")) {
+ if (this.sink.getPendingError()) |err| {
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+ }
+
+ return this.sink.end(null).toJS(globalThis);
+ }
+
+ pub fn drain(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSValue {
+ JSC.markBinding();
+
+ var this = @ptrCast(*ThisSink, @alignCast(std.meta.alignment(ThisSink), fromJS(globalThis, callframe.this()) orelse {
+ const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Sink", .{}, globalThis);
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }));
+
+ if (comptime @hasDecl(SinkType, "getPendingError")) {
+ if (this.sink.getPendingError()) |err| {
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+ }
+
+ if (comptime @hasDecl(SinkType, "drainFromJS")) {
+ return this.sink.drainFromJS(globalThis).result;
+ }
+
+ return this.sink.drain().toJS(globalThis);
+ }
+
+ pub fn start(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSValue {
+ JSC.markBinding();
+
+ var this = @ptrCast(*ThisSink, @alignCast(std.meta.alignment(ThisSink), fromJS(globalThis, callframe.this()) orelse {
+ const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Sink", .{}, globalThis);
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }));
+
+ if (comptime @hasDecl(SinkType, "getPendingError")) {
+ if (this.sink.getPendingError()) |err| {
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+ }
+
+ if (comptime @hasField(StreamStart, name_)) {
+ return this.sink.start(
+ if (callframe.argumentsCount() > 0)
+ StreamStart.fromJSWithTag(
+ globalThis,
+ callframe.argument(0),
+ comptime @field(StreamStart, name_),
+ )
+ else
+ StreamStart{ .empty = {} },
+ ).toJS(globalThis);
+ }
+
+ return this.sink.start(
+ if (callframe.argumentsCount() > 0)
+ StreamStart.fromJS(globalThis, callframe.argument(0))
+ else
+ StreamStart{ .empty = {} },
+ ).toJS(globalThis);
+ }
+
+ pub fn end(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSValue {
+ JSC.markBinding();
+
+ var this = @ptrCast(*ThisSink, @alignCast(std.meta.alignment(ThisSink), fromJS(globalThis, callframe.this()) orelse {
+ const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Sink", .{}, globalThis);
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }));
+
+ if (comptime @hasDecl(SinkType, "getPendingError")) {
+ if (this.sink.getPendingError()) |err| {
+ globalThis.vm().throwError(globalThis, err);
+ return JSC.JSValue.jsUndefined();
+ }
+ }
+
+ return this.sink.endFromJS(globalThis).toJS(globalThis);
+ }
+
+ pub const Export = shim.exportFunctions(.{
+ .@"finalize" = finalize,
+ .@"write" = write,
+ .@"close" = close,
+ .@"drain" = drain,
+ .@"start" = start,
+ .@"end" = end,
+ .@"construct" = construct,
+ });
+
+ comptime {
+ if (!JSC.is_bindgen) {
+ @export(finalize, .{ .name = Export[0].symbol_name });
+ @export(write, .{ .name = Export[1].symbol_name });
+ @export(close, .{ .name = Export[2].symbol_name });
+ @export(drain, .{ .name = Export[3].symbol_name });
+ @export(start, .{ .name = Export[4].symbol_name });
+ @export(end, .{ .name = Export[5].symbol_name });
+ @export(construct, .{ .name = Export[6].symbol_name });
+ }
+ }
+
+ pub const Extern = [_][]const u8{ "createObject", "fromJS" };
+ };
+}
+
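+/// Comptime adapter that turns a Context into a writable sink: it tracks the
+/// closed/aborted/deinited flags, hands back a pending syscall error at most
+/// once, and forwards write/start/abort/close/deinit to the Context callbacks.
+/// Instantiated below by HTTPServerWritable as its Writable type.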
+pub fn WritableStreamSink(
+ comptime Context: type,
+ comptime onStart: ?fn (this: Context) void,
+ comptime onWrite: fn (this: Context, bytes: []const u8) JSC.Maybe(Blob.SizeType),
+ comptime onAbort: ?fn (this: Context) void,
+ comptime onClose: ?fn (this: Context) void,
+ comptime deinit: ?fn (this: Context) void,
+) type {
+ return struct {
+ context: Context,
+ closed: bool = false,
+ deinited: bool = false,
+ pending_err: ?JSC.Node.Syscall.Error = null,
+ aborted: bool = false,
+
+ abort_signaler: ?*anyopaque = null,
+ onAbortCallback: ?fn (?*anyopaque) void = null,
+
+ close_signaler: ?*anyopaque = null,
+ onCloseCallback: ?fn (?*anyopaque) void = null,
+
+ pub const This = @This();
+
+ pub fn write(this: *This, bytes: []const u8) JSC.Maybe(Blob.SizeType) {
+ if (this.pending_err) |err| {
+ this.pending_err = null;
+ return .{ .err = err };
+ }
+
+ if (this.closed or this.aborted or this.deinited) {
+ return .{ .result = 0 };
+ }
+ return onWrite(&this.context, bytes);
+ }
+
+ pub fn start(this: *This) StreamStart {
+ return onStart(&this.context);
+ }
+
+ pub fn abort(this: *This) void {
+ if (this.closed or this.deinited or this.aborted) {
+ return;
+ }
+
+ this.aborted = true;
+ onAbort(&this.context);
+ }
+
+ pub fn didAbort(this: *This) void {
+ if (this.closed or this.deinited or this.aborted) {
+ return;
+ }
+ this.aborted = true;
+
+ if (this.onAbortCallback) |cb| {
+ this.onAbortCallback = null;
+ cb(this.abort_signaler);
+ }
+ }
+
+ pub fn didClose(this: *This) void {
+ if (this.closed or this.deinited or this.aborted) {
+ return;
+ }
+ this.closed = true;
+
+ if (this.onCloseCallback) |cb| {
+ this.onCloseCallback = null;
+ cb(this.close_signaler);
+ }
+ }
+
+ pub fn close(this: *This) void {
+ if (this.closed or this.deinited or this.aborted) {
+ return;
+ }
+
+ this.closed = true;
+ onClose(this.context);
+ }
+
+ pub fn deinit(this: *This) void {
+ if (this.deinited) {
+ return;
+ }
+ this.deinited = true;
+ deinit(this.context);
+ }
+
+ pub fn getError(this: *This) ?JSC.Node.Syscall.Error {
+ if (this.pending_err) |err| {
+ this.pending_err = null;
+ return err;
+ }
+
+ return null;
+ }
+ };
+}
+
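+/// Bridges a uWS HTTP(S) response to a WritableStreamSink. When the socket
+/// buffer is full, the unsent bytes stay in pending_chunk and onWrite suspends
+/// its async frame until uWS reports the response as writable again.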
+pub fn HTTPServerWritable(comptime ssl: bool) type {
+ return struct {
+ pub const UWSResponse = uws.NewApp(ssl).Response;
+ res: *UWSResponse,
+ pending_chunk: []const u8 = "",
+ is_listening_for_abort: bool = false,
+ wrote: Blob.SizeType = 0,
+ callback: anyframe->JSC.Maybe(Blob.SizeType) = undefined,
+ writable: Writable,
+
+ pub fn onWritable(this: *@This(), available: c_ulong, _: *UWSResponse) callconv(.C) bool {
+ const to_write = @minimum(@truncate(Blob.SizeType, available), @truncate(Blob.SizeType, this.pending_chunk.len));
+ if (!this.res.write(this.pending_chunk[0..to_write])) {
+ return true;
+ }
+
+ this.pending_chunk = this.pending_chunk[to_write..];
+ this.wrote += to_write;
+ if (this.pending_chunk.len > 0) {
+ this.res.onWritable(*@This(), onWritable, this);
+ return true;
+ }
+
+ var callback = this.callback;
+ this.callback = undefined;
+ // TODO: clarify what the boolean means
+ resume callback;
+ bun.default_allocator.destroy(callback.*);
+ return false;
+ }
+
+ pub fn onStart(this: *@This()) void {
+ if (this.res.hasResponded()) {
+ this.writable.didClose();
+ }
+ }
+ pub fn onWrite(this: *@This(), bytes: []const u8) JSC.Maybe(Blob.SizeType) {
+ if (this.writable.aborted) {
+ return .{ .result = 0 };
+ }
+
+ if (this.pending_chunk.len > 0) {
+ return JSC.Maybe(Blob.SizeType).retry;
+ }
+
+ if (this.res.write(bytes)) {
+ return .{ .result = @truncate(Blob.SizeType, bytes.len) };
+ }
+
+ this.pending_chunk = bytes;
+ this.writable.pending_err = null;
+ suspend {
+ if (!this.is_listening_for_abort) {
+ this.is_listening_for_abort = true;
+ this.res.onAborted(*@This(), onAborted);
+ }
+
+ this.res.onWritable(*@This(), onWritable, this);
+ var frame = bun.default_allocator.create(@Frame(onWrite)) catch unreachable;
+ this.callback = frame;
+ frame.* = @frame().*;
+ }
+ const wrote = this.wrote;
+ this.wrote = 0;
+ if (this.writable.pending_err) |err| {
+ this.writable.pending_err = null;
+ return .{ .err = err };
+ }
+ return .{ .result = wrote };
+ }
+
+ // client-initiated
+ pub fn onAborted(this: *@This(), _: *UWSResponse) void {
+ this.writable.didAbort();
+ }
+ // writer-initiated
+ pub fn onAbort(this: *@This()) void {
+ this.res.end("", true);
+ }
+ pub fn onClose(this: *@This()) void {
+ this.res.end("", false);
+ }
+ pub fn deinit(_: *@This()) void {}
+
+ pub const Writable = WritableStreamSink(@This(), onStart, onWrite, onAbort, onClose, deinit);
+ };
+}
+pub const HTTPSWriter = HTTPServerWritable(true);
+pub const HTTPWriter = HTTPServerWritable(false);
+
+pub fn ReadableStreamSource(
+ comptime Context: type,
+ comptime name_: []const u8,
+ comptime onStart: anytype,
+ comptime onPull: anytype,
+ comptime onCancel: fn (this: *Context) void,
+ comptime deinit: fn (this: *Context) void,
+) type {
+ return struct {
+ context: Context,
+ cancelled: bool = false,
+ deinited: bool = false,
+ pending_err: ?JSC.Node.Syscall.Error = null,
+ close_handler: ?fn (*anyopaque) void = null,
+ close_ctx: ?*anyopaque = null,
+ close_jsvalue: JSValue = JSValue.zero,
+ globalThis: *JSGlobalObject = undefined,
+
+ const This = @This();
+ const ReadableStreamSourceType = @This();
+
+ pub fn pull(this: *This, buf: []u8) StreamResult {
+ return onPull(&this.context, buf, JSValue.zero);
+ }
+
+ pub fn start(
+ this: *This,
+ ) StreamStart {
+ return onStart(&this.context);
+ }
+
+ pub fn pullFromJS(this: *This, buf: []u8, view: JSValue) StreamResult {
+ return onPull(&this.context, buf, view);
+ }
+
+ pub fn startFromJS(this: *This) StreamStart {
+ return onStart(&this.context);
+ }
+
+ pub fn cancel(this: *This) void {
+ if (this.cancelled or this.deinited) {
+ return;
+ }
+
+ this.cancelled = true;
+ onCancel(&this.context);
+ }
+
+ pub fn onClose(this: *This) void {
+ if (this.cancelled or this.deinited) {
+ return;
+ }
+
+ if (this.close_handler) |close| {
+ this.close_handler = null;
+ close(this.close_ctx);
+ }
+ }
+
+ pub fn deinit(this: *This) void {
+ if (this.deinited) {
+ return;
+ }
+ this.deinited = true;
+ deinit(&this.context);
+ }
+
+ pub fn getError(this: *This) ?JSC.Node.Syscall.Error {
+ if (this.pending_err) |err| {
+ this.pending_err = null;
+ return err;
+ }
+
+ return null;
+ }
+
+ pub fn toJS(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject) JSC.JSValue {
+ return ReadableStream.fromNative(globalThis, Context.tag, this);
+ }
+
+ pub const JSReadableStreamSource = struct {
+ pub const shim = JSC.Shimmer(std.mem.span(name_), "JSReadableStreamSource", @This());
+ pub const name = std.fmt.comptimePrint("{s}_JSReadableStreamSource", .{std.mem.span(name_)});
+
+ pub fn pull(globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSC.JSValue {
+ var this = callFrame.argument(0).asPtr(ReadableStreamSourceType);
+ const view = callFrame.argument(1);
+ view.ensureStillAlive();
+ var buffer = view.asArrayBuffer(globalThis) orelse return JSC.JSValue.jsUndefined();
+ return processResult(
+ globalThis,
+ callFrame,
+ this.pullFromJS(buffer.slice(), view),
+ );
+ }
+ pub fn start(globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSC.JSValue {
+ var this = callFrame.argument(0).asPtr(ReadableStreamSourceType);
+ switch (this.startFromJS()) {
+ .empty => return JSValue.jsNumber(0),
+ .ready => return JSValue.jsNumber(16384),
+ .chunk_size => |size| return JSValue.jsNumber(size),
+ .err => |err| {
+ globalThis.vm().throwError(globalThis, err.toJSC(globalThis));
+ return JSC.JSValue.jsUndefined();
+ },
+ else => unreachable,
+ }
+ }
+
+ pub fn processResult(globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame, result: StreamResult) JSC.JSValue {
+ switch (result) {
+ .err => |err| {
+ globalThis.vm().throwError(globalThis, err.toJSC(globalThis));
+ return JSValue.jsUndefined();
+ },
+ .temporary_and_done, .owned_and_done, .into_array_and_done => {
+ JSC.C.JSObjectSetPropertyAtIndex(globalThis.ref(), callFrame.argument(2).asObjectRef(), 0, JSValue.jsBoolean(true).asObjectRef(), null);
+ return result.toJS(globalThis);
+ },
+ else => return result.toJS(globalThis),
+ }
+ }
+ pub fn cancel(_: *JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSC.JSValue {
+ var this = callFrame.argument(0).asPtr(ReadableStreamSourceType);
+ this.cancel();
+ return JSC.JSValue.jsUndefined();
+ }
+ pub fn setClose(globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSC.JSValue {
+ var this = callFrame.argument(0).asPtr(ReadableStreamSourceType);
+ this.close_ctx = this;
+ this.close_handler = JSReadableStreamSource.onClose;
+ this.globalThis = globalThis;
+ this.close_jsvalue = callFrame.argument(1);
+ return JSC.JSValue.jsUndefined();
+ }
+
+ fn onClose(ptr: *anyopaque) void {
+ var this = bun.cast(*ReadableStreamSourceType, ptr);
+ _ = this.close_jsvalue.call(this.globalThis, &.{});
+ // this.closer
+ }
+
+ pub fn deinit(_: *JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSC.JSValue {
+ var this = callFrame.argument(0).asPtr(ReadableStreamSourceType);
+ this.deinit();
+ return JSValue.jsUndefined();
+ }
+
+ pub fn load(globalThis: *JSGlobalObject) callconv(.C) JSC.JSValue {
+ if (comptime JSC.is_bindgen) unreachable;
+ if (comptime Environment.allow_assert) {
+ // this should be cached per globals object
+ const OnlyOnce = struct {
+ pub threadlocal var last_globals: ?*JSGlobalObject = null;
+ };
+ if (OnlyOnce.last_globals) |last_globals| {
+ std.debug.assert(last_globals != globalThis);
+ }
+ OnlyOnce.last_globals = globalThis;
+ }
+ return JSC.JSArray.from(globalThis, &.{
+ JSC.NewFunction(globalThis, null, 1, JSReadableStreamSource.pull),
+ JSC.NewFunction(globalThis, null, 1, JSReadableStreamSource.start),
+ JSC.NewFunction(globalThis, null, 1, JSReadableStreamSource.cancel),
+ JSC.NewFunction(globalThis, null, 1, JSReadableStreamSource.setClose),
+ JSC.NewFunction(globalThis, null, 1, JSReadableStreamSource.deinit),
+ });
+ }
+
+ pub const Export = shim.exportFunctions(.{
+ .@"load" = load,
+ });
+
+ comptime {
+ if (!JSC.is_bindgen) {
+ @export(load, .{ .name = Export[0].symbol_name });
+ _ = JSReadableStreamSource.pull;
+ _ = JSReadableStreamSource.start;
+ _ = JSReadableStreamSource.cancel;
+ _ = JSReadableStreamSource.setClose;
+ _ = JSReadableStreamSource.deinit;
+ }
+ }
+ };
+ };
+}
+
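+/// Readable-stream source over an in-memory Blob store: each pull copies up to
+/// chunk_size bytes from the store's shared view into the destination buffer
+/// and derefs the store once the blob has been fully read.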
+pub const ByteBlobLoader = struct {
+ offset: Blob.SizeType = 0,
+ store: *Blob.Store,
+ chunk_size: Blob.SizeType = 1024 * 1024 * 2,
+ remain: Blob.SizeType = 1024 * 1024 * 2,
+ done: bool = false,
+
+ pub const tag = ReadableStream.Tag.Blob;
+
+ pub fn setup(
+ this: *ByteBlobLoader,
+ blob: *const Blob,
+ user_chunk_size: Blob.SizeType,
+ ) void {
+ blob.store.?.ref();
+ var blobe = blob.*;
+ blobe.resolveSize();
+ this.* = ByteBlobLoader{
+ .offset = blobe.offset,
+ .store = blobe.store.?,
+ .chunk_size = if (user_chunk_size > 0) @minimum(user_chunk_size, blobe.size) else @minimum(1024 * 1024 * 2, blobe.size),
+ .remain = blobe.size,
+ .done = false,
+ };
+ }
+
+ pub fn onStart(this: *ByteBlobLoader) StreamStart {
+ return .{ .chunk_size = this.chunk_size };
+ }
+
+ pub fn onPull(this: *ByteBlobLoader, buffer: []u8, array: JSC.JSValue) StreamResult {
+ array.ensureStillAlive();
+ defer array.ensureStillAlive();
+ if (this.done) {
+ return .{ .done = {} };
+ }
+
+ var temporary = this.store.sharedView();
+ temporary = temporary[this.offset..];
+
+ temporary = temporary[0..@minimum(buffer.len, @minimum(temporary.len, this.remain))];
+ if (temporary.len == 0) {
+ this.store.deref();
+ this.done = true;
+ return .{ .done = {} };
+ }
+
+ const copied = @intCast(Blob.SizeType, temporary.len);
+
+ this.remain -|= copied;
+ this.offset +|= copied;
+ @memcpy(buffer.ptr, temporary.ptr, temporary.len);
+ if (this.remain == 0) {
+ return .{ .into_array_and_done = .{ .value = array, .len = copied } };
+ }
+
+ return .{ .into_array = .{ .value = array, .len = copied } };
+ }
+
+ pub fn onCancel(_: *ByteBlobLoader) void {}
+
+ pub fn deinit(this: *ByteBlobLoader) void {
+ if (!this.done) {
+ this.done = true;
+ this.store.deref();
+ }
+
+ bun.default_allocator.destroy(this);
+ }
+
+ pub const Source = ReadableStreamSource(@This(), "ByteBlob", onStart, onPull, onCancel, deinit);
+};
+
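+/// Streamer intended for incoming request bodies. The method bodies below
+/// still mirror ByteBlobLoader and appear to be placeholders; see the
+/// commented-out HTTPRequest/HTTPSRequest aliases at the end of this file.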
+pub fn RequestBodyStreamer(
+ comptime is_ssl: bool,
+) type {
+ return struct {
+ response: *uws.NewApp(is_ssl).Response,
+
+ pub const tag = if (is_ssl)
+ ReadableStream.Tag.HTTPSRequest
+ else
+ ReadableStream.Tag.HTTPRequest;
+
+ pub fn onStart(this: *ByteBlobLoader) StreamStart {
+ return .{ .chunk_size = this.chunk_size };
+ }
+
+ pub fn onPull(this: *ByteBlobLoader, buffer: []u8, array: JSC.JSValue) StreamResult {
+ array.ensureStillAlive();
+ defer array.ensureStillAlive();
+ if (this.done) {
+ return .{ .done = {} };
+ }
+
+ var temporary = this.store.sharedView();
+ temporary = temporary[this.offset..];
+
+ temporary = temporary[0..@minimum(buffer.len, @minimum(temporary.len, this.remain))];
+ if (temporary.len == 0) {
+ this.store.deref();
+ this.done = true;
+ return .{ .done = {} };
+ }
+
+ const copied = @intCast(Blob.SizeType, temporary.len);
+
+ this.remain -|= copied;
+ this.offset +|= copied;
+ @memcpy(buffer.ptr, temporary.ptr, temporary.len);
+ if (this.remain == 0) {
+ return .{ .into_array_and_done = .{ .value = array, .len = copied } };
+ }
+
+ return .{ .into_array = .{ .value = array, .len = copied } };
+ }
+
+ pub fn onCancel(_: *ByteBlobLoader) void {}
+
+ pub fn deinit(this: *ByteBlobLoader) void {
+ if (!this.done) {
+ this.done = true;
+ this.store.deref();
+ }
+
+ bun.default_allocator.destroy(this);
+ }
+
+ pub const label = if (is_ssl) "HTTPSRequestBodyStreamer" else "HTTPRequestBodyStreamer";
+ pub const Source = ReadableStreamSource(@This(), label, onStart, onPull, onCancel, deinit);
+ };
+}
+
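+/// Readable-stream source backed by a file descriptor. Small reads run on the
+/// JS thread with non-blocking IO, retrying through the poller on EAGAIN;
+/// reads of run_on_different_thread_size or more are handed to the network
+/// thread via the Concurrent helper and resumed through the event loop.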
+pub const FileBlobLoader = struct {
+ buf: []u8 = &[_]u8{},
+ protected_view: JSC.JSValue = JSC.JSValue.zero,
+ fd: JSC.Node.FileDescriptor = 0,
+ auto_close: bool = false,
+ loop: *JSC.EventLoop = undefined,
+ mode: JSC.Node.Mode = 0,
+ store: *Blob.Store,
+ total_read: Blob.SizeType = 0,
+ finalized: bool = false,
+ callback: anyframe = undefined,
+ pending: StreamResult.Pending = StreamResult.Pending{
+ .frame = undefined,
+ .used = false,
+ .result = .{ .done = {} },
+ },
+ cancelled: bool = false,
+ user_chunk_size: Blob.SizeType = 0,
+ scheduled_count: u32 = 0,
+ concurrent: Concurrent = Concurrent{},
+ input_tag: StreamResult.Tag = StreamResult.Tag.done,
+
+ const FileReader = @This();
+
+ const run_on_different_thread_size = bun.huge_allocator_threshold;
+
+ pub const tag = ReadableStream.Tag.File;
+
+ pub fn setup(this: *FileBlobLoader, store: *Blob.Store, chunk_size: Blob.SizeType) void {
+ store.ref();
+ this.* = .{
+ .loop = JSC.VirtualMachine.vm.eventLoop(),
+ .auto_close = store.data.file.pathlike == .path,
+ .store = store,
+ .user_chunk_size = chunk_size,
+ };
+ }
+
+ pub fn watch(this: *FileReader) void {
+ _ = JSC.VirtualMachine.vm.poller.watch(this.fd, .read, this, callback);
+ this.scheduled_count += 1;
+ }
+
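+ /// State for reads performed off the JS thread: the NetworkThread task runs
+ /// runAsync/scheduleRead, the AsyncIO completion resumes read_frame, and
+ /// onJSThread is enqueued back on the event loop to publish the result.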
+ const Concurrent = struct {
+ read: Blob.SizeType = 0,
+ task: NetworkThread.Task = .{ .callback = Concurrent.taskCallback },
+ completion: AsyncIO.Completion = undefined,
+ read_frame: anyframe = undefined,
+ chunk_size: Blob.SizeType = 0,
+ main_thread_task: JSC.AnyTask = .{ .callback = onJSThread, .ctx = null },
+
+ pub fn taskCallback(task: *NetworkThread.Task) void {
+ var this = @fieldParentPtr(FileBlobLoader, "concurrent", @fieldParentPtr(Concurrent, "task", task));
+ var frame = HTTPClient.getAllocator().create(@Frame(runAsync)) catch unreachable;
+ _ = @asyncCall(std.mem.asBytes(frame), undefined, runAsync, .{this});
+ }
+
+ pub fn onRead(this: *FileBlobLoader, completion: *HTTPClient.NetworkThread.Completion, result: AsyncIO.ReadError!usize) void {
+ this.concurrent.read = @truncate(Blob.SizeType, result catch |err| {
+ if (@hasField(HTTPClient.NetworkThread.Completion, "result")) {
+ this.pending.result = .{
+ .err = JSC.Node.Syscall.Error{
+ .errno = @intCast(JSC.Node.Syscall.Error.Int, -completion.result),
+ .syscall = .read,
+ },
+ };
+ } else {
+ this.pending.result = .{
+ .err = JSC.Node.Syscall.Error{
+ // this is too hacky
+ .errno = @truncate(JSC.Node.Syscall.Error.Int, @intCast(u16, @maximum(1, @errorToInt(err)))),
+ .syscall = .read,
+ },
+ };
+ }
+ this.concurrent.read = 0;
+ resume this.concurrent.read_frame;
+ return;
+ });
+
+ resume this.concurrent.read_frame;
+ }
+
+ pub fn scheduleRead(this: *FileBlobLoader) void {
+ if (comptime Environment.isMac) {
+ var remaining = this.buf[this.concurrent.read..];
+
+ while (remaining.len > 0) {
+ const to_read = @minimum(@as(usize, this.concurrent.chunk_size), remaining.len);
+ switch (JSC.Node.Syscall.read(this.fd, remaining[0..to_read])) {
+ .err => |err| {
+ const retry = comptime if (Environment.isLinux)
+ std.os.E.WOULDBLOCK
+ else
+ std.os.E.AGAIN;
+
+ switch (err.getErrno()) {
+ retry => break,
+ else => {},
+ }
+
+ this.pending.result = .{ .err = err };
+ scheduleMainThreadTask(this);
+ return;
+ },
+ .result => |result| {
+ this.concurrent.read += @intCast(Blob.SizeType, result);
+ remaining = remaining[result..];
+
+ if (result == 0) {
+ remaining.len = 0;
+ break;
+ }
+ },
+ }
+ }
+
+ if (remaining.len == 0) {
+ scheduleMainThreadTask(this);
+ return;
+ }
+ }
+
+ AsyncIO.global.read(
+ *FileBlobLoader,
+ this,
+ onRead,
+ &this.concurrent.completion,
+ this.fd,
+ this.buf[this.concurrent.read..],
+ null,
+ );
+
+ suspend {
+ var _frame = @frame();
+ var this_frame = HTTPClient.getAllocator().create(std.meta.Child(@TypeOf(_frame))) catch unreachable;
+ this_frame.* = _frame.*;
+ this.concurrent.read_frame = this_frame;
+ }
+
+ scheduleMainThreadTask(this);
+ }
+
+ pub fn onJSThread(task_ctx: *anyopaque) void {
+ var this: *FileBlobLoader = bun.cast(*FileBlobLoader, task_ctx);
+ const protected_view = this.protected_view;
+ defer protected_view.unprotect();
+ this.protected_view = JSC.JSValue.zero;
+
+ if (this.finalized and this.scheduled_count == 0) {
+ if (!this.pending.used) {
+ resume this.pending.frame;
+ }
+ this.scheduled_count -= 1;
+
+ this.deinit();
+
+ return;
+ }
+
+ if (!this.pending.used and this.pending.result == .err and this.concurrent.read == 0) {
+ resume this.pending.frame;
+ this.scheduled_count -= 1;
+ this.finalize();
+ return;
+ }
+
+ if (this.concurrent.read == 0) {
+ this.pending.result = .{ .done = {} };
+ resume this.pending.frame;
+ this.scheduled_count -= 1;
+ this.finalize();
+ return;
+ }
+
+ this.pending.result = this.handleReadChunk(@as(usize, this.concurrent.read));
+ resume this.pending.frame;
+ this.scheduled_count -= 1;
+ if (this.pending.result.isDone()) {
+ this.finalize();
+ }
+ }
+
+ pub fn scheduleMainThreadTask(this: *FileBlobLoader) void {
+ this.concurrent.main_thread_task.ctx = this;
+ this.loop.enqueueTaskConcurrent(JSC.Task.init(&this.concurrent.main_thread_task));
+ }
+
+ fn runAsync(this: *FileBlobLoader) void {
+ this.concurrent.read = 0;
+
+ Concurrent.scheduleRead(this);
+
+ suspend {
+ HTTPClient.getAllocator().destroy(@frame());
+ }
+ }
+ };
+
+ pub fn scheduleAsync(this: *FileReader, chunk_size: Blob.SizeType) void {
+ this.scheduled_count += 1;
+ this.loop.virtual_machine.active_tasks +|= 1;
+
+ NetworkThread.init() catch {};
+ this.concurrent.chunk_size = chunk_size;
+ NetworkThread.global.pool.schedule(.{ .head = &this.concurrent.task, .tail = &this.concurrent.task, .len = 1 });
+ }
+
+ const default_fifo_chunk_size = 1024;
+ const default_file_chunk_size = 1024 * 1024 * 2;
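+ /// Opens the fd when the blob was given as a path, ensures non-blocking IO
+ /// (duplicating the descriptor if needed), rejects directories and sockets,
+ /// records seekability and size from fstat, and returns the chunk size.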
+ pub fn onStart(this: *FileBlobLoader) StreamStart {
+ var file = &this.store.data.file;
+ var file_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
+ var auto_close = this.auto_close;
+ defer this.auto_close = auto_close;
+ var fd = if (!auto_close)
+ file.pathlike.fd
+ else switch (JSC.Node.Syscall.open(file.pathlike.path.sliceZ(&file_buf), std.os.O.RDONLY | std.os.O.NONBLOCK | std.os.O.CLOEXEC, 0)) {
+ .result => |_fd| _fd,
+ .err => |err| {
+ this.deinit();
+ return .{ .err = err.withPath(file.pathlike.path.slice()) };
+ },
+ };
+
+ if (!auto_close) {
+ // ensure we have non-blocking IO set
+ const flags = std.os.fcntl(fd, std.os.F.GETFL, 0) catch return .{ .err = JSC.Node.Syscall.Error.fromCode(std.os.E.BADF, .fcntl) };
+
+ // if it is not set, duplicate the descriptor and enable non-blocking on the copy;
+ // duplicating avoids changing the flags of a descriptor the caller still owns
+ if ((flags & std.os.O.NONBLOCK) == 0) {
+ auto_close = true;
+ fd = @intCast(@TypeOf(fd), std.os.fcntl(fd, std.os.F.DUPFD, 0) catch return .{ .err = JSC.Node.Syscall.Error.fromCode(std.os.E.BADF, .fcntl) });
+ _ = std.os.fcntl(fd, std.os.F.SETFL, flags | std.os.O.NONBLOCK) catch return .{ .err = JSC.Node.Syscall.Error.fromCode(std.os.E.BADF, .fcntl) };
+ }
+ }
+
+ const stat: std.os.Stat = switch (JSC.Node.Syscall.fstat(fd)) {
+ .result => |result| result,
+ .err => |err| {
+ if (auto_close) {
+ _ = JSC.Node.Syscall.close(fd);
+ }
+ this.deinit();
+ return .{ .err = err.withPath(file.pathlike.path.slice()) };
+ },
+ };
+
+ if (std.os.S.ISDIR(stat.mode)) {
+ const err = JSC.Node.Syscall.Error.fromCode(.ISDIR, .fstat);
+ if (auto_close) {
+ _ = JSC.Node.Syscall.close(fd);
+ }
+ this.deinit();
+ return .{ .err = err };
+ }
+
+ if (std.os.S.ISSOCK(stat.mode)) {
+ const err = JSC.Node.Syscall.Error.fromCode(.INVAL, .fstat);
+
+ if (auto_close) {
+ _ = JSC.Node.Syscall.close(fd);
+ }
+ this.deinit();
+ return .{ .err = err };
+ }
+
+ file.seekable = std.os.S.ISREG(stat.mode);
+ file.mode = @intCast(JSC.Node.Mode, stat.mode);
+ this.mode = file.mode;
+
+ if (file.seekable orelse false)
+ file.max_size = @intCast(Blob.SizeType, stat.size);
+
+ if ((file.seekable orelse false) and file.max_size == 0) {
+ if (auto_close) {
+ _ = JSC.Node.Syscall.close(fd);
+ }
+ this.deinit();
+ return .{ .empty = {} };
+ }
+
+ this.fd = fd;
+ this.auto_close = auto_close;
+
+ const chunk_size = this.calculateChunkSize(std.math.maxInt(usize));
+ return .{ .chunk_size = @truncate(Blob.SizeType, chunk_size) };
+ }
+
+ fn calculateChunkSize(this: *FileBlobLoader, available_to_read: usize) usize {
+ const file = &this.store.data.file;
+
+ const chunk_size: usize = if (this.user_chunk_size > 0)
+ @as(usize, this.user_chunk_size)
+ else if (file.seekable orelse false)
+ @as(usize, default_file_chunk_size)
+ else
+ @as(usize, default_fifo_chunk_size);
+
+ return if (file.max_size > 0)
+ if (available_to_read != std.math.maxInt(usize)) @minimum(chunk_size, available_to_read) else @minimum(@maximum(this.total_read, file.max_size) - this.total_read, chunk_size)
+ else
+ @minimum(available_to_read, chunk_size);
+ }
+
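+ /// Pull entry point: a chunk size of zero on a seekable file means the read
+ /// is complete; chunks at or above run_on_different_thread_size are read on
+ /// another thread and return .pending; everything else reads synchronously.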
+ pub fn onPullInto(this: *FileBlobLoader, buffer: []u8, view: JSC.JSValue) StreamResult {
+ const chunk_size = this.calculateChunkSize(std.math.maxInt(usize));
+ this.input_tag = .into_array;
+
+ switch (chunk_size) {
+ 0 => {
+ std.debug.assert(this.store.data.file.seekable orelse false);
+ this.finalize();
+ return .{ .done = {} };
+ },
+ run_on_different_thread_size...std.math.maxInt(@TypeOf(chunk_size)) => {
+ this.protected_view = view;
+ this.protected_view.protect();
+ // should never be reached
+ this.pending.result = .{
+ .err = JSC.Node.Syscall.Error.todo,
+ };
+ this.buf = buffer;
+
+ this.scheduleAsync(@truncate(Blob.SizeType, chunk_size));
+
+ return .{ .pending = &this.pending };
+ },
+ else => {},
+ }
+
+ return this.read(buffer, view);
+ }
+
+ fn maybeAutoClose(this: *FileBlobLoader) void {
+ if (this.auto_close) {
+ _ = JSC.Node.Syscall.close(this.fd);
+ this.auto_close = false;
+ }
+ }
+
+ fn handleReadChunk(this: *FileBlobLoader, result: usize) StreamResult {
+ this.total_read += @intCast(Blob.SizeType, result);
+ const remaining: Blob.SizeType = if (this.store.data.file.seekable orelse false)
+ this.store.data.file.max_size -| this.total_read
+ else
+ @as(Blob.SizeType, std.math.maxInt(Blob.SizeType));
+
+ // this handles:
+ // - empty file
+ // - stream closed for some reason
+ if ((result == 0 and remaining == 0)) {
+ this.finalize();
+ return .{ .done = {} };
+ }
+
+ const has_more = remaining > 0;
+
+ if (!has_more) {
+ return .{ .into_array_and_done = .{ .len = @truncate(Blob.SizeType, result) } };
+ }
+
+ return .{ .into_array = .{ .len = @truncate(Blob.SizeType, result) } };
+ }
+
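+ /// Performs a single non-blocking read. On EAGAIN it protects the view,
+ /// keeps the buffer, and registers a poller watch so callback() can retry;
+ /// other errors finalize the reader, with the blob path attached if known.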
+ pub fn read(
+ this: *FileBlobLoader,
+ read_buf: []u8,
+ view: JSC.JSValue,
+ ) StreamResult {
+ const rc =
+ JSC.Node.Syscall.read(this.fd, read_buf);
+
+ switch (rc) {
+ .err => |err| {
+ const retry =
+ std.os.E.AGAIN;
+
+ switch (err.getErrno()) {
+ retry => {
+ this.protected_view = view;
+ this.protected_view.protect();
+ this.buf = read_buf;
+ this.watch();
+ return .{
+ .pending = &this.pending,
+ };
+ },
+ else => {},
+ }
+ const sys = if (this.store.data.file.pathlike == .path and this.store.data.file.pathlike.path.slice().len > 0)
+ err.withPath(this.store.data.file.pathlike.path.slice())
+ else
+ err;
+
+ this.finalize();
+ return .{ .err = sys };
+ },
+ .result => |result| {
+ return this.handleReadChunk(result);
+ },
+ }
+ }
+
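+ /// Poller callback: clamps the read to what the kernel reports as available
+ /// (on macOS), performs the read, and resumes the pending frame with the
+ /// result.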
+ pub fn callback(task: ?*anyopaque, sizeOrOffset: i64, _: u16) void {
+ var this: *FileReader = bun.cast(*FileReader, task.?);
+ this.scheduled_count -= 1;
+ const protected_view = this.protected_view;
+ defer protected_view.unprotect();
+ this.protected_view = JSValue.zero;
+
+ var available_to_read: usize = std.math.maxInt(usize);
+ if (comptime Environment.isMac) {
+ if (std.os.S.ISREG(this.mode)) {
+ // kqueue fires for a regular file when the file pointer is not at EOF;
+ // sizeOrOffset is the kevent data field: the offset from the current
+ // position to the end of the file, which may be negative.
+ available_to_read = @intCast(usize, @maximum(sizeOrOffset, 0));
+ } else if (std.os.S.ISCHR(this.mode) or std.os.S.ISFIFO(this.mode)) {
+ available_to_read = @intCast(usize, @maximum(sizeOrOffset, 0));
+ }
+ }
+ if (this.finalized and this.scheduled_count == 0) {
+ if (!this.pending.used) {
+ // should never be reached
+ this.pending.result = .{
+ .err = JSC.Node.Syscall.Error.todo,
+ };
+ resume this.pending.frame;
+ }
+ this.deinit();
+ return;
+ }
+ if (this.cancelled)
+ return;
+
+ if (this.buf.len == 0) {
+ return;
+ } else {
+ this.buf.len = @minimum(this.buf.len, available_to_read);
+ }
+
+ this.pending.result = this.read(this.buf, this.protected_view);
+ resume this.pending.frame;
+ }
+
+ pub fn finalize(this: *FileBlobLoader) void {
+ if (this.finalized)
+ return;
+ this.finalized = true;
+
+ this.maybeAutoClose();
+
+ this.store.deref();
+ }
+
+ pub fn onCancel(this: *FileBlobLoader) void {
+ this.cancelled = true;
+
+ this.deinit();
+ }
+
+ pub fn deinit(this: *FileBlobLoader) void {
+ this.finalize();
+ if (this.scheduled_count == 0 and !this.pending.used) {
+ this.destroy();
+ }
+ }
+
+ pub fn destroy(this: *FileBlobLoader) void {
+ bun.default_allocator.destroy(this);
+ }
+
+ pub const Source = ReadableStreamSource(@This(), "FileBlobLoader", onStart, onPullInto, onCancel, deinit);
+};
+
+// pub const HTTPRequest = RequestBodyStreamer(false);
+// pub const HTTPSRequest = RequestBodyStreamer(true);
+// pub fn ResponseBodyStreamer(comptime is_ssl: bool) type {
+// return struct {
+// const Streamer = @This();
+// pub fn onEnqueue(this: *Streamer, buffer: []u8, ): anytype,
+// pub fn onEnqueueMany(this: *Streamer): anytype,
+// pub fn onClose(this: *Streamer): anytype,
+// pub fn onError(this: *Streamer): anytype,
+// };
+// }