// bench/concat.js
import { bench, group, run } from "mitata";
import { readFileSync } from "fs";
import { allocUnsafe } from "bun";

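// Portable concatenation: total the chunk sizes, allocate a zero-filled
// ArrayBuffer, then copy each chunk into place with Uint8Array.prototype.set.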
function polyfill(chunks) {
  var size = 0;
  for (const chunk of chunks) {
    size += chunk.byteLength;
  }
  var buffer = new ArrayBuffer(size);
  var view = new Uint8Array(buffer);
  var offset = 0;
  for (const chunk of chunks) {
    view.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return buffer;
}

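// Same copy loop, but the destination comes from Bun's allocUnsafe, which
// returns a Uint8Array backed by uninitialized memory (no zero-fill cost).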
function polyfillUninitialized(chunks) {
  var size = 0;
  for (const chunk of chunks) {
    size += chunk.byteLength;
  }
  var view = allocUnsafe(size);

  var offset = 0;
  for (const chunk of chunks) {
    view.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return view.buffer;
}

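// Benchmark inputs: three one-byte chunks, this file split into one chunk per
// byte, the whole file as a single chunk, 42 copies of the file, and two large
// chunks built by repeating the file's text 100 times. (Uint8Array.from wraps
// values above 255 modulo 256, so 456 and 789 become 200 and 21; only the byte
// counts matter for the benchmark.)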
const chunkGroups = [
  [Uint8Array.from([123]), Uint8Array.from([456]), Uint8Array.from([789])],
  Array.from(readFileSync(import.meta.path)).map((a) => Uint8Array.from([a])),
  [readFileSync(import.meta.path)],
  Array.from({ length: 42 }, () => readFileSync(import.meta.path)),
  Array.from({ length: 2 }, () =>
    new TextEncoder().encode(readFileSync(import.meta.path, "utf8").repeat(100))
  ),
];

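// One group per input set, labeled with its total byte count, comparing
// Bun.concatArrayBuffers against the two copy-based polyfills above.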
for (const chunks of chunkGroups) {
  group(
    `${chunks.reduce((total, chunk) => total + chunk.byteLength, 0)} bytes for ${chunks.length} chunks`,
    () => {
      bench("Bun.concatArrayBuffers", () => {
        Bun.concatArrayBuffers(chunks);
      });
      bench("Uint8Array.set", () => {
        polyfill(chunks);
      });

      bench("Uint8Array.set (uninitialized memory)", () => {
        polyfillUninitialized(chunks);
      });
    }
  );
}

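// Execute all registered benchmark groups.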
await run();