path: root/benchmark/bench
author     github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> 2025-06-05 14:25:23 +0000
committer  github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> 2025-06-05 14:25:23 +0000
commit     e586d7d704d475afe3373a1de6ae20d504f79d6d (patch)
tree       7e3fa24807cebd48a86bd40f866d792181191ee9 /benchmark/bench
Sync from a8e1c0a7402940e0fc5beef669522b315052df1b (latest)
Diffstat (limited to 'benchmark/bench')
-rw-r--r--  benchmark/bench/README.md            7
-rw-r--r--  benchmark/bench/_template.js        12
-rw-r--r--  benchmark/bench/_util.js            32
-rw-r--r--  benchmark/bench/cli-startup.js      73
-rw-r--r--  benchmark/bench/codspeed.bench.js   48
-rw-r--r--  benchmark/bench/memory.js           61
-rw-r--r--  benchmark/bench/render.js          121
-rw-r--r--  benchmark/bench/server-stress.js   115
8 files changed, 469 insertions, 0 deletions
diff --git a/benchmark/bench/README.md b/benchmark/bench/README.md
new file mode 100644
index 000000000..9d3312880
--- /dev/null
+++ b/benchmark/bench/README.md
@@ -0,0 +1,7 @@
+# bench
+
+This `bench` folder contains different benchmarking files that you can run via `astro-benchmark <bench-file-name>`, e.g. `astro-benchmark memory`. Files that start with an underscore are not benchmarking files.
+
+Benchmarking files run against a project to measure its performance and write the results as JSON to the `results` folder. The `results` folder is gitignored and its result files can be safely deleted if you're not using them.
+
+You can duplicate `_template.js` to start a new benchmark test. All shared utilities are kept in `_util.js`.
diff --git a/benchmark/bench/_template.js b/benchmark/bench/_template.js
new file mode 100644
index 000000000..ae96d72ad
--- /dev/null
+++ b/benchmark/bench/_template.js
@@ -0,0 +1,12 @@
+/** Default project to run for this benchmark if not specified */
+export const defaultProject = 'project-name';
+
+/**
+ * Run benchmark on `projectDir` and write results to `outputFile`.
+ * Use `console.log` to report the results too. Logs that start with 10 `=`
+ * and end with 10 `=` will be extracted by CI to display in the PR comment.
+ * Usually after the first 10 `=` you'll want to add a title like `#### Test`.
+ * @param {URL} _projectDir
+ * @param {URL} _outputFile
+ */
+export async function run(_projectDir, _outputFile) {}
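For illustration, here is what a filled-in copy of `_template.js` could look like once the placeholders are replaced, following the conventions described in the docblock above (the file name `my-bench.js`, the default project, and the logged values are hypothetical):

// Hypothetical benchmark/bench/my-bench.js, created by copying _template.js.
import fs from 'node:fs/promises';

/** Default project to run for this benchmark if not specified */
export const defaultProject = 'render-default';

/**
 * @param {URL} projectDir
 * @param {URL} outputFile
 */
export async function run(projectDir, outputFile) {
	const result = { sample: 42 }; // replace with real measurements taken against projectDir
	await fs.writeFile(outputFile, JSON.stringify(result, null, 2));
	// CI extracts everything between the two lines of ten `=` characters for the PR comment.
	console.log('='.repeat(10));
	console.log('#### My bench\n\n');
	console.log(JSON.stringify(result));
	console.log('='.repeat(10));
}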
diff --git a/benchmark/bench/_util.js b/benchmark/bench/_util.js
new file mode 100644
index 000000000..d9dfe5b19
--- /dev/null
+++ b/benchmark/bench/_util.js
@@ -0,0 +1,32 @@
+import { createRequire } from 'node:module';
+import path from 'node:path';
+
+const astroPkgPath = createRequire(import.meta.url).resolve('astro/package.json');
+
+export const astroBin = path.resolve(astroPkgPath, '../astro.js');
+
+/** @typedef {{ avg: number, stdev: number, max: number }} Stat */
+
+/**
+ * @param {number[]} numbers
+ * @returns {Stat}
+ */
+export function calculateStat(numbers) {
+ const avg = numbers.reduce((a, b) => a + b, 0) / numbers.length;
+ const stdev = Math.sqrt(
+ numbers.map((x) => Math.pow(x - avg, 2)).reduce((a, b) => a + b, 0) / numbers.length,
+ );
+ const max = Math.max(...numbers);
+ return { avg, stdev, max };
+}
+
+export async function makeProject(name) {
+ console.log('Making project:', name);
+ const projectDir = new URL(`../projects/${name}/`, import.meta.url);
+
+ const makeProjectMod = await import(`../make-project/${name}.js`);
+ await makeProjectMod.run(projectDir);
+
+ console.log('Finished making project:', name);
+ return projectDir;
+}
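As a quick usage check, `calculateStat` returns the mean, the population standard deviation, and the maximum of the raw samples (the numbers below are made up):

// Usage sketch for calculateStat from _util.js.
import { calculateStat } from './_util.js';

const { avg, stdev, max } = calculateStat([10, 12, 14]);
console.log(avg, stdev.toFixed(2), max); // 12 1.63 14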
diff --git a/benchmark/bench/cli-startup.js b/benchmark/bench/cli-startup.js
new file mode 100644
index 000000000..9144797d7
--- /dev/null
+++ b/benchmark/bench/cli-startup.js
@@ -0,0 +1,73 @@
+import { fileURLToPath } from 'node:url';
+import { markdownTable } from 'markdown-table';
+import { exec } from 'tinyexec';
+import { astroBin, calculateStat } from './_util.js';
+
+/** Default project to run for this benchmark if not specified */
+export const defaultProject = 'render-default';
+
+/**
+ * @param {URL} projectDir
+ */
+export async function run(projectDir) {
+ const root = fileURLToPath(projectDir);
+
+ console.log('Benchmarking `astro --help`...');
+ const helpStat = await benchmarkCommand('node', [astroBin, '--help'], root);
+ console.log('Done');
+
+ console.log('Benchmarking `astro preferences list`...');
+	const preferencesStat = await benchmarkCommand('node', [astroBin, 'preferences', 'list'], root);
+ console.log('Done');
+
+ console.log('Result preview:');
+ console.log('='.repeat(10));
+ console.log(`#### CLI Startup\n\n`);
+ console.log(
+ printResult({
+ 'astro --help': helpStat,
+			'astro preferences list': preferencesStat,
+ }),
+ );
+ console.log('='.repeat(10));
+}
+
+/**
+ * @param {string} command
+ * @param {string[]} args
+ * @param {string} root
+ * @returns {Promise<import('./_util.js').Stat>}
+ */
+async function benchmarkCommand(command, args, root) {
+ /** @type {number[]} */
+ const durations = [];
+
+ for (let i = 0; i < 10; i++) {
+ const start = performance.now();
+ await exec(command, args, { nodeOptions: { cwd: root }, throwOnError: true });
+ durations.push(performance.now() - start);
+ }
+
+ // From the 10 durations, calculate average, standard deviation, and max value
+ return calculateStat(durations);
+}
+
+/**
+ * @param {Record<string, import('./_util.js').Stat>} result
+ */
+function printResult(result) {
+ return markdownTable(
+ [
+ ['Command', 'Avg (ms)', 'Stdev (ms)', 'Max (ms)'],
+ ...Object.entries(result).map(([command, { avg, stdev, max }]) => [
+ command,
+ avg.toFixed(2),
+ stdev.toFixed(2),
+ max.toFixed(2),
+ ]),
+ ],
+ {
+ align: ['l', 'r', 'r', 'r'],
+ },
+ );
+}
diff --git a/benchmark/bench/codspeed.bench.js b/benchmark/bench/codspeed.bench.js
new file mode 100644
index 000000000..4073ebed8
--- /dev/null
+++ b/benchmark/bench/codspeed.bench.js
@@ -0,0 +1,48 @@
+import { fileURLToPath } from 'node:url';
+import { exec } from 'tinyexec';
+import { beforeAll, bench, describe } from 'vitest';
+import { astroBin, makeProject } from './_util.js';
+let streamingApp;
+let nonStreamingApp;
+beforeAll(async () => {
+ const render = await makeProject('render-bench');
+ const root = fileURLToPath(render);
+ await exec(astroBin, ['build'], {
+ nodeOptions: {
+ cwd: root,
+ stdio: 'inherit',
+ },
+ });
+ const entry = new URL('./dist/server/entry.mjs', `file://${root}`);
+ const { manifest, createApp } = await import(entry);
+ streamingApp = createApp(manifest, true);
+ nonStreamingApp = createApp(manifest, false);
+}, 900000);
+
+describe('Bench rendering', () => {
+ bench('Rendering: streaming [true], .astro file', async () => {
+		const request = new Request(new URL('http://example.com/astro'));
+ await streamingApp.render(request);
+ });
+ bench('Rendering: streaming [true], .md file', async () => {
+		const request = new Request(new URL('http://example.com/md'));
+ await streamingApp.render(request);
+ });
+ bench('Rendering: streaming [true], .mdx file', async () => {
+		const request = new Request(new URL('http://example.com/mdx'));
+ await streamingApp.render(request);
+ });
+
+ bench('Rendering: streaming [false], .astro file', async () => {
+		const request = new Request(new URL('http://example.com/astro'));
+ await nonStreamingApp.render(request);
+ });
+ bench('Rendering: streaming [false], .md file', async () => {
+		const request = new Request(new URL('http://example.com/md'));
+ await nonStreamingApp.render(request);
+ });
+ bench('Rendering: streaming [false], .mdx file', async () => {
+		const request = new Request(new URL('http://example.com/mdx'));
+ await nonStreamingApp.render(request);
+ });
+});
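Unlike the other files in this folder, `codspeed.bench.js` is a Vitest benchmark rather than an `astro-benchmark` entry point, so it is presumably picked up by `vitest bench` with the CodSpeed plugin registered in the Vitest config. A sketch of such a config, assuming `@codspeed/vitest-plugin` is the plugin in use (that choice is not shown in this diff):

// vitest.config.js sketch; the plugin and its import are an assumption, not taken from this diff.
import { defineConfig } from 'vitest/config';
import codspeedPlugin from '@codspeed/vitest-plugin';

export default defineConfig({
	plugins: [codspeedPlugin()],
});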
diff --git a/benchmark/bench/memory.js b/benchmark/bench/memory.js
new file mode 100644
index 000000000..4f9153cc0
--- /dev/null
+++ b/benchmark/bench/memory.js
@@ -0,0 +1,61 @@
+import fs from 'node:fs/promises';
+import { fileURLToPath } from 'node:url';
+import { markdownTable } from 'markdown-table';
+import { exec } from 'tinyexec';
+import { astroBin } from './_util.js';
+
+/** @typedef {Record<string, import('../../packages/astro/src/core/config/timer').Stat>} AstroTimerStat */
+
+/** Default project to run for this benchmark if not specified */
+export const defaultProject = 'memory-default';
+
+/**
+ * @param {URL} projectDir
+ * @param {URL} outputFile
+ */
+export async function run(projectDir, outputFile) {
+ const root = fileURLToPath(projectDir);
+ const outputFilePath = fileURLToPath(outputFile);
+
+ console.log('Building and benchmarking...');
+ await exec('node', ['--expose-gc', '--max_old_space_size=10000', astroBin, 'build'], {
+ nodeOptions: {
+ cwd: root,
+ stdio: 'inherit',
+ env: {
+ ASTRO_TIMER_PATH: outputFilePath,
+ },
+ },
+ throwOnError: true,
+ });
+
+ console.log('Raw results written to', outputFilePath);
+
+ console.log('Result preview:');
+ console.log('='.repeat(10));
+ console.log(`#### Memory\n\n`);
+ console.log(printResult(JSON.parse(await fs.readFile(outputFilePath, 'utf-8'))));
+ console.log('='.repeat(10));
+
+ console.log('Done!');
+}
+
+/**
+ * @param {AstroTimerStat} output
+ */
+function printResult(output) {
+ return markdownTable(
+ [
+ ['', 'Elapsed time (s)', 'Memory used (MB)', 'Final memory (MB)'],
+ ...Object.entries(output).map(([name, stat]) => [
+ name,
+ (stat.elapsedTime / 1000).toFixed(2),
+ (stat.heapUsedChange / 1024 / 1024).toFixed(2),
+ (stat.heapUsedTotal / 1024 / 1024).toFixed(2),
+ ]),
+ ],
+ {
+ align: ['l', 'r', 'r', 'r'],
+ },
+ );
+}
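`printResult` reads three fields from each entry of the JSON written via `ASTRO_TIMER_PATH`: `elapsedTime` in milliseconds plus `heapUsedChange` and `heapUsedTotal` in bytes. An illustrative object of that shape (the step names and numbers are invented; only the field names come from the code above):

// Shape assumed by printResult; values here are purely illustrative.
const exampleTimerOutput = {
	'Build setup': { elapsedTime: 1200, heapUsedChange: 52_428_800, heapUsedTotal: 157_286_400 },
	'Static generate': { elapsedTime: 8400, heapUsedChange: 104_857_600, heapUsedTotal: 262_144_000 },
};
// 52_428_800 bytes / 1024 / 1024 = 50.00 MB in the "Memory used (MB)" column.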
diff --git a/benchmark/bench/render.js b/benchmark/bench/render.js
new file mode 100644
index 000000000..02f75a73b
--- /dev/null
+++ b/benchmark/bench/render.js
@@ -0,0 +1,121 @@
+import fs from 'node:fs/promises';
+import http from 'node:http';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { markdownTable } from 'markdown-table';
+import { waitUntilBusy } from 'port-authority';
+import { exec } from 'tinyexec';
+import { renderPages } from '../make-project/render-default.js';
+import { astroBin, calculateStat } from './_util.js';
+
+const port = 4322;
+
+export const defaultProject = 'render-default';
+
+/**
+ * @param {URL} projectDir
+ * @param {URL} outputFile
+ */
+export async function run(projectDir, outputFile) {
+ const root = fileURLToPath(projectDir);
+
+ console.log('Building...');
+ await exec(astroBin, ['build'], {
+ nodeOptions: {
+ cwd: root,
+ stdio: 'inherit',
+ },
+ throwOnError: true,
+ });
+
+ console.log('Previewing...');
+ const previewProcess = exec(astroBin, ['preview', '--port', port], {
+ nodeOptions: {
+ cwd: root,
+ stdio: 'inherit',
+ },
+ throwOnError: true,
+ });
+
+ console.log('Waiting for server ready...');
+ await waitUntilBusy(port, { timeout: 5000 });
+
+ console.log('Running benchmark...');
+ const result = await benchmarkRenderTime();
+
+ console.log('Killing server...');
+ if (!previewProcess.kill('SIGTERM')) {
+ console.warn('Failed to kill server process id:', previewProcess.pid);
+ }
+
+ console.log('Writing results to', fileURLToPath(outputFile));
+ await fs.writeFile(outputFile, JSON.stringify(result, null, 2));
+
+ console.log('Result preview:');
+ console.log('='.repeat(10));
+ console.log(`#### Render\n\n`);
+ console.log(printResult(result));
+ console.log('='.repeat(10));
+
+ console.log('Done!');
+}
+
+export async function benchmarkRenderTime(portToListen = port) {
+ /** @type {Record<string, number[]>} */
+ const result = {};
+ for (const fileName of renderPages) {
+ // Render each file 100 times and push to an array
+ for (let i = 0; i < 100; i++) {
+ const pathname = '/' + fileName.slice(0, -path.extname(fileName).length);
+ const renderTime = await fetchRenderTime(`http://localhost:${portToListen}${pathname}`);
+ if (!result[pathname]) result[pathname] = [];
+ result[pathname].push(renderTime);
+ }
+ }
+ /** @type {Record<string, import('./_util.js').Stat>} */
+ const processedResult = {};
+ for (const [pathname, times] of Object.entries(result)) {
+ // From the 100 results, calculate average, standard deviation, and max value
+ processedResult[pathname] = calculateStat(times);
+ }
+ return processedResult;
+}
+
+/**
+ * @param {Record<string, import('./_util.js').Stat>} result
+ */
+function printResult(result) {
+ return markdownTable(
+ [
+ ['Page', 'Avg (ms)', 'Stdev (ms)', 'Max (ms)'],
+ ...Object.entries(result).map(([pathname, { avg, stdev, max }]) => [
+ pathname,
+ avg.toFixed(2),
+ stdev.toFixed(2),
+ max.toFixed(2),
+ ]),
+ ],
+ {
+ align: ['l', 'r', 'r', 'r'],
+ },
+ );
+}
+
+/**
+ * Simple fetch utility to get the render time sent by `@benchmark/timer` in plain text
+ * @param {string} url
+ * @returns {Promise<number>}
+ */
+function fetchRenderTime(url) {
+ return new Promise((resolve, reject) => {
+ const req = http.request(url, (res) => {
+ res.setEncoding('utf8');
+ let data = '';
+ res.on('data', (chunk) => (data += chunk));
+ res.on('error', (e) => reject(e));
+ res.on('end', () => resolve(+data));
+ });
+ req.on('error', (e) => reject(e));
+ req.end();
+ });
+}
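`fetchRenderTime` assumes each previewed page responds with a bare number in plain text, the render duration reported by `@benchmark/timer`, which `+data` then coerces to milliseconds. A minimal endpoint that would satisfy that contract looks like this (a hand-rolled sketch, not the actual `@benchmark/timer` implementation):

// Sketch of a server whose responses fetchRenderTime can parse.
import http from 'node:http';

http
	.createServer((_req, res) => {
		const start = performance.now();
		// ...render the requested page here...
		res.setHeader('content-type', 'text/plain');
		// The body is just the elapsed milliseconds, which `+data` parses on the client side.
		res.end(String(performance.now() - start));
	})
	.listen(4322);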
diff --git a/benchmark/bench/server-stress.js b/benchmark/bench/server-stress.js
new file mode 100644
index 000000000..5bcaa6963
--- /dev/null
+++ b/benchmark/bench/server-stress.js
@@ -0,0 +1,115 @@
+import fs from 'node:fs/promises';
+import { fileURLToPath } from 'node:url';
+import autocannon from 'autocannon';
+import { markdownTable } from 'markdown-table';
+import { waitUntilBusy } from 'port-authority';
+import pb from 'pretty-bytes';
+import { exec } from 'tinyexec';
+import { astroBin } from './_util.js';
+
+const port = 4321;
+
+export const defaultProject = 'server-stress-default';
+
+/**
+ * @param {URL} projectDir
+ * @param {URL} outputFile
+ */
+export async function run(projectDir, outputFile) {
+ const root = fileURLToPath(projectDir);
+
+ console.log('Building...');
+ await exec(astroBin, ['build'], {
+ nodeOptions: {
+ cwd: root,
+ stdio: 'inherit',
+ },
+ throwOnError: true,
+ });
+
+ console.log('Previewing...');
+	const previewProcess = exec(astroBin, ['preview', '--port', port], {
+ nodeOptions: {
+ cwd: root,
+ stdio: 'inherit',
+ },
+ });
+
+ console.log('Waiting for server ready...');
+ await waitUntilBusy(port, { timeout: 5000 });
+
+ console.log('Running benchmark...');
+ const result = await benchmarkCannon();
+
+ console.log('Killing server...');
+ if (!previewProcess.kill('SIGTERM')) {
+ console.warn('Failed to kill server process id:', previewProcess.pid);
+ }
+
+ console.log('Writing results to', fileURLToPath(outputFile));
+ await fs.writeFile(outputFile, JSON.stringify(result, null, 2));
+
+ console.log('Result preview:');
+ console.log('='.repeat(10));
+ console.log(`#### Server stress\n\n`);
+ console.log(printResult(result));
+ console.log('='.repeat(10));
+
+ console.log('Done!');
+}
+
+/**
+ * @returns {Promise<import('autocannon').Result>}
+ */
+export async function benchmarkCannon() {
+ return new Promise((resolve, reject) => {
+ const instance = autocannon(
+ {
+ url: `http://localhost:${port}`,
+ connections: 100,
+ duration: 30,
+ pipelining: 10,
+ },
+ (err, result) => {
+ if (err) {
+ reject(err);
+ } else {
+ // @ts-expect-error untyped but documented
+ instance.stop();
+ resolve(result);
+ }
+ },
+ );
+ autocannon.track(instance, { renderResultsTable: false });
+ });
+}
+
+/**
+ * @param {import('autocannon').Result} output
+ */
+function printResult(output) {
+ const { latency: l, requests: r, throughput: t } = output;
+
+ const latencyTable = markdownTable(
+ [
+ ['', 'Avg', 'Stdev', 'Max'],
+ ['Latency', `${l.average} ms`, `${l.stddev} ms`, `${l.max} ms`],
+ ],
+ {
+ align: ['l', 'r', 'r', 'r'],
+ },
+ );
+
+ const reqAndBytesTable = markdownTable(
+ [
+ ['', 'Avg', 'Stdev', 'Min', 'Total in 30s'],
+ ['Req/Sec', r.average, r.stddev, r.min, `${(r.total / 1000).toFixed(1)}k requests`],
+ ['Bytes/Sec', pb(t.average), pb(t.stddev), pb(t.min), `${pb(t.total)} read`],
+ ],
+ {
+ align: ['l', 'r', 'r', 'r', 'r'],
+ },
+ );
+
+ return `${latencyTable}\n\n${reqAndBytesTable}`;
+}
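Because `run()` writes the raw autocannon result to `outputFile`, the numbers can be re-examined later without re-running the stress test. A small sketch, assuming a saved file such as `results/server-stress.json` from a previous run (the path is hypothetical):

// Re-reading a previously saved autocannon result; the file path is an assumption.
import fs from 'node:fs/promises';

const saved = JSON.parse(await fs.readFile('./results/server-stress.json', 'utf-8'));
console.log(`avg latency: ${saved.latency.average} ms`);
console.log(`avg req/sec: ${saved.requests.average}`);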