author     Jarred Sumner <jarred@jarredsumner.com>   2023-08-07 18:51:16 -0700
committer  GitHub <noreply@github.com>               2023-08-07 18:51:16 -0700
commit     f2f227720b3ffe1797a0a4e500e9a9a639167dc6 (patch)
tree       a3fab7a9c55775c8bd637161aa2551a7659a21b8
parent     0b183beb51367004795d8a431eb06bb2fa4f8250 (diff)
WASM test analyzer (#4043)
* wasm
* WASM test scanner
* Update Makefile
* Update Makefile
* Configurable heap limit
* slightly better error
* Update js_parser.zig
* Update path.test.js
* Update node.mjs

---------

Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
-rw-r--r--  Makefile                           31
-rw-r--r--  build.zig                           6
-rw-r--r--  packages/bun-wasm/index.ts        178
-rw-r--r--  packages/bun-wasm/test/node.mjs    31
-rw-r--r--  root.zig                            3
-rw-r--r--  root_wasm.zig                      26
-rw-r--r--  src/api/schema.d.ts                32
-rw-r--r--  src/api/schema.js                 110
-rw-r--r--  src/api/schema.peechy              22
-rw-r--r--  src/api/schema.zig                 83
-rw-r--r--  src/bun.zig                        17
m---------  src/deps/mimalloc                   0
-rw-r--r--  src/feature_flags.zig               2
-rw-r--r--  src/js_ast.zig                     35
-rw-r--r--  src/js_lexer.zig                    2
-rw-r--r--  src/js_parser.zig                 145
-rw-r--r--  src/logger.zig                      2
-rw-r--r--  src/main_wasm.zig                 344
-rw-r--r--  src/mimalloc_arena.zig              6
-rw-r--r--  src/output.zig                      4
-rw-r--r--  src/renamer.zig                     4
-rw-r--r--  src/sourcemap/CodeCoverage.zig     10
-rw-r--r--  src/string_immutable.zig           23
-rw-r--r--  src/tracy.zig                      16
-rw-r--r--  test/js/node/path/path.test.js      2
25 files changed, 965 insertions, 169 deletions
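
For reference, the new analyzer is consumed from JavaScript through the `getTests` export added to packages/bun-wasm/index.ts, as exercised by packages/bun-wasm/test/node.mjs in the diff below. A minimal usage sketch of that flow follows; it assumes `make wasm` has produced packages/bun-wasm/index.mjs next to bun.wasm, and the import path and "my.test.tsx" file name are placeholders, not part of the commit.

// Minimal sketch (not part of the commit), mirroring packages/bun-wasm/test/node.mjs.
import { readFileSync } from "fs";
import { init, getTests } from "./packages/bun-wasm/index.mjs";

// init() now takes a configurable heap limit in bytes (defaults to 64_000_000).
await init(new URL("./packages/bun-wasm/bun.wasm", import.meta.url), 64_000_000);

const source = readFileSync("my.test.tsx");
// Each entry is { name, byteOffset, kind: "test" | "describe" };
// a parse failure throws an Error built from the captured console_error output.
for (const t of getTests(source, "my.test.tsx")) {
  console.log(`${t.kind} "${t.name}" @ byte ${t.byteOffset}`);
}

On the other side of that call, `export fn getTests` in src/main_wasm.zig decodes a GetTestsRequest, walks the parsed AST with TestAnalyzer, and encodes a GetTestsResponse (test items plus a label string buffer) using the new peechy schema types.
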
diff --git a/Makefile b/Makefile
index 17d76c753..070d03ec4 100644
--- a/Makefile
+++ b/Makefile
@@ -703,44 +703,44 @@ dev-build-obj-wasm:
.PHONY: dev-wasm
dev-wasm: dev-build-obj-wasm
- emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
- -g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
+ emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
+ -g2 -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
- packages/debug-bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
+ packages/debug-bun-freestanding-wasm32/bun-wasm.o --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/debug-bun-freestanding-wasm32/bun-wasm.wasm
- cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
+ cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
.PHONY: build-obj-wasm
build-obj-wasm:
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
- emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
- -g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
+ emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
+ -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
- cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
+ cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
.PHONY: build-obj-wasm-small
build-obj-wasm-small:
- $(ZIG) build bun-wasm -Doptimize=ReleaseSmall -Dtarget=wasm32-freestanding
- emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
- -g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
+ $(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
+ emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
+ -Oz -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/bun-freestanding-wasm32/bun-wasm.o -Oz --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
- cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
+ cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
.PHONY: wasm
-wasm: api build-obj-wasm-small
+wasm: api mimalloc-wasm build-obj-wasm-small
@rm -rf packages/bun-wasm/*.{d.ts,js,wasm,cjs,mjs,tsbuildinfo}
@cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
@cp src/api/schema.d.ts packages/bun-wasm/schema.d.ts
@cp src/api/schema.js packages/bun-wasm/schema.js
@cd packages/bun-wasm && $(NPM_CLIENT) run tsc -- -p .
- @$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=esm --minify 2> /dev/null
+ @bun build --sourcemap=external --external=fs --outdir=packages/bun-wasm --target=browser --minify ./packages/bun-wasm/index.ts
@mv packages/bun-wasm/index.js packages/bun-wasm/index.mjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.mjs.map
- @$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
+ @$(ESBUILD) --sourcemap=external --external:fs --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
@mv packages/bun-wasm/index.js packages/bun-wasm/index.cjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.cjs.map
@rm -rf packages/bun-wasm/*.tsbuildinfo
@@ -1379,7 +1379,8 @@ mimalloc:
mimalloc-wasm:
- cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
+ rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
+ cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -GNinja -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=OFF .; emmake cmake --build .;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm
# alias for link, incase anyone still types that
diff --git a/build.zig b/build.zig
index 569a6bdd2..4a20d8481 100644
--- a/build.zig
+++ b/build.zig
@@ -182,7 +182,7 @@ pub fn build(b: *Build) !void {
is_debug_build = optimize == OptimizeMode.Debug;
const bun_executable_name = if (optimize == std.builtin.OptimizeMode.Debug) "bun-debug" else "bun";
const root_src = if (target.getOsTag() == std.Target.Os.Tag.freestanding)
- "src/main_wasm.zig"
+ "root_wasm.zig"
else
"root.zig";
@@ -322,7 +322,7 @@ pub fn build(b: *Build) !void {
const wasm = b.step("bun-wasm", "Build WASM");
var wasm_step = b.addStaticLibrary(.{
.name = "bun-wasm",
- .root_source_file = FileSource.relative("src/main_wasm.zig"),
+ .root_source_file = FileSource.relative("root_wasm.zig"),
.target = target,
.optimize = optimize,
});
@@ -332,6 +332,8 @@ pub fn build(b: *Build) !void {
// wasm_step.link_emit_relocs = true;
// wasm_step.single_threaded = true;
try configureObjectStep(b, wasm_step, @TypeOf(target), target, obj.main_pkg_path.?);
+ var build_opts = default_build_options;
+ wasm_step.addOptions("build_options", build_opts.step(b));
}
{
diff --git a/packages/bun-wasm/index.ts b/packages/bun-wasm/index.ts
index b72400661..10a7b3aa4 100644
--- a/packages/bun-wasm/index.ts
+++ b/packages/bun-wasm/index.ts
@@ -1,11 +1,14 @@
// @ts-nocheck
import { ByteBuffer } from "peechy/bb";
import {
+ Loader as BunLoader,
+ TestKind,
+ decodeGetTestsResponse,
decodeScanResult,
decodeTransformResponse,
+ encodeGetTestsRequest,
encodeScan,
encodeTransform,
- Loader as BunLoader,
type ScanResult,
type TransformResponse,
} from "./schema";
@@ -15,9 +18,13 @@ export enum Loader {
tsx = BunLoader.tsx,
ts = BunLoader.ts,
}
-
+const testKindMap = {
+ [TestKind.describe_fn]: "describe",
+ [TestKind.test_fn]: "test",
+};
+const capturedErrors = [];
+let captureErrors = false;
export type { ScanResult, TransformResponse };
-
function normalizeLoader(file_name: string, loader?: Loader): BunLoader {
return (
(loader
@@ -82,12 +89,22 @@ var scratch2: Uint8Array;
const env = {
console_log(slice: number) {
+ const text = Bun._wasmPtrLenToString(slice);
+ if (captureErrors) {
+ capturedErrors.push(text);
+ return;
+ }
//@ts-ignore
- console.log(Bun._wasmPtrLenToString(slice));
+ console.log(text);
},
console_error(slice: number) {
//@ts-ignore
- console.error(Bun._wasmPtrLenToString(slice));
+ const text = Bun._wasmPtrLenToString(slice);
+ if (captureErrors) {
+ capturedErrors.push(text);
+ return;
+ }
+ console.error(text);
},
console_warn(slice: number) {
//@ts-ignore
@@ -148,7 +165,6 @@ const env = {
},
emscripten_notify_memory_growth() {},
};
-
export class Bun {
private static has_initialized = false;
// @ts-ignore-line
@@ -179,63 +195,115 @@ export class Bun {
return Bun._decoder.decode(region);
}
- static async init(url, fetch = globalThis.fetch) {
- // globalThis.sucraseTransform = sucraseTransform;
+ static async init(url, heapSize = 64_000_000, fetch = globalThis.fetch) {
scratch = new Uint8Array(8096);
if (Bun.has_initialized) {
return;
}
-
- if (globalThis?.WebAssembly?.instantiateStreaming) {
- Bun.wasm_source = await globalThis.WebAssembly.instantiateStreaming(
- fetch(url),
- { env: env, wasi_snapshot_preview1: Wasi },
- );
- } else if (typeof window !== "undefined") {
- const resp = await fetch(url);
- Bun.wasm_source = await globalThis.WebAssembly.instantiate(
- await resp.arrayBuffer(),
- {
+ if (typeof process === "undefined") {
+ if (globalThis?.WebAssembly?.instantiateStreaming) {
+ Bun.wasm_source = await globalThis.WebAssembly.instantiateStreaming(fetch(url), {
env: env,
wasi_snapshot_preview1: Wasi,
- },
- );
- // is it node?
+ });
+ } else if (typeof window !== "undefined") {
+ const resp = await fetch(url);
+ Bun.wasm_source = await globalThis.WebAssembly.instantiate(await resp.arrayBuffer(), {
+ env: env,
+ wasi_snapshot_preview1: Wasi,
+ });
+ // is it node?
+ }
} else {
//@ts-ignore
const fs = await import("fs");
- Bun.wasm_source = await globalThis.WebAssembly.instantiate(
- fs.readFileSync(url),
- {
- env: env,
- wasi_snapshot_preview1: Wasi,
- },
- );
+ Bun.wasm_source = await globalThis.WebAssembly.instantiate(fs.readFileSync(url), {
+ env: env,
+ wasi_snapshot_preview1: Wasi,
+ });
}
- const res = Bun.wasm_exports.init();
+ const res = Bun.wasm_exports.init(heapSize);
+
if (res < 0) {
- throw `[Bun] Failed to initialize WASM module: code ${res}`;
+ throw new Error(`[Bun] Failed to initialize WASM module: code ${res}`);
}
Bun.has_initialized = true;
}
- static transformSync(
- content: Uint8Array | string,
- file_name: string,
- loader?: Loader,
- ): TransformResponse {
- if (!Bun.has_initialized) {
- throw "Please run await Bun.init(wasm_url) before using this.";
+ static getTests(content: Uint8Array | string, filename = "my.test.tsx") {
+ const bb = new ByteBuffer(scratch);
+ bb.length = 0;
+ bb.index = 0;
+ const contents_buffer = content;
+
+ encodeGetTestsRequest(
+ {
+ contents: contents_buffer,
+ path: filename,
+ },
+ bb,
+ );
+
+ const data = bb.toUint8Array();
+
+ const input_ptr = Bun.wasm_exports.bun_malloc(data.length);
+ var buffer = Bun._wasmPtrToSlice(input_ptr);
+ buffer.set(data);
+ captureErrors = true;
+ try {
+ var resp_ptr = Bun.wasm_exports.getTests(input_ptr);
+ } catch (e) {
+ throw e;
+ } finally {
+ captureErrors = false;
+ Bun.wasm_exports.bun_free(input_ptr);
+ }
+
+ if (Number(resp_ptr) === 0) {
+ if (capturedErrors.length) {
+ const err = capturedErrors.slice();
+ capturedErrors.length = 0;
+ throw new Error(err.join("\n").trim());
+ }
+
+ throw new Error("Failed to parse");
+ }
+
+ if (capturedErrors.length) {
+ Bun.wasm_exports.bun_free(resp_ptr);
+ const err = capturedErrors.slice();
+ capturedErrors.length = 0;
+ throw new Error(err.join("\n").trim());
+ }
+
+ var _bb = new ByteBuffer(Bun._wasmPtrToSlice(resp_ptr));
+
+ const response = decodeGetTestsResponse(_bb);
+ var tests = new Array(response.tests.length);
+
+ for (var i = 0; i < response.tests.length; i++) {
+ tests[i] = {
+ name: new TextDecoder().decode(
+ response.contents.subarray(
+ response.tests[i].label.offset,
+ response.tests[i].label.offset + response.tests[i].label.length,
+ ),
+ ),
+ byteOffset: response.tests[i].byteOffset,
+ kind: testKindMap[response.tests[i].kind],
+ };
}
- // if (process.env.NODE_ENV === "development") {
- // console.time("[Bun] Transform " + file_name);
- // }
+ Bun.wasm_exports.bun_free(resp_ptr);
+
+ return tests;
+ }
+ static transformSync(content: Uint8Array | string, file_name: string, loader?: Loader): TransformResponse {
const bb = new ByteBuffer(scratch);
bb.length = 0;
bb.index = 0;
@@ -274,28 +342,14 @@ export class Bun {
buffer.set(data);
const resp_ptr = Bun.wasm_exports.transform(input_ptr);
-
var _bb = new ByteBuffer(Bun._wasmPtrToSlice(resp_ptr));
-
const response = decodeTransformResponse(_bb);
Bun.wasm_exports.bun_free(input_ptr);
scratch = bb.data;
return response;
}
- static scan(
- content: Uint8Array | string,
- file_name: string,
- loader?: Loader,
- ): ScanResult {
- if (!Bun.has_initialized) {
- throw "Please run await Bun.init(wasm_url) before using this.";
- }
-
- // if (process.env.NODE_ENV === "development") {
- // console.time("[Bun] Transform " + file_name);
- // }
- scratch.fill(0);
+ static scan(content: Uint8Array | string, file_name: string, loader?: Loader): ScanResult {
const bb = new ByteBuffer(scratch);
bb.length = 0;
bb.index = 0;
@@ -337,15 +391,5 @@ export class Bun {
export const transformSync = Bun.transformSync;
export const scan = Bun.scan;
export const init = Bun.init;
+export const getTests = Bun.getTests;
export default Bun;
-
-if ("window" in globalThis && !("Bun" in globalThis)) {
- // @ts-ignore-line
- globalThis.Bun = Bun;
-}
-
-//@ts-ignore
-if (process.env.NODE_ENV === "development") {
- //@ts-ignore
- Bun.env = env;
-}
diff --git a/packages/bun-wasm/test/node.mjs b/packages/bun-wasm/test/node.mjs
index 7890a5c6e..2699a1dad 100644
--- a/packages/bun-wasm/test/node.mjs
+++ b/packages/bun-wasm/test/node.mjs
@@ -1,28 +1,7 @@
-// This is not released yet because there are some memory bugs with the WASM build
-// It causes syntax errors which don't make any sense
-// Most likely, this is an issue with some code expecting 64 bit pointers
-// That's also why this test just prints instead of using a test runner
-import * as Bun from "../index.mjs";
+import { readFileSync } from "fs";
+import { init, getTests } from "../index.mjs";
-await Bun.init(new URL("../bun.wasm", import.meta.url));
+const buf = (process.argv.length > 2 ? readFileSync(process.argv.at(-1)) : "") || readFileSync(import.meta.url);
+await init(new URL("../bun.wasm", import.meta.url));
-const buf =
- (process.argv.length > 2 ? process.argv.at(-1) : "") ||
- new TextEncoder().encode(`
-
-export function hi() {
- return <div>Hey</div>;
-}
-
-`);
-const result = Bun.transformSync(buf, "hi.jsx", "jsx");
-if (result.errors?.length) {
- console.log(JSON.stringify(result.errors, null, 2));
- throw new Error("Failed");
-}
-
-if (!result.files.length) {
- throw new Error("unexpectedly empty");
-}
-
-process.stdout.write(result.files[0].data);
+console.log(getTests(buf));
diff --git a/root.zig b/root.zig
index 0d00409eb..0fc0d3987 100644
--- a/root.zig
+++ b/root.zig
@@ -17,3 +17,6 @@ pub const completions = struct {
pub const zsh = @embedFile("./completions/bun.zsh");
pub const fish = @embedFile("./completions/bun.fish");
};
+
+pub const JavaScriptCore = @import("./src/jsc.zig");
+pub const C = @import("./src/c.zig");
diff --git a/root_wasm.zig b/root_wasm.zig
new file mode 100644
index 000000000..f04ceb015
--- /dev/null
+++ b/root_wasm.zig
@@ -0,0 +1,26 @@
+pub usingnamespace @import("src/main_wasm.zig");
+
+pub const bun = @import("src/bun.zig");
+
+pub const content = struct {
+ pub const error_js_path = "packages/bun-error/dist/index.js";
+ pub const error_js = @embedFile(error_js_path);
+
+ pub const error_css_path = "packages/bun-error/dist/bun-error.css";
+ pub const error_css_path_dev = "packages/bun-error/bun-error.css";
+
+ pub const error_css = @embedFile(error_css_path);
+};
+
+pub const completions = struct {};
+pub const is_bindgen = true;
+pub const JavaScriptCore = struct {
+ pub fn markBinding(_: @import("std").builtin.SourceLocation) void {
+ unreachable;
+ }
+
+ pub const ZigString = struct {};
+};
+
+pub const C = struct {};
+pub const build_options = @import("build_options");
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index 2a86340ad..a982b910d 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -331,6 +331,16 @@ export const WebsocketCommandKindKeys: {
3: "build_with_file_path";
build_with_file_path: "build_with_file_path";
};
+export const enum TestKind {
+ test_fn = 1,
+ describe_fn = 2,
+}
+export const TestKindKeys: {
+ 1: "test_fn";
+ test_fn: "test_fn";
+ 2: "describe_fn";
+ describe_fn: "describe_fn";
+};
export interface StackFrame {
function_name: string;
file: string;
@@ -729,6 +739,22 @@ export interface ClientServerModuleManifest {
contents: Uint8Array;
}
+export interface GetTestsRequest {
+ path: string;
+ contents: Uint8Array;
+}
+
+export interface TestResponseItem {
+ byteOffset: int32;
+ label: StringPointer;
+ kind: TestKind;
+}
+
+export interface GetTestsResponse {
+ tests: TestResponseItem[];
+ contents: Uint8Array;
+}
+
export declare function encodeStackFrame(message: StackFrame, bb: ByteBuffer): void;
export declare function decodeStackFrame(buffer: ByteBuffer): StackFrame;
export declare function encodeStackFramePosition(message: StackFramePosition, bb: ByteBuffer): void;
@@ -847,3 +873,9 @@ export declare function encodeClientServerModule(message: ClientServerModule, bb
export declare function decodeClientServerModule(buffer: ByteBuffer): ClientServerModule;
export declare function encodeClientServerModuleManifest(message: ClientServerModuleManifest, bb: ByteBuffer): void;
export declare function decodeClientServerModuleManifest(buffer: ByteBuffer): ClientServerModuleManifest;
+export declare function encodeGetTestsRequest(message: GetTestsRequest, bb: ByteBuffer): void;
+export declare function decodeGetTestsRequest(buffer: ByteBuffer): GetTestsRequest;
+export declare function encodeTestResponseItem(message: TestResponseItem, bb: ByteBuffer): void;
+export declare function decodeTestResponseItem(buffer: ByteBuffer): TestResponseItem;
+export declare function encodeGetTestsResponse(message: GetTestsResponse, bb: ByteBuffer): void;
+export declare function decodeGetTestsResponse(buffer: ByteBuffer): GetTestsResponse;
diff --git a/src/api/schema.js b/src/api/schema.js
index f1e68031e..b28c5669c 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -3315,6 +3315,108 @@ function encodeClientServerModuleManifest(message, bb) {
}
}
+function decodeGetTestsRequest(bb) {
+ var result = {};
+
+ result["path"] = bb.readString();
+ result["contents"] = bb.readByteArray();
+ return result;
+}
+
+function encodeGetTestsRequest(message, bb) {
+ var value = message["path"];
+ if (value != null) {
+ bb.writeString(value);
+ } else {
+ throw new Error('Missing required field "path"');
+ }
+
+ var value = message["contents"];
+ if (value != null) {
+ bb.writeByteArray(value);
+ } else {
+ throw new Error('Missing required field "contents"');
+ }
+}
+const TestKind = {
+ "1": 1,
+ "2": 2,
+ "test_fn": 1,
+ "describe_fn": 2,
+};
+const TestKindKeys = {
+ "1": "test_fn",
+ "2": "describe_fn",
+ "test_fn": "test_fn",
+ "describe_fn": "describe_fn",
+};
+
+function decodeTestResponseItem(bb) {
+ var result = {};
+
+ result["byteOffset"] = bb.readInt32();
+ result["label"] = decodeStringPointer(bb);
+ result["kind"] = TestKind[bb.readByte()];
+ return result;
+}
+
+function encodeTestResponseItem(message, bb) {
+ var value = message["byteOffset"];
+ if (value != null) {
+ bb.writeInt32(value);
+ } else {
+ throw new Error('Missing required field "byteOffset"');
+ }
+
+ var value = message["label"];
+ if (value != null) {
+ encodeStringPointer(value, bb);
+ } else {
+ throw new Error('Missing required field "label"');
+ }
+
+ var value = message["kind"];
+ if (value != null) {
+ var encoded = TestKind[value];
+ if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "TestKind"');
+ bb.writeByte(encoded);
+ } else {
+ throw new Error('Missing required field "kind"');
+ }
+}
+
+function decodeGetTestsResponse(bb) {
+ var result = {};
+
+ var length = bb.readVarUint();
+ var values = (result["tests"] = Array(length));
+ for (var i = 0; i < length; i++) values[i] = decodeTestResponseItem(bb);
+ result["contents"] = bb.readByteArray();
+ return result;
+}
+
+function encodeGetTestsResponse(message, bb) {
+ var value = message["tests"];
+ if (value != null) {
+ var values = value,
+ n = values.length;
+ bb.writeVarUint(n);
+ for (var i = 0; i < n; i++) {
+ value = values[i];
+ encodeTestResponseItem(value, bb);
+ }
+ } else {
+ throw new Error('Missing required field "tests"');
+ }
+
+ var value = message["contents"];
+ if (value != null) {
+ bb.writeByteArray(value);
+ } else {
+ throw new Error('Missing required field "contents"');
+ }
+}
+
export { Loader };
export { LoaderKeys };
export { FrameworkEntryPointType };
@@ -3461,3 +3563,11 @@ export { decodeClientServerModule };
export { encodeClientServerModule };
export { decodeClientServerModuleManifest };
export { encodeClientServerModuleManifest };
+export { decodeGetTestsRequest };
+export { encodeGetTestsRequest };
+export { TestKind };
+export { TestKindKeys };
+export { decodeTestResponseItem };
+export { encodeTestResponseItem };
+export { decodeGetTestsResponse };
+export { encodeGetTestsResponse };
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index a172606f7..53800e4f3 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -609,3 +609,25 @@ struct ClientServerModuleManifest {
StringPointer[] exportNames;
byte[] contents;
}
+
+
+struct GetTestsRequest {
+ string path;
+ byte[] contents;
+}
+
+smol TestKind {
+ test_fn = 1;
+ describe_fn = 2;
+}
+
+struct TestResponseItem {
+ int32 byteOffset;
+ StringPointer label;
+ TestKind kind;
+}
+
+struct GetTestsResponse {
+ TestResponseItem[] tests;
+ byte[] contents;
+}
\ No newline at end of file
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 94f2d22df..de46a813b 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -3140,4 +3140,87 @@ pub const Api = struct {
try writer.writeArray(u8, this.contents);
}
};
+
+ pub const GetTestsRequest = struct {
+ /// path
+ path: []const u8,
+
+ /// contents
+ contents: []const u8,
+
+ pub fn decode(reader: anytype) anyerror!GetTestsRequest {
+ var this = std.mem.zeroes(GetTestsRequest);
+
+ this.path = try reader.readValue([]const u8);
+ this.contents = try reader.readArray(u8);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(@TypeOf(this.path), this.path);
+ try writer.writeArray(u8, this.contents);
+ }
+ };
+
+ pub const TestKind = enum(u8) {
+ _none,
+ /// test_fn
+ test_fn,
+
+ /// describe_fn
+ describe_fn,
+
+ _,
+
+ pub fn jsonStringify(self: *const @This(), opts: anytype, o: anytype) !void {
+ return try std.json.stringify(@tagName(self), opts, o);
+ }
+ };
+
+ pub const TestResponseItem = struct {
+ /// byteOffset
+ byte_offset: i32 = 0,
+
+ /// label
+ label: StringPointer,
+
+ /// kind
+ kind: TestKind,
+
+ pub fn decode(reader: anytype) anyerror!TestResponseItem {
+ var this = std.mem.zeroes(TestResponseItem);
+
+ this.byte_offset = try reader.readValue(i32);
+ this.label = try reader.readValue(StringPointer);
+ this.kind = try reader.readValue(TestKind);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeInt(this.byte_offset);
+ try writer.writeValue(@TypeOf(this.label), this.label);
+ try writer.writeEnum(this.kind);
+ }
+ };
+
+ pub const GetTestsResponse = struct {
+ /// tests
+ tests: []const TestResponseItem,
+
+ /// contents
+ contents: []const u8,
+
+ pub fn decode(reader: anytype) anyerror!GetTestsResponse {
+ var this = std.mem.zeroes(GetTestsResponse);
+
+ this.tests = try reader.readArray(TestResponseItem);
+ this.contents = try reader.readArray(u8);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeArray(TestResponseItem, this.tests);
+ try writer.writeArray(u8, this.contents);
+ }
+ };
};
diff --git a/src/bun.zig b/src/bun.zig
index c26fb4781..b274efaaf 100644
--- a/src/bun.zig
+++ b/src/bun.zig
@@ -24,7 +24,7 @@ pub const huge_allocator_threshold: comptime_int = @import("./memory_allocator.z
/// FileSystem is a singleton.
pub const fs_allocator = default_allocator;
-pub const C = @import("c.zig");
+pub const C = @import("root").C;
pub const sha = @import("./sha.zig");
pub const FeatureFlags = @import("feature_flags.zig");
pub const meta = @import("./meta.zig");
@@ -486,8 +486,12 @@ pub fn copy(comptime Type: type, dest: []Type, src: []const Type) void {
if (!does_input_or_output_overlap) {
@memcpy(output[0..input.len], input);
- } else {
+ } else if (comptime Environment.isNative) {
C.memmove(output.ptr, input.ptr, input.len);
+ } else {
+ for (input, output) |input_byte, *out| {
+ out.* = input_byte;
+ }
}
}
@@ -700,7 +704,7 @@ pub const invalid_fd = std.math.maxInt(FileDescriptor);
pub const simdutf = @import("./bun.js/bindings/bun-simdutf.zig");
-pub const JSC = @import("./jsc.zig");
+pub const JSC = @import("root").JavaScriptCore;
pub const AsyncIO = @import("async_io");
pub const logger = @import("./logger.zig");
@@ -725,6 +729,10 @@ pub const MimallocArena = @import("./mimalloc_arena.zig").Arena;
/// This wrapper exists to avoid the call to sliceTo(0)
/// Zig's sliceTo(0) is scalar
pub fn getenvZ(path_: [:0]const u8) ?[]const u8 {
+ if (comptime !Environment.isNative) {
+ return null;
+ }
+
const ptr = std.c.getenv(path_.ptr) orelse return null;
return sliceTo(ptr, 0);
}
@@ -868,6 +876,9 @@ pub const copyFileRange = CopyFile.copyFileRange;
pub const copyFile = CopyFile.copyFile;
pub fn parseDouble(input: []const u8) !f64 {
+ if (comptime Environment.isWasm) {
+ return try std.fmt.parseFloat(f64, input);
+ }
return JSC.WTF.parseDouble(input);
}
diff --git a/src/deps/mimalloc b/src/deps/mimalloc
-Subproject abfd4236d4e00136cd23c862ab880ce894899b9
+Subproject 7968d4285043401bb36573374710d47a4081a06
diff --git a/src/feature_flags.zig b/src/feature_flags.zig
index 553bb8f62..130e07d33 100644
--- a/src/feature_flags.zig
+++ b/src/feature_flags.zig
@@ -112,7 +112,7 @@ pub const hardcode_localhost_to_127_0_0_1 = false;
/// so we just disable it
pub const support_jsxs_in_jsx_transform = false;
-pub const use_simdutf = !@import("root").bun.JSC.is_bindgen;
+pub const use_simdutf = @import("root").bun.Environment.isNative and !@import("root").bun.JSC.is_bindgen;
pub const inline_properties_in_transpiler = true;
diff --git a/src/js_ast.zig b/src/js_ast.zig
index b37e93be5..e7466554f 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -1704,17 +1704,19 @@ pub const E = struct {
}
pub fn toStringFromF64Safe(value: f64, allocator: std.mem.Allocator) ?string {
- if (value == @trunc(value) and (value < std.math.maxInt(i32) and value > std.math.minInt(i32))) {
- const int_value = @as(i64, @intFromFloat(value));
- const abs = @as(u64, @intCast(std.math.absInt(int_value) catch return null));
- if (abs < double_digit.len) {
- return if (int_value < 0)
- neg_double_digit[abs]
- else
- double_digit[abs];
- }
+ if (comptime !Environment.isWasm) {
+ if (value == @trunc(value) and (value < std.math.maxInt(i32) and value > std.math.minInt(i32))) {
+ const int_value = @as(i64, @intFromFloat(value));
+ const abs = @as(u64, @intCast(std.math.absInt(int_value) catch return null));
+ if (abs < double_digit.len) {
+ return if (int_value < 0)
+ neg_double_digit[abs]
+ else
+ double_digit[abs];
+ }
- return std.fmt.allocPrint(allocator, "{d}", .{@as(i32, @intCast(int_value))}) catch return null;
+ return std.fmt.allocPrint(allocator, "{d}", .{@as(i32, @intCast(int_value))}) catch return null;
+ }
}
if (std.math.isNan(value)) {
@@ -2250,6 +2252,11 @@ pub const E = struct {
}
if (s.isUTF8()) {
+ if (comptime !Environment.isNative) {
+ var allocated = (strings.toUTF16Alloc(bun.default_allocator, s.data, false) catch return 0) orelse return s.data.len;
+ defer bun.default_allocator.free(allocated);
+ return @as(u32, @truncate(allocated.len));
+ }
return @as(u32, @truncate(bun.simdutf.length.utf16.from.utf8.le(s.data)));
}
@@ -4134,6 +4141,14 @@ pub const Expr = struct {
return @as(Tag, this.data).isPrimitiveLiteral();
}
+ pub fn isRef(this: Expr, ref: Ref) bool {
+ return switch (this.data) {
+ .e_import_identifier => |import_identifier| import_identifier.ref.eql(ref),
+ .e_identifier => |ident| ident.ref.eql(ref),
+ else => false,
+ };
+ }
+
pub const Tag = enum(u6) {
e_array,
e_unary,
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index e54e738e0..1417ea01e 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -734,7 +734,7 @@ fn NewLexer_(
needs_slow_path = true;
} else if (is_json and lexer.code_point < 0x20) {
try lexer.syntaxError();
- } else if (comptime quote == '"' or quote == '\'') {
+ } else if (comptime (quote == '"' or quote == '\'') and Environment.isNative) {
const remainder = lexer.source.contents[lexer.current..];
if (remainder.len >= 4096) {
lexer.current += indexOfInterestingCharacterInStringLiteral(remainder, quote) orelse {
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 2ce10a77d..976642eb9 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -46,6 +46,12 @@ fn _disabledAssert(_: bool) void {
unreachable;
}
+fn __wrapSyntaxError(loc: @import("std").builtin.SourceLocation) void {
+ Output.print("SyntaxError at {d}:{d}", .{
+ loc.line,
+ loc.column,
+ });
+}
const assert = if (Environment.allow_assert) std.debug.assert else _disabledAssert;
const debug = Output.scoped(.JSParser, false);
const ExprListLoc = struct {
@@ -386,6 +392,7 @@ const JSXTag = struct {
if (strings.indexOfChar(member, '-')) |index| {
try p.log.addError(p.source, logger.Loc{ .start = member_range.loc.start + @as(i32, @intCast(index)) }, "Unexpected \"-\"");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -2810,6 +2817,74 @@ pub const Parser = struct {
}
}
+ pub fn analyze(self: *Parser, context: *anyopaque, callback: *const fn (*anyopaque, *TSXParser, []js_ast.Part) anyerror!void) anyerror!void {
+ var p: TSXParser = undefined;
+ try TSXParser.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
+ p.should_fold_typescript_constant_expressions = false;
+
+ defer p.lexer.deinit();
+
+ // Consume a leading hashbang comment
+ var hashbang: string = "";
+ if (p.lexer.token == .t_hashbang) {
+ hashbang = p.lexer.identifier;
+ try p.lexer.next();
+ }
+
+ // Parse the file in the first pass, but do not bind symbols
+ var opts = ParseStatementOptions{ .is_module_scope = true };
+ const parse_tracer = bun.tracy.traceNamed(@src(), "JSParser.parse");
+
+ const stmts = p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts) catch |err| {
+ if (comptime Environment.isWasm) {
+ Output.print("JSParser.parse: caught error {s} at location: {d}\n", .{ @errorName(err), p.lexer.loc().start });
+ p.log.printForLogLevel(Output.writer()) catch {};
+ }
+ return err;
+ };
+
+ parse_tracer.end();
+
+ if (self.log.errors > 0) {
+ if (comptime Environment.isWasm) {
+ // If the logger is backed by console.log, every print appends a newline.
+ // so buffering is kind of mandatory here
+ const fakeWriter = struct {
+ fn writeAll(_: @This(), data: []const u8) anyerror!usize {
+ if (data.len == 0) return 0;
+
+ Output.print("{s}", .{data});
+ return data.len;
+ }
+ };
+ var writer = std.io.Writer(fakeWriter, anyerror, fakeWriter.writeAll){
+ .context = fakeWriter{},
+ };
+ var buffered_writer = std.io.bufferedWriter(writer);
+ var actual = buffered_writer.writer();
+ for (self.log.msgs.items) |msg| {
+ var m: logger.Msg = msg;
+ m.writeFormat(actual, true) catch {};
+ }
+ buffered_writer.flush() catch {};
+ }
+ return error.SyntaxError;
+ }
+
+ const visit_tracer = bun.tracy.traceNamed(@src(), "JSParser.visit");
+ try p.prepareForVisitPass();
+
+ var parts = ListManaged(js_ast.Part).init(p.allocator);
+ defer parts.deinit();
+
+ try p.appendPart(&parts, stmts);
+ visit_tracer.end();
+
+ const analyze_tracer = bun.tracy.traceNamed(@src(), "JSParser.analyze");
+ try callback(context, &p, parts.items);
+ analyze_tracer.end();
+ }
+
fn _parse(self: *Parser, comptime ParserType: type) !js_ast.Result {
var p: ParserType = undefined;
const orig_error_count = self.log.errors;
@@ -2858,6 +2933,7 @@ pub const Parser = struct {
// Example where NOT halting causes a crash: A TS enum with a number literal as a member name
// https://discord.com/channels/876711213126520882/876711213126520885/1039325382488371280
if (self.log.errors > orig_error_count) {
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -3015,6 +3091,7 @@ pub const Parser = struct {
// If there were errors while visiting, also halt here
if (self.log.errors > orig_error_count) {
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -4287,11 +4364,10 @@ pub const Parser = struct {
}
pub fn init(_options: Options, log: *logger.Log, source: *const logger.Source, define: *Define, allocator: Allocator) !Parser {
- const lexer = try js_lexer.Lexer.init(log, source.*, allocator);
return Parser{
.options = _options,
.allocator = allocator,
- .lexer = lexer,
+ .lexer = try js_lexer.Lexer.init(log, source.*, allocator),
.define = define,
.source = source,
.log = log,
@@ -8100,6 +8176,7 @@ fn NewParser_(
else => {
if (!found_key) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
},
@@ -8112,6 +8189,7 @@ fn NewParser_(
else => {
if (!p.lexer.has_newline_before) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
},
@@ -8488,6 +8566,7 @@ fn NewParser_(
// example:
// export class {}
if (!is_identifier) {
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -8617,6 +8696,7 @@ fn NewParser_(
p.esm_export_keyword = p.lexer.range();
} else if (!opts.is_namespace_scope) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
try p.lexer.next();
@@ -8650,12 +8730,14 @@ fn NewParser_(
}
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
T.t_enum => {
if (!is_typescript_enabled) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -8728,12 +8810,14 @@ fn NewParser_(
}
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
T.t_default => {
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -8883,6 +8967,7 @@ fn NewParser_(
T.t_asterisk => {
if (!opts.is_module_scope and !(opts.is_namespace_scope or !opts.is_typescript_declare)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -8936,6 +9021,7 @@ fn NewParser_(
T.t_open_brace => {
if (!opts.is_module_scope and !(opts.is_namespace_scope or !opts.is_typescript_declare)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -9013,10 +9099,12 @@ fn NewParser_(
return p.s(S.ExportEquals{ .value = value }, loc);
}
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
else => {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
}
@@ -9029,6 +9117,7 @@ fn NewParser_(
.t_enum => {
if (!is_typescript_enabled) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
return p.parseTypescriptEnumStmt(loc, opts);
@@ -9072,6 +9161,7 @@ fn NewParser_(
// notimpl();
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
.t_class => {
@@ -9198,6 +9288,7 @@ fn NewParser_(
if (p.lexer.token == .t_default) {
if (foundDefault) {
try p.log.addRangeError(p.source, p.lexer.range(), "Multiple default clauses are not allowed");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -9387,6 +9478,7 @@ fn NewParser_(
if (p.lexer.isContextualKeyword("of") or isForAwait) {
if (bad_let_range) |r| {
try p.log.addRangeError(p.source, r, "\"let\" must be wrapped in parentheses to be used as an expression here");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -9395,6 +9487,7 @@ fn NewParser_(
try p.lexer.expectedString("\"of\"");
} else {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
}
@@ -9480,6 +9573,7 @@ fn NewParser_(
// "import 'path'"
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
was_originally_bare_import = true;
@@ -9488,6 +9582,7 @@ fn NewParser_(
// "import * as ns from 'path'"
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -9505,6 +9600,7 @@ fn NewParser_(
// "import {item1, item2} from 'path'"
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
var importClause = try p.parseImportClause();
@@ -9530,6 +9626,7 @@ fn NewParser_(
// "import foo = bar"
if (!opts.is_module_scope and (!opts.is_namespace_scope)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -9615,6 +9712,7 @@ fn NewParser_(
},
else => {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
}
@@ -9624,6 +9722,7 @@ fn NewParser_(
},
else => {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
}
@@ -9669,6 +9768,7 @@ fn NewParser_(
try p.log.addError(p.source, logger.Loc{
.start = loc.start + 5,
}, "Unexpected newline after \"throw\"");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
const expr = try p.parseExpr(.lowest);
@@ -10441,6 +10541,7 @@ fn NewParser_(
// Commas after spread elements are not allowed
if (has_spread and p.lexer.token == .t_comma) {
p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable;
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
}
@@ -10488,6 +10589,7 @@ fn NewParser_(
// Commas after spread elements are not allowed
if (property.flags.contains(.is_spread) and p.lexer.token == .t_comma) {
p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable;
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -10939,6 +11041,7 @@ fn NewParser_(
if (first_non_identifier_loc.start != 0 and !p.lexer.isContextualKeyword("from")) {
const r = js_lexer.rangeOfIdentifier(p.source, first_non_identifier_loc);
try p.lexer.addRangeError(r, "Expected identifier but found \"{s}\"", .{p.source.textForRange(r)}, true);
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -11390,6 +11493,7 @@ fn NewParser_(
// Newlines are not allowed before "=>"
if (p.lexer.has_newline_before) {
try p.log.addRangeError(p.source, p.lexer.range(), "Unexpected newline before \"=>\"");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -11880,6 +11984,7 @@ fn NewParser_(
if (isStar) {
if (p.lexer.has_newline_before) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
try p.lexer.next();
@@ -11964,6 +12069,7 @@ fn NewParser_(
.t_asterisk => {
if (kind != .normal or opts.is_generator) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -12734,6 +12840,7 @@ fn NewParser_(
// "a?.<T>()"
if (comptime !is_typescript_enabled) {
try p.lexer.expected(.t_identifier);
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -12885,6 +12992,7 @@ fn NewParser_(
{
if (errors == null) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
errors.?.invalid_expr_after_question = p.lexer.range();
@@ -12916,6 +13024,7 @@ fn NewParser_(
if (!is_typescript_enabled) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13191,6 +13300,7 @@ fn NewParser_(
// Prevent "||" inside "??" from the right
if (level.eql(.nullish_coalescing)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13203,6 +13313,7 @@ fn NewParser_(
if (p.lexer.token == .t_question_question) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
}
@@ -13223,6 +13334,7 @@ fn NewParser_(
// Prevent "&&" inside "??" from the right
if (level.eql(.nullish_coalescing)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13235,6 +13347,7 @@ fn NewParser_(
if (p.lexer.token == .t_question_question) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
}
@@ -13508,6 +13621,7 @@ fn NewParser_(
.t_private_identifier => {
if (!p.allow_private_identifiers or !p.allow_in or level.gte(.compare)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13556,6 +13670,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == T.t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13667,6 +13782,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == .t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13680,6 +13796,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == .t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13690,6 +13807,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == .t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
if (value.data == .e_index) {
@@ -13708,6 +13826,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == .t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13718,6 +13837,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == .t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13728,6 +13848,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == .t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13738,6 +13859,7 @@ fn NewParser_(
const value = try p.parseExpr(.prefix);
if (p.lexer.token == .t_asterisk_asterisk) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -13800,6 +13922,7 @@ fn NewParser_(
if (p.lexer.token != .t_identifier or !strings.eqlComptime(p.lexer.raw(), "target")) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
const range = logger.Range{ .loc = loc, .len = p.lexer.range().end().start - loc.start };
@@ -14067,17 +14190,21 @@ fn NewParser_(
}
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
.t_import => {
try p.lexer.next();
return p.parseImportExpr(loc, level);
},
- else => {
+ else => |tok| {
+ Output.print("Unexpected {s}", .{@tagName(tok)});
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
}
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -14340,6 +14467,7 @@ fn NewParser_(
// If we get here, it's invalid
try p.log.addError(p.source, expr.loc, "Invalid JSX prop shorthand, must be identifier, dot or string");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
};
@@ -14394,6 +14522,7 @@ fn NewParser_(
const r = p.lexer.range();
// Not dealing with this right now.
try p.log.addRangeError(p.source, r, "Invalid JSX escape - use XML entity codes quotes or pass a JavaScript string instead");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -14475,6 +14604,7 @@ fn NewParser_(
end_tag.name,
tag.name,
});
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -14497,6 +14627,7 @@ fn NewParser_(
},
else => {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
},
}
@@ -15739,7 +15870,7 @@ fn NewParser_(
return p.newExpr(
// Use libc fmod here to be consistent with what JavaScriptCore does
// https://github.com/oven-sh/WebKit/blob/7a0b13626e5db69aa5a32d037431d381df5dfb61/Source/JavaScriptCore/runtime/MathCommon.cpp#L574-L597
- E.Number{ .value = bun.C.fmod(vals[0], vals[1]) },
+ E.Number{ .value = if (comptime Environment.isNative) bun.C.fmod(vals[0], vals[1]) else std.math.mod(f64, vals[0], vals[1]) catch 0 },
expr.loc,
);
}
@@ -20807,6 +20938,7 @@ fn NewParser_(
// Arrow functions are not allowed inside certain expressions
if (level.gt(.assign)) {
try p.lexer.unexpected();
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -20879,6 +21011,7 @@ fn NewParser_(
// If this isn't an arrow function, then types aren't allowed
if (type_colon_range.len > 0) {
try p.log.addRangeError(p.source, type_colon_range, "Unexpected \":\"");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -20894,6 +21027,7 @@ fn NewParser_(
p.logExprErrors(&errors);
if (spread_range.len > 0) {
try p.log.addRangeError(p.source, type_colon_range, "Unexpected \"...\"");
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -20904,6 +21038,7 @@ fn NewParser_(
// Indicate that we expected an arrow function
try p.lexer.expected(.t_equals_greater_than);
+ __wrapSyntaxError(@src());
return error.SyntaxError;
}
@@ -21997,7 +22132,7 @@ const JSXParser = if (bun.fast_debug_build_mode)
TSXParser
else
NewParser(.{ .jsx = .react });
-const TSXParser = NewParser(.{ .jsx = .react, .typescript = true });
+pub const TSXParser = NewParser(.{ .jsx = .react, .typescript = true });
const TypeScriptParser = NewParser(.{ .typescript = true });
const JSParserMacro = if (bun.fast_debug_build_mode)
TSParserMacro
diff --git a/src/logger.zig b/src/logger.zig
index 5a9e5b973..21a58243c 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -561,7 +561,7 @@ pub const Msg = struct {
try writer.print("{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{
msg.kind.string(),
msg.data.text,
- location.line_text,
+ location.line_text orelse "",
location.file,
location.line,
location.column,
diff --git a/src/main_wasm.zig b/src/main_wasm.zig
index d58ce60a0..e7fd55d80 100644
--- a/src/main_wasm.zig
+++ b/src/main_wasm.zig
@@ -1,3 +1,4 @@
+const bun = @import("root").bun;
const JSParser = bun.js_parser;
const JSPrinter = bun.js_printer;
const JSAst = bun.JSAst;
@@ -181,26 +182,325 @@ var output_stream = std.io.fixedBufferStream(&output_stream_buf);
var error_stream_buf: [16384]u8 = undefined;
var error_stream = std.io.fixedBufferStream(&error_stream_buf);
var output_source: global.Output.Source = undefined;
-export fn init() void {
+var init_counter: usize = 0;
+export fn init(heapsize: u32) void {
const Mimalloc = @import("./allocators/mimalloc.zig");
- // reserve 256 MB upfront
- Mimalloc.mi_option_set(Mimalloc.mi_option_t.allow_decommit, 0);
- Mimalloc.mi_option_set(Mimalloc.mi_option_t.limit_os_alloc, 1);
- _ = Mimalloc.mi_reserve_os_memory(2.56e+8, false, true);
-
- output_source = global.Output.Source.init(output_stream, error_stream);
- global.Output.Source.set(&output_source);
- JSAst.Stmt.Data.Store.create(default_allocator);
- JSAst.Expr.Data.Store.create(default_allocator);
- buffer_writer = JSPrinter.BufferWriter.init(default_allocator) catch unreachable;
- buffer_writer.buffer.growBy(1024) catch unreachable;
- writer = JSPrinter.BufferPrinter.init(buffer_writer);
- define = Define.Define.init(default_allocator, null, null) catch unreachable;
+ defer init_counter +%= 1;
+ if (init_counter == 0) {
+
+ // reserve 256 MB upfront
+ Mimalloc.mi_option_set(.allow_decommit, 0);
+ Mimalloc.mi_option_set(.limit_os_alloc, 1);
+ _ = Mimalloc.mi_reserve_os_memory(heapsize, false, true);
+
+ JSAst.Stmt.Data.Store.create(default_allocator);
+ JSAst.Expr.Data.Store.create(default_allocator);
+ buffer_writer = JSPrinter.BufferWriter.init(default_allocator) catch unreachable;
+ buffer_writer.buffer.growBy(1024) catch unreachable;
+ writer = JSPrinter.BufferPrinter.init(buffer_writer);
+ define = Define.Define.init(default_allocator, null, null) catch unreachable;
+ output_source = global.Output.Source.init(output_stream, error_stream);
+ global.Output.Source.set(&output_source);
+ } else {
+ buffer_writer = writer.ctx;
+ }
}
const Arena = @import("./mimalloc_arena.zig").Arena;
var log: Logger.Log = undefined;
+const TestAnalyzer = struct {
+ string_buffer: std.ArrayList(u8),
+ items: std.ArrayList(Api.TestResponseItem),
+
+ pub fn visitExpr(this: *TestAnalyzer, parser: *bun.js_parser.TSXParser, expr: JSAst.Expr) !void {
+ switch (expr.data) {
+ .e_call => |call| {
+ if (call.target.isRef(parser.jest.@"test") or call.target.isRef(parser.jest.it) or call.target.isRef(parser.jest.describe)) {
+ if (call.args.len > 0) {
+ const label_expr: JSAst.Expr = call.args.slice()[0];
+ switch (label_expr.data) {
+ .e_string => |str| {
+ try str.toUTF8(this.string_buffer.allocator);
+ const ptr = Api.StringPointer{
+ .offset = this.string_buffer.items.len,
+ .length = str.data.len,
+ };
+ try this.string_buffer.appendSlice(str.data);
+ try this.items.append(Api.TestResponseItem{
+ .byte_offset = expr.loc.start,
+ .kind = if (call.target.isRef(parser.jest.describe)) Api.TestKind.describe_fn else .test_fn,
+ .label = ptr,
+ });
+ },
+ .e_dot => {},
+ else => {},
+ }
+
+ return;
+ }
+ } else if (call.target.data == .e_dot and bun.strings.eqlComptime(call.target.data.e_dot.name, "only")) {
+ const target = call.target.data.e_dot.target;
+ if (target.isRef(parser.jest.@"test") or target.isRef(parser.jest.it) or target.isRef(parser.jest.describe)) {
+ if (call.args.len > 0) {
+ const label_expr: JSAst.Expr = call.args.slice()[0];
+ switch (label_expr.data) {
+ .e_string => |str| {
+ try str.toUTF8(this.string_buffer.allocator);
+ const ptr = Api.StringPointer{
+ .offset = this.string_buffer.items.len,
+ .length = str.data.len,
+ };
+ try this.string_buffer.appendSlice(str.data);
+ try this.items.append(Api.TestResponseItem{
+ .byte_offset = expr.loc.start,
+ .kind = if (target.isRef(parser.jest.describe)) Api.TestKind.describe_fn else .test_fn,
+ .label = ptr,
+ });
+ },
+ .e_dot => {},
+ else => {},
+ }
+
+ return;
+ }
+ }
+ }
+
+ try this.visitExpr(parser, call.target);
+ for (call.args.slice()) |arg| {
+ try this.visitExpr(parser, arg);
+ }
+ },
+ .e_binary => |bin| {
+ try this.visitExpr(parser, bin.left);
+ try this.visitExpr(parser, bin.right);
+ },
+ .e_new => |new| {
+ try this.visitExpr(parser, new.target);
+ for (new.args.slice()) |arg| {
+ try this.visitExpr(parser, arg);
+ }
+ },
+
+ .e_array => |arr| {
+ for (arr.items.slice()) |item| {
+ try this.visitExpr(parser, item);
+ }
+ },
+
+ .e_if => |if_| {
+ try this.visitExpr(parser, if_.no);
+ try this.visitExpr(parser, if_.test_);
+ try this.visitExpr(parser, if_.yes);
+ },
+
+ .e_function => |func| {
+ for (func.func.body.stmts) |stmt| {
+ try this.visitStmt(parser, stmt);
+ }
+ },
+
+ .e_arrow => |arrow| {
+ for (arrow.body.stmts) |stmt| {
+ try this.visitStmt(parser, stmt);
+ }
+ },
+ else => {},
+ }
+ }
+
+ pub fn visitStmt(this: *TestAnalyzer, parser: *bun.js_parser.TSXParser, stmt: JSAst.Stmt) anyerror!void {
+ switch (stmt.data) {
+ .s_block => |s| {
+ for (s.stmts) |s2| {
+ try this.visitStmt(parser, s2);
+ }
+ },
+ .s_do_while => |s| {
+ try this.visitStmt(parser, s.body);
+ try this.visitExpr(parser, s.test_);
+ },
+ .s_expr => |s| {
+ try this.visitExpr(parser, s.value);
+ },
+ .s_for_in => |s| {
+ try this.visitStmt(parser, s.init);
+ try this.visitStmt(parser, s.body);
+ try this.visitExpr(parser, s.value);
+ },
+ .s_for_of => |s| {
+ try this.visitStmt(parser, s.init);
+ try this.visitStmt(parser, s.body);
+ try this.visitExpr(parser, s.value);
+ },
+ .s_for => |s| {
+ if (s.init) |i| {
+ try this.visitStmt(parser, i);
+ }
+ if (s.test_) |i| {
+ try this.visitExpr(parser, i);
+ }
+ if (s.update) |i| {
+ try this.visitExpr(parser, i);
+ }
+
+ try this.visitStmt(parser, s.body);
+ },
+ .s_function => |s| {
+ for (s.func.args) |arg| {
+ if (arg.default) |def| {
+ try this.visitExpr(parser, def);
+ }
+ }
+
+ for (s.func.body.stmts) |s2| {
+ try this.visitStmt(parser, s2);
+ }
+ },
+ .s_if => |s| {
+ try this.visitExpr(parser, s.test_);
+ try this.visitStmt(parser, s.yes);
+ if (s.no) |no| {
+ try this.visitStmt(parser, no);
+ }
+ },
+ .s_local => |s| {
+ for (s.decls.slice()) |decl| {
+ if (decl.value) |val| {
+ try this.visitExpr(parser, val);
+ }
+ }
+ },
+ .s_switch => |s| {
+ try this.visitExpr(parser, s.test_);
+ for (s.cases) |c| {
+ for (c.body) |t| {
+ try this.visitStmt(parser, t);
+ }
+ if (c.value) |e2| {
+ try this.visitExpr(parser, e2);
+ }
+ }
+ },
+ .s_throw => |s| {
+ try this.visitExpr(parser, s.value);
+ },
+ .s_try => |s| {
+ for (s.body) |s2| {
+ try this.visitStmt(parser, s2);
+ }
+ if (s.catch_) |c| {
+ for (c.body) |s2| {
+ try this.visitStmt(parser, s2);
+ }
+ }
+ if (s.finally) |f| {
+ for (f.stmts) |s2| {
+ try this.visitStmt(parser, s2);
+ }
+ }
+ },
+ .s_while => |s| {
+ try this.visitExpr(parser, s.test_);
+ try this.visitStmt(parser, s.body);
+ },
+
+ .s_import => |import| {
+ if (bun.strings.eqlComptime(parser.import_records.items[import.import_record_index].path.text, "bun:test")) {
+ for (import.items) |item| {
+ const clause: bun.JSAst.ClauseItem = item;
+ if (bun.strings.eqlComptime(clause.alias, "test")) {
+ parser.jest.@"test" = clause.name.ref.?;
+ } else if (bun.strings.eqlComptime(clause.alias, "it")) {
+ parser.jest.it = clause.name.ref.?;
+ } else if (bun.strings.eqlComptime(clause.alias, "describe")) {
+ parser.jest.describe = clause.name.ref.?;
+ }
+ }
+ }
+ },
+ else => {},
+ }
+ }
+
+ pub fn visitParts(
+ this: *TestAnalyzer,
+ parser: *bun.js_parser.TSXParser,
+ parts: []bun.JSAst.Part,
+ ) anyerror!void {
+ var jest = &parser.jest;
+ if (parser.symbols.items[jest.it.innerIndex()].use_count_estimate == 0) {
+ if (parser.symbols.items[jest.it.innerIndex()].use_count_estimate > 0) {
+ jest.@"test" = jest.it;
+ }
+ } else if (parser.symbols.items[jest.@"test".innerIndex()].use_count_estimate == 0) {
+ if (parser.symbols.items[jest.it.innerIndex()].use_count_estimate > 0) {
+ jest.@"test" = jest.it;
+ }
+ }
+
+ for (parts) |part| {
+ for (part.stmts) |stmt| {
+ try this.visitStmt(parser, stmt);
+ }
+ }
+ }
+};
+export fn getTests(opts_array: u64) u64 {
+ var arena = Arena.init() catch unreachable;
+ var allocator = arena.allocator();
+ defer arena.deinit();
+ var log_ = Logger.Log.init(allocator);
+ var reader = ApiReader.init(Uint8Array.fromJS(opts_array), allocator);
+ var opts = Api.GetTestsRequest.decode(&reader) catch @panic("out of memory");
+ var code = Logger.Source.initPathString(if (opts.path.len > 0) opts.path else "my-test-file.test.tsx", opts.contents);
+ code.contents_is_recycled = true;
+ defer {
+ JSAst.Stmt.Data.Store.reset();
+ JSAst.Expr.Data.Store.reset();
+ }
+
+ var parser = JSParser.Parser.init(.{
+ .jsx = .{},
+ .ts = true,
+ }, &log_, &code, define, allocator) catch @panic("out of memory");
+
+ var anaylzer = TestAnalyzer{
+ .items = std.ArrayList(
+ Api.TestResponseItem,
+ ).init(allocator),
+ .string_buffer = std.ArrayList(
+ u8,
+ ).init(allocator),
+ };
+ parser.options.features.inject_jest_globals = true;
+ parser.options.features.commonjs_at_runtime = true;
+ parser.options.features.top_level_await = true;
+
+ parser.analyze(&anaylzer, @ptrCast(&TestAnalyzer.visitParts)) catch |err| {
+ Output.print("Error: {s}\n", .{@errorName(err)});
+
+ if (@errorReturnTrace()) |trace| {
+ Output.print("{}\n", .{trace});
+ }
+ log_.printForLogLevel(Output.writer()) catch unreachable;
+ return 0;
+ };
+
+ var output = std.ArrayList(u8).init(default_allocator);
+ var output_writer = output.writer();
+ const Encoder = ApiWriter(@TypeOf(output_writer));
+ var encoder = Encoder.init(output_writer);
+ var response = Api.GetTestsResponse{
+ .tests = analyzer.items.items,
+ .contents = analyzer.string_buffer.items,
+ };
+
+ response.encode(&encoder) catch return 0;
+ return @as(u64, @bitCast([2]u32{ @intFromPtr(output.items.ptr), output.items.len }));
+}
+
export fn transform(opts_array: u64) u64 {
// var arena = @import("root").bun.ArenaAllocator.init(default_allocator);
var arena = Arena.init() catch unreachable;
@@ -235,8 +535,8 @@ export fn transform(opts_array: u64) u64 {
parser.options.tree_shaking = false;
parser.options.features.top_level_await = true;
const result = parser.parse() catch unreachable;
- if (result.ok) {
- var symbols: [][]JSAst.Symbol = &([_][]JSAst.Symbol{result.ast.symbols});
+ if (result == .ast and log.errors == 0) {
+ var symbols = JSAst.Symbol.NestedList.init(&[_]JSAst.Symbol.List{result.ast.symbols});
_ = JSPrinter.printAst(
@TypeOf(&writer),
@@ -246,8 +546,6 @@ export fn transform(opts_array: u64) u64 {
&code,
false,
.{},
- void,
- null,
false,
) catch 0;
@@ -260,7 +558,7 @@ export fn transform(opts_array: u64) u64 {
}
transform_response = Api.TransformResponse{
- .status = if (result.ok) Api.TransformResponseStatus.success else Api.TransformResponseStatus.fail,
+ .status = if (result == .ast and log.errors == 0) Api.TransformResponseStatus.success else Api.TransformResponseStatus.fail,
.files = &output_files,
.errors = (log.toAPI(allocator) catch unreachable).msgs,
};
@@ -269,7 +567,7 @@ export fn transform(opts_array: u64) u64 {
var output_writer = output.writer();
const Encoder = ApiWriter(@TypeOf(output_writer));
var encoder = Encoder.init(output_writer);
- transform_response.encode(&encoder) catch unreachable;
+ transform_response.encode(&encoder) catch {};
return @as(u64, @bitCast([2]u32{ @intFromPtr(output.items.ptr), output.items.len }));
}
@@ -311,10 +609,10 @@ export fn scan(opts_array: u64) u64 {
var output_writer = output.writer();
const Encoder = ApiWriter(@TypeOf(output_writer));
- if (result.ok) {
+ if (result == .ast) {
var scanned_imports = allocator.alloc(Api.ScannedImport, result.ast.import_records.len) catch unreachable;
var scanned_i: usize = 0;
- for (result.ast.import_records) |import_record| {
+ for (result.ast.import_records.slice()) |import_record| {
if (import_record.kind == .internal) continue;
scanned_imports[scanned_i] = Api.ScannedImport{ .path = import_record.path.text, .kind = import_record.kind.toAPI() };
scanned_i += 1;
@@ -337,6 +635,7 @@ export fn emsc_main() void {
_ = transform;
_ = bun_free;
_ = bun_malloc;
+ _ = getTests;
}
comptime {
@@ -347,4 +646,5 @@ comptime {
_ = bun_free;
_ = scan;
_ = bun_malloc;
+ _ = getTests;
}
diff --git a/src/mimalloc_arena.zig b/src/mimalloc_arena.zig
index dc71a887e..c5975b72d 100644
--- a/src/mimalloc_arena.zig
+++ b/src/mimalloc_arena.zig
@@ -80,7 +80,7 @@ const ArenaRegistry = struct {
var registry = ArenaRegistry{};
pub fn register(arena: Arena) void {
- if (comptime Environment.allow_assert) {
+ if (comptime Environment.allow_assert and Environment.isNative) {
registry.mutex.lock();
defer registry.mutex.unlock();
var entry = registry.arenas.getOrPut(arena.heap.?) catch unreachable;
@@ -100,7 +100,7 @@ const ArenaRegistry = struct {
}
pub fn assert(arena: Arena) void {
- if (comptime Environment.allow_assert) {
+ if (comptime Environment.allow_assert and Environment.isNative) {
registry.mutex.lock();
defer registry.mutex.unlock();
const expected = registry.arenas.get(arena.heap.?) orelse {
@@ -117,7 +117,7 @@ const ArenaRegistry = struct {
}
pub fn unregister(arena: Arena) void {
- if (comptime Environment.allow_assert) {
+ if (comptime Environment.allow_assert and Environment.isNative) {
registry.mutex.lock();
defer registry.mutex.unlock();
if (!registry.arenas.swapRemove(arena.heap.?)) {
diff --git a/src/output.zig b/src/output.zig
index 0f9b1d67c..4feab6189 100644
--- a/src/output.zig
+++ b/src/output.zig
@@ -3,7 +3,7 @@ const bun = @import("root").bun;
const std = @import("std");
const Environment = @import("./env.zig");
const string = @import("root").bun.string;
-const root = @import("root").bun;
+const root = @import("root");
const strings = @import("root").bun.strings;
const StringTypes = @import("root").bun.StringTypes;
const Global = @import("root").bun.Global;
@@ -420,7 +420,7 @@ pub noinline fn print(comptime fmt: string, args: anytype) void {
/// BUN_DEBUG_ALL=1
const _log_fn = fn (comptime fmt: string, args: anytype) void;
pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_fn {
- if (comptime !Environment.isDebug) {
+ if (comptime !Environment.isDebug or !Environment.isNative) {
return struct {
pub fn log(comptime _: string, _: anytype) void {}
}.log;
diff --git a/src/renamer.zig b/src/renamer.zig
index b2164703d..9d2f7d414 100644
--- a/src/renamer.zig
+++ b/src/renamer.zig
@@ -875,6 +875,10 @@ pub const ExportRenamer = struct {
pub fn computeInitialReservedNames(
allocator: std.mem.Allocator,
) !bun.StringHashMapUnmanaged(u32) {
+ if (comptime bun.Environment.isWasm) {
+ unreachable;
+ }
+
var names = bun.StringHashMapUnmanaged(u32){};
const extras = .{
diff --git a/src/sourcemap/CodeCoverage.zig b/src/sourcemap/CodeCoverage.zig
index 930253731..a6982c603 100644
--- a/src/sourcemap/CodeCoverage.zig
+++ b/src/sourcemap/CodeCoverage.zig
@@ -632,10 +632,12 @@ pub const ByteRangeMapping = struct {
};
comptime {
- @export(ByteRangeMapping.generate, .{ .name = "ByteRangeMapping__generate" });
- @export(ByteRangeMapping.findExecutedLines, .{ .name = "ByteRangeMapping__findExecutedLines" });
- @export(ByteRangeMapping.find, .{ .name = "ByteRangeMapping__find" });
- @export(ByteRangeMapping.getSourceID, .{ .name = "ByteRangeMapping__getSourceID" });
+ if (bun.Environment.isNative) {
+ @export(ByteRangeMapping.generate, .{ .name = "ByteRangeMapping__generate" });
+ @export(ByteRangeMapping.findExecutedLines, .{ .name = "ByteRangeMapping__findExecutedLines" });
+ @export(ByteRangeMapping.find, .{ .name = "ByteRangeMapping__find" });
+ @export(ByteRangeMapping.getSourceID, .{ .name = "ByteRangeMapping__getSourceID" });
+ }
}
pub const CoverageFraction = struct {
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index fbfe9a3c4..3aaec1314 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -271,6 +271,10 @@ pub inline fn lastIndexOf(self: string, str: string) ?usize {
}
pub inline fn indexOf(self: string, str: string) ?usize {
+ if (comptime !bun.Environment.isNative) {
+ return std.mem.indexOf(u8, self, str);
+ }
+
const self_len = self.len;
const str_len = str.len;
@@ -921,7 +925,7 @@ inline fn eqlComptimeCheckLenWithKnownType(comptime Type: type, a: []const Type,
}
const len = comptime b.len;
- comptime var dword_length = b.len >> 3;
+ comptime var dword_length = b.len >> if (Environment.isNative) 3 else 2;
const slice = b;
const divisor = comptime @sizeOf(Type);
@@ -1521,7 +1525,11 @@ pub fn toUTF8ListWithTypeBun(list_: std.ArrayList(u8), comptime Type: type, utf1
utf16_remaining = utf16_remaining[replacement.len..];
const count: usize = replacement.utf8Width();
- try list.ensureTotalCapacityPrecise(i + count + list.items.len + @as(usize, @intFromFloat((@as(f64, @floatFromInt(@as(u52, @truncate(utf16_remaining.len)))) * 1.2))));
+ if (comptime Environment.isNative) {
+ try list.ensureTotalCapacityPrecise(i + count + list.items.len + @as(usize, @intFromFloat((@as(f64, @floatFromInt(@as(u52, @truncate(utf16_remaining.len)))) * 1.2))));
+ } else {
+ try list.ensureTotalCapacityPrecise(i + count + list.items.len + utf16_remaining.len + 4);
+ }
list.items.len += i;
copyU16IntoU8(
@@ -3445,6 +3453,10 @@ pub fn indexOfCharUsize(slice: []const u8, char: u8) ?usize {
if (slice.len == 0)
return null;
+ if (comptime !Environment.isNative) {
+ return std.mem.indexOfScalar(u8, slice, char);
+ }
+
const ptr = bun.C.memchr(slice.ptr, char, slice.len) orelse return null;
const i = @intFromPtr(ptr) - @intFromPtr(slice.ptr);
std.debug.assert(i < slice.len);
@@ -3704,7 +3716,7 @@ pub fn getLinesInText(text: []const u8, line: u32, comptime line_range_count: us
pub fn firstNonASCII16CheckMin(comptime Slice: type, slice: Slice, comptime check_min: bool) ?u32 {
var remaining = slice;
- if (comptime Environment.enableSIMD) {
+ if (comptime Environment.enableSIMD and Environment.isNative) {
const end_ptr = remaining.ptr + remaining.len - (remaining.len % ascii_u16_vector_size);
if (remaining.len > ascii_u16_vector_size) {
const remaining_start = remaining.ptr;
@@ -3780,7 +3792,7 @@ pub fn @"nextUTF16NonASCIIOr$`\\"(
) ?u32 {
var remaining = slice;
- if (comptime Environment.enableSIMD) {
+ if (comptime Environment.enableSIMD and Environment.isNative) {
while (remaining.len >= ascii_u16_vector_size) {
const vec: AsciiU16Vector = remaining[0..ascii_u16_vector_size].*;
@@ -4054,7 +4066,8 @@ pub fn join(slices: []const string, delimiter: string, allocator: std.mem.Alloca
pub fn order(a: []const u8, b: []const u8) std.math.Order {
const len = @min(a.len, b.len);
- const cmp = bun.C.memcmp(a.ptr, b.ptr, len);
+
+ const cmp = if (comptime Environment.isNative) bun.C.memcmp(a.ptr, b.ptr, len) else return std.mem.order(u8, a, b);
return switch (std.math.sign(cmp)) {
0 => std.math.order(a.len, b.len),
1 => .gt,
diff --git a/src/tracy.zig b/src/tracy.zig
index fb900442b..a51420f02 100644
--- a/src/tracy.zig
+++ b/src/tracy.zig
@@ -441,6 +441,10 @@ fn ___tracy_emit_frame_mark(name: ?[*:0]const u8) void {
}
pub fn init() bool {
+ if (comptime !bun.Environment.isNative) {
+ return false;
+ }
+
if (enable)
return true;
@@ -452,6 +456,10 @@ pub fn init() bool {
}
pub fn isConnected() bool {
+ if (comptime !bun.Environment.isNative) {
+ return false;
+ }
+
if (!enable)
return false;
@@ -460,6 +468,10 @@ pub fn isConnected() bool {
}
pub fn initThread(comptime name: [:0]const u8) void {
+ if (comptime !bun.Environment.isNative) {
+ return;
+ }
+
if (!enable)
return;
@@ -475,6 +487,10 @@ const ___tracy_source_location_data = extern struct {
};
fn dlsym(comptime Type: type, comptime symbol: [:0]const u8) ?Type {
+ if (comptime !bun.Environment.isNative) {
+ return null;
+ }
+
if (comptime bun.Environment.isLinux) {
// use LD_PRELOAD on linux
if (bun.C.dlsym(Type, symbol)) |val| {
diff --git a/test/js/node/path/path.test.js b/test/js/node/path/path.test.js
index 0df4f5abb..47d16fee0 100644
--- a/test/js/node/path/path.test.js
+++ b/test/js/node/path/path.test.js
@@ -628,8 +628,6 @@ it("path.resolve", () => {
});
it("path.parse", () => {
- expect(path.parse("/tmp")).toStrictEqual({ root: "/", dir: "/", base: "tmp", ext: "", name: "tmp" });
-
expect(path.parse("/tmp/test.txt")).toStrictEqual({
root: "/",
dir: "/tmp",