author | 2023-08-24 18:39:28 -0300
committer | 2023-08-24 14:39:28 -0700
commit | 55eb4ffe8fc8708d5a44c53fe75026a03f0a4de8 (patch)
tree | b807c9eb1819e28eb991e583e4c7550b996a0ff2
parent | a051a6f62014b740702594527b19464ce24ba32b (diff)
download | bun-55eb4ffe8fc8708d5a44c53fe75026a03f0a4de8.tar.gz, bun-55eb4ffe8fc8708d5a44c53fe75026a03f0a4de8.tar.zst, bun-55eb4ffe8fc8708d5a44c53fe75026a03f0a4de8.zip
Update bun-polyfills & bun-wasm (#4246)
* automate Bun.version & revision polyfills
* polyfill Bun.gc
* bun:jsc module initial polyfills
* update peechy schema
* bun-polyfills: fix some project configs
* bun-wasm: lots of fixes
* bun-polyfills: Bun.Transpiler impl.
* revision hash update
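
For orientation, here is a rough usage sketch of the Bun.Transpiler polyfill added in this commit, as it would run under Node.js once the package is built. The import specifier is illustrative; the API shape follows packages/bun-polyfills/src/modules/bun/transpiler.ts in the diff below, which now delegates to bun-wasm instead of throwing NotImplementedError.

```ts
// Illustrative import path; assumes the compiled "dist" output of bun-polyfills.
import { Transpiler } from './dist/src/modules/bun.js';

// Note: current bun-wasm builds ignore the loader option and hardcode 'tsx' (see #rootFile below).
const transpiler = new Transpiler({ loader: 'tsx' });

// Synchronous transform; on parse errors this throws an AggregateError named 'BuildError'.
const js = transpiler.transformSync('export const answer: number = 42;');

// scan() reports exports and imports; scanImports() keeps only the
// 'import-statement' and 'dynamic-import' kinds.
const { exports, imports } = transpiler.scan("import fs from 'node:fs'; export const x = 1;");
console.log(js, exports, imports);
```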
-rwxr-xr-x | packages/bun-polyfills/bun.lockb | bin | 27871 -> 28510 bytes
-rw-r--r-- | packages/bun-polyfills/package.json | 4
-rw-r--r-- | packages/bun-polyfills/src/modules/bun.ts | 5
-rw-r--r-- | packages/bun-polyfills/src/modules/bun/transpiler.ts | 149
-rw-r--r-- | packages/bun-polyfills/src/repl.ts | 1
-rw-r--r-- | packages/bun-polyfills/tsconfig.json | 2
-rw-r--r-- | packages/bun-wasm/index.ts | 82
-rw-r--r-- | src/api/schema.d.ts | 1
-rw-r--r-- | src/api/schema.js | 16
-rw-r--r-- | src/api/schema.peechy | 3
-rw-r--r-- | src/api/schema.zig | 5
-rw-r--r-- | src/main_wasm.zig | 50
12 files changed, 169 insertions, 149 deletions
diff --git a/packages/bun-polyfills/bun.lockb b/packages/bun-polyfills/bun.lockb
index fdc201d28..f93a0d084 100755
--- a/packages/bun-polyfills/bun.lockb
+++ b/packages/bun-polyfills/bun.lockb
Binary files differ
diff --git a/packages/bun-polyfills/package.json b/packages/bun-polyfills/package.json
index f08452c3d..be3d906ba 100644
--- a/packages/bun-polyfills/package.json
+++ b/packages/bun-polyfills/package.json
@@ -15,11 +15,13 @@
"node": "node --enable-source-maps --import ./dist/src/repl.js",
"clean": "rm -rf dist",
"preprocess": "bun tools/updateversions.ts",
- "build": "bun run clean && bun run preprocess && bunx tsc && bunx copyfiles \"./**/*.wasm\" dist",
+ "build": "bun run clean && bun run preprocess && bunx tsc && bunx copyfiles \"./lib/**/*.wasm\" dist",
"build/wasm": "bun run build/zighash",
"build/zighash": "cd lib/zighash && bun run build && cd ../.."
},
"dependencies": {
+ "bun-wasm": "link:bun-wasm",
+ "chalk": "^5.3.0",
"js-md4": "^0.3.2",
"open-editor": "^4.0.0",
"supports-color": "^9.4.0",
diff --git a/packages/bun-polyfills/src/modules/bun.ts b/packages/bun-polyfills/src/modules/bun.ts
index 432daea95..caddd2c02 100644
--- a/packages/bun-polyfills/src/modules/bun.ts
+++ b/packages/bun-polyfills/src/modules/bun.ts
@@ -16,6 +16,7 @@ import {
} from './bun/hashes.js';
import { ArrayBufferSink as ArrayBufferSinkPolyfill } from './bun/arraybuffersink.js';
import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js';
+import TranspilerImpl from './bun/transpiler.js';
import fs from 'node:fs';
import v8 from 'node:v8';
import path from 'node:path';
@@ -32,7 +33,7 @@ export const main = path.resolve(process.cwd(), process.argv[1] ?? 'repl') satis
//? These are automatically updated on build by tools/updateversions.ts, do not edit manually.
export const version = '0.7.4' satisfies typeof Bun.version;
-export const revision = '7088d7e182635a58a50860302da0b1abc42c7ce7' satisfies typeof Bun.revision;
+export const revision = '56816a3ec845a4b9fc40ade34dbe5c0033433d51' satisfies typeof Bun.revision;
export const gc = (globalThis.gc ? (() => (globalThis.gc!(), process.memoryUsage().heapUsed)) : (() => {
const err = new Error('[bun-polyfills] Garbage collection polyfills are only available when Node.js is ran with the --expose-gc flag.');
@@ -71,6 +72,8 @@ export const unsafe = {
}
} satisfies typeof Bun['unsafe'];
+export const Transpiler = TranspilerImpl satisfies typeof Bun.Transpiler;
+
export const SHA1 = SHA1Polyfill satisfies typeof Bun.SHA1;
export const MD5 = MD5Polyfill satisfies typeof Bun.MD5;
export const MD4 = MD4Polyfill satisfies typeof Bun.MD4;
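Tying back to the "polyfill Bun.gc" bullet in the commit message: the gc export in this module only works when Node.js exposes its collector. A minimal sketch of calling it, where the --expose-gc flag comes from the error message in the diff and the import path is illustrative:

```ts
// Illustrative invocation: node --expose-gc --enable-source-maps --import ./dist/src/repl.js
import { gc } from './dist/src/modules/bun.js';

// The polyfill calls globalThis.gc() and returns process.memoryUsage().heapUsed afterwards;
// without --expose-gc it produces the error constructed above instead.
const heapUsedAfterGc = gc(true);
console.log(`heap after GC: ${heapUsedAfterGc} bytes`);
```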
diff --git a/packages/bun-polyfills/src/modules/bun/transpiler.ts b/packages/bun-polyfills/src/modules/bun/transpiler.ts
index b4b95dae1..ba33e0653 100644
--- a/packages/bun-polyfills/src/modules/bun/transpiler.ts
+++ b/packages/bun-polyfills/src/modules/bun/transpiler.ts
@@ -1,103 +1,96 @@
import type { JavaScriptLoader, TranspilerOptions, Transpiler as BunTranspiler, Import } from 'bun';
-import { NotImplementedError } from '../../utils/errors.js';
+import { transformSync, scan, init } from 'bun-wasm';
+import { Message } from 'bun-wasm/schema';
+import $ from 'chalk';
-// TODO: Possible implementation with WASM builds of bun with just the transpiler?
-// NOTE: This is possible to implement with something like SWC, and was previously done,
-// but it has lots of quirks due to the differences between SWC and Bun, so the plan is
-// to not do that unless there is actual demand for using Bun.Transpiler in Node.js before
-// the WASM build is worked on. The signatures are here for now as a placeholder.
+await init();
+
+enum InternalImportKind {
+ 'entry-point' = 1, // entry_point
+ 'import-statement' = 2, // stmt
+ 'require-call' = 3, // require
+ 'dynamic-import' = 4, // dynamic
+ 'require-resolve' = 5, // require_resolve
+ 'import-rule' = 6, // at
+ 'url-token' = 7, // url
+ 'internal' = 8, // internal
+}
+
+export type ScanImportsEntry = {
+ kind: 'import-statement' | 'dynamic-import';
+ path: string;
+};
export default class Transpiler implements BunTranspiler {
constructor(options?: TranspilerOptions) {
this.#options = options ?? {};
+ this.#rootFile = 'input.tsx'; // + (this.#options.loader ?? 'tsx');
+ //? ^ NOTE: with current bun-wasm builds, the loader option is ignored and hardcoded to tsx
}
+ #options: TranspilerOptions;
+ #rootFile: string;
+ #decoder?: TextDecoder;
+ #internallyCalled: boolean = false;
async transform(code: StringOrBuffer, loader: JavaScriptLoader): Promise<string> {
- if (typeof code !== 'string') code = new TextDecoder().decode(code);
- throw new NotImplementedError('Bun.Transpiler', this.transform);
+ this.#internallyCalled = true;
+ return this.transformSync(code, loader);
}
transformSync(code: StringOrBuffer, ctx: object): string;
transformSync(code: StringOrBuffer, loader: JavaScriptLoader, ctx: object): string;
transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | undefined): string;
transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | object, ctx: object = {}): string {
- if (typeof code !== 'string') code = new TextDecoder().decode(code);
- if (typeof loader !== 'string') loader = 'js';
- throw new NotImplementedError('Bun.Transpiler', this.transformSync);
+ if (!code) return ''; // wasm dies with empty string input
+ if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array');
+ if (typeof loader !== 'string') loader = this.#options.loader;
+ const result = transformSync(code, this.#rootFile, loader);
+ // status 1 = success, status 2 = error
+ if (result.status === 2) throw formatBuildErrors(result.errors, this.#internallyCalled ? this.transform : this.transformSync);
+ this.#internallyCalled = false;
+ this.#decoder ??= new TextDecoder();
+ return this.#decoder.decode(result.files[0].data);
}
scan(code: StringOrBuffer): { exports: string[]; imports: Import[]; } {
- if (typeof code !== 'string') code = new TextDecoder().decode(code);
- throw new NotImplementedError('Bun.Transpiler', this.scan);
- //return {
- // imports: this.scanImports(code),
- // exports: this.#scanExports(code)
- //};
- }
+ if (!code) return { exports: [], imports: [] }; // wasm dies with empty string input
+ if (typeof code !== 'string' && !(code instanceof Uint8Array)) throw new TypeError('code must be a string or Uint8Array');
- scanImports(code: StringOrBuffer): {
- kind: 'import-statement' | 'dynamic-import';
- path: string;
- }[] {
- if (typeof code !== 'string') code = new TextDecoder().decode(code);
- throw new NotImplementedError('Bun.Transpiler', this.scanImports);
- //const imports: { kind: 'import-statement' | 'dynamic-import', path: string }[] = [];
- //this.#scanTopLevelImports(code).forEach(x => imports.push({ kind: 'import-statement', path: x }));
- //this.#scanDynamicImports(code).forEach(x => imports.push({ kind: 'dynamic-import', path: x }));
- //return imports;
- }
+ const result = scan(code, this.#rootFile, this.#options.loader);
+ if (result.errors.length) throw formatBuildErrors(result.errors, this.#internallyCalled ? this.scanImports : this.scan);
+ this.#internallyCalled = false;
- /*#scanDynamicImports(code: string): string[] {
- return this.parseSync(code, {
- syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
- }).body.filter(x => x.type === 'ExpressionStatement' && x.expression.type === 'CallExpression' && x.expression.callee.type === 'Import')
- .map(i => (((i as swc.ExpressionStatement).expression as swc.CallExpression).arguments[0].expression as swc.StringLiteral).value);
- }*/
+ result.imports.forEach(imp => (imp.kind as unknown) = InternalImportKind[imp.kind]);
+ return {
+ exports: result.exports,
+ imports: result.imports as unknown as Import[],
+ };
+ }
- /*#scanTopLevelImports(code: string): string[] {
- return this.parseSync(code, {
- syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
- }).body.filter(x => x.type === 'ImportDeclaration' || x.type === 'ExportAllDeclaration' || x.type === 'ExportNamedDeclaration')
- .filter(i => !(i as swc.ImportDeclaration).typeOnly)
- .map(i => (i as swc.ImportDeclaration).source.value);
- }*/
+ scanImports(code: StringOrBuffer): ScanImportsEntry[] {
+ this.#internallyCalled = true;
+ return this.scan(code).imports.filter(imp => imp.kind === 'import-statement' || imp.kind === 'dynamic-import') as ScanImportsEntry[];
+ }
+}
- /*#scanExports(code: string, includeDefault: boolean = false): string[] {
- const parsed = this.parseSync(code, {
- syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
- }).body;
- const exports = [];
- exports.push(parsed.filter(x => x.type === 'ExportDeclaration' && !x.declaration.declare)
- .flatMap(i => ((i as swc.ExportDeclaration).declaration as swc.ClassDeclaration).identifier?.value ??
- ((i as swc.ExportDeclaration).declaration as swc.VariableDeclaration).declarations.map(d => (d.id as swc.Identifier).value)
- )
+function formatBuildErrors(buildErrors: Message[], caller: Transpiler[keyof Transpiler]): AggregateError {
+ const formatted = buildErrors.map(err => {
+ const loc = err.data.location;
+ const str = `${$.redBright('error')}${$.gray(':')} ${$.bold(err.data.text)}\n` +
+ (loc
+ ? `${highlightErrorChar(loc.line_text, loc.offset)}\n` +
+ $.redBright.bold('^'.padStart(loc.column)) + '\n' +
+ `${$.bold(loc.file)}${$.gray(':')}${$.yellowBright(loc.line)}${$.gray(':')}${$.yellowBright(loc.column)} ${$.gray(loc.offset)}`
+ : ''
+ );
- exports.push(parsed.filter(x => x.type === 'ExportNamedDeclaration')
- .flatMap(i => (i as swc.ExportNamedDeclaration).specifiers
- .filter(s => s.type === 'ExportSpecifier' && !s.isTypeOnly)
- .map(s => (s as swc.NamedExportSpecifier).exported?.value ?? (s as swc.NamedExportSpecifier).orig.value)
- )
- );
- if (includeDefault) exports.push(this.#scanDefaultExport(code) ?? []);
- return exports.flat();
- }*/
-
- /*#scanDefaultExport(code: string): 'default' | undefined {
- const parsed = this.parseSync(code, {
- syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
- }).body;
-
- const defaultExportDecl = parsed.find(x => x.type === 'ExportDefaultDeclaration') as swc.ExportDefaultDeclaration | undefined;
- if (!defaultExportDecl) {
- const defaultExportExpr = parsed.find(x => x.type === 'ExportDefaultExpression') as swc.ExportDefaultExpression | undefined;
- if (!defaultExportExpr) return undefined;
- if (!defaultExportExpr.expression.type.startsWith('Ts')) return 'default';
- else return undefined;
- }
-
- if (!defaultExportDecl.decl.type.startsWith('Ts') && !Reflect.get(defaultExportDecl.decl, 'declare')) return 'default';
- else return undefined;
- }*/
+ return { __proto__: Error.prototype, stack: str };
+ });
+ const aggregate = new AggregateError(formatted, `Input code has ${formatted.length} error${formatted.length === 1 ? '' : 's'}`);
+ Error.captureStackTrace(aggregate, caller);
+ aggregate.name = 'BuildError';
+ return aggregate;
+}
- #options: TranspilerOptions;
+function highlightErrorChar(str: string, at: number): string {
+ return str.slice(0, at) + $.red(str[at]) + str.slice(at + 1);
}
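A short sketch of how a caller might consume the errors produced by formatBuildErrors above; the invalid source string is arbitrary, and the instanceof/name checks are assumptions based on how the AggregateError is constructed in this file:

```ts
import Transpiler from './transpiler.js'; // the default export added above

const transpiler = new Transpiler();
try {
  transpiler.transformSync('const = ;'); // deliberately invalid input
} catch (err) {
  // formatBuildErrors() returns an AggregateError with name 'BuildError' whose
  // .errors carry pre-formatted, chalk-colorized messages in their .stack fields.
  if (err instanceof AggregateError && err.name === 'BuildError') {
    for (const e of err.errors) console.error((e as Error).stack);
  } else {
    throw err;
  }
}
```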
diff --git a/packages/bun-polyfills/src/repl.ts b/packages/bun-polyfills/src/repl.ts
index 030e479b0..60090c7d6 100644
--- a/packages/bun-polyfills/src/repl.ts
+++ b/packages/bun-polyfills/src/repl.ts
@@ -15,7 +15,6 @@ globalThis.Bun = bun as typeof bun & {
mmap: typeof import('bun').mmap;
connect: typeof import('bun').connect;
listen: typeof import('bun').listen;
- Transpiler: typeof import('bun').Transpiler;
password: typeof import('bun').password;
CryptoHashInterface: typeof import('bun').CryptoHashInterface;
CryptoHasher: typeof import('bun').CryptoHasher;
diff --git a/packages/bun-polyfills/tsconfig.json b/packages/bun-polyfills/tsconfig.json
index b00abe344..e90140678 100644
--- a/packages/bun-polyfills/tsconfig.json
+++ b/packages/bun-polyfills/tsconfig.json
@@ -15,5 +15,5 @@
"outDir": "dist",
"types": ["node"]
},
- "include": [".", "../bun-types/index.d.ts"],
+ "include": ["src", "lib", "../bun-types/index.d.ts"],
}
diff --git a/packages/bun-wasm/index.ts b/packages/bun-wasm/index.ts
index 72eed5220..9802d537e 100644
--- a/packages/bun-wasm/index.ts
+++ b/packages/bun-wasm/index.ts
@@ -10,7 +10,7 @@ import {
encodeTransform,
type ScanResult,
type TransformResponse,
-} from "./schema";
+} from "./schema.js";
export enum Loader {
jsx = BunLoader.jsx,
@@ -54,17 +54,13 @@ function normalizeLoader(file_name: string, loader?: keyof typeof Loader): BunLo
interface WebAssemblyModule {
init(heapSize: number): number;
- transform(a: number): number;
- bun_malloc(a: number): number;
- bun_free(a: number): number;
- scan(a: number): number;
- getTests(a: number): number;
+ transform(a: bigint): bigint;
+ bun_malloc(a: number | bigint): bigint;
+ bun_free(a: bigint): void;
+ scan(a: bigint): bigint;
+ getTests(a: bigint): bigint;
}
-const ptr_converter = new ArrayBuffer(16);
-const ptr_float = new BigUint64Array(ptr_converter);
-const slice = new Uint32Array(ptr_converter);
-
const Wasi = {
clock_time_get(clk_id: unknown, tp: unknown) {
return Date.now();
},
@@ -92,11 +88,8 @@ const Wasi = {
},
};
-var scratch: Uint8Array;
-var scratch2: Uint8Array;
-
const env = {
- console_log(slice: number) {
+ console_log(slice: bigint) {
// @ts-expect-error
const text = Bun._wasmPtrLenToString(slice);
if (captureErrors) {
@@ -105,7 +98,7 @@ const env = {
}
console.log(text);
},
- console_error(slice: number) {
+ console_error(slice: bigint) {
// @ts-expect-error
const text = Bun._wasmPtrLenToString(slice);
if (captureErrors) {
@@ -114,11 +107,11 @@ const env = {
}
console.error(text);
},
- console_warn(slice: number) {
+ console_warn(slice: bigint) {
// @ts-expect-error
console.warn(Bun._wasmPtrLenToString(slice));
},
- console_info(slice: number) {
+ console_info(slice: bigint) {
// @ts-expect-error
console.info(Bun._wasmPtrLenToString(slice));
},
@@ -171,6 +164,7 @@ const env = {
},
emscripten_notify_memory_growth() {},
};
+
export class Bun {
private static has_initialized = false;
private static wasm_source: WebAssembly.WebAssemblyInstantiatedSource;
@@ -182,16 +176,22 @@ export class Bun {
return Bun.wasm_source.instance.exports.memory as WebAssembly.Memory;
}
+ private static scratch: Uint8Array = new Uint8Array(8096);
private static memory_array: Uint8Array;
private static _decoder: TextDecoder;
+ private static _encoder: TextEncoder = new TextEncoder();
- private static _wasmPtrToSlice(offset: number | bigint) {
- ptr_float[0] = typeof offset === "number" ? BigInt(offset) : offset;
- return new Uint8Array(Bun.memory.buffer, slice[0], slice[1]);
+ private static ptr_converter = new ArrayBuffer(16);
+ private static ptr_float = new BigUint64Array(Bun.ptr_converter);
+ private static ptr_slice = new Uint32Array(Bun.ptr_converter);
+
+ private static _wasmPtrToSlice(offset: bigint) {
+ Bun.ptr_float[0] = typeof offset === "number" ? BigInt(offset) : offset;
+ return new Uint8Array(Bun.memory.buffer, Bun.ptr_slice[0], Bun.ptr_slice[1]);
}
- private static _wasmPtrLenToString(slice: number) {
+ private static _wasmPtrLenToString(slice: bigint) {
if (!Bun._decoder) {
Bun._decoder = new TextDecoder("utf8");
}
@@ -201,14 +201,11 @@ export class Bun {
}
static async init(url?: URL | string | null, heapSize = 64_000_000, fetch = globalThis.fetch) {
+ if (Bun.has_initialized) return;
url ??= new URL("./bun.wasm", import.meta.url);
- scratch = new Uint8Array(8096);
- if (Bun.has_initialized) {
- return;
- }
if (typeof process === "undefined") {
- if (globalThis?.WebAssembly?.instantiateStreaming) {
+ if (globalThis.WebAssembly.instantiateStreaming) {
Bun.wasm_source = await globalThis.WebAssembly.instantiateStreaming(fetch(url), {
env: env,
wasi_snapshot_preview1: Wasi,
@@ -244,7 +241,7 @@ export class Bun {
}
static getTests(content: Uint8Array, filename = "my.test.tsx") {
- const bb = new ByteBuffer(scratch);
+ const bb = new ByteBuffer(Bun.scratch);
bb.length = 0;
bb.index = 0;
const contents_buffer = content;
@@ -313,25 +310,12 @@ export class Bun {
}
static transformSync(content: Uint8Array | string, file_name: string, loader?: keyof typeof Loader): TransformResponse {
- const bb = new ByteBuffer(scratch);
+ const bb = new ByteBuffer(Bun.scratch);
bb.length = 0;
bb.index = 0;
var contents_buffer;
if (typeof content === "string") {
- if (!scratch2) {
- scratch2 = new Uint8Array(content.length * 2);
- }
-
- let i = 0;
- for (; i < content.length; i++) {
- if (i > scratch2.length) {
- var scratch3 = new Uint8Array(scratch2.length * 2);
- scratch3.set(scratch2);
- scratch2 = scratch3;
- }
- scratch2[i] = content.charCodeAt(i);
- }
- contents_buffer = scratch2.subarray(0, i);
+ contents_buffer = Bun._encoder.encode(content);
} else {
contents_buffer = content;
}
@@ -353,21 +337,17 @@ export class Bun {
var _bb = new ByteBuffer(Bun._wasmPtrToSlice(resp_ptr));
const response = decodeTransformResponse(_bb);
Bun.wasm_exports.bun_free(input_ptr);
- scratch = bb.data;
+ Bun.scratch = bb.data;
return response;
}
static scan(content: Uint8Array | string, file_name: string, loader?: keyof typeof Loader): ScanResult {
- const bb = new ByteBuffer(scratch);
+ const bb = new ByteBuffer(Bun.scratch);
bb.length = 0;
bb.index = 0;
var contents_buffer;
if (typeof content === "string") {
- if (!scratch2) {
- scratch2 = new Uint8Array(content.length * 2);
- }
- const encode_into = new TextEncoder().encodeInto(content, scratch2);
- contents_buffer = scratch2.subarray(0, encode_into.written);
+ contents_buffer = Bun._encoder.encode(content);
} else {
contents_buffer = content;
}
@@ -388,9 +368,11 @@ export class Bun {
const resp_ptr = Bun.wasm_exports.scan(input_ptr);
var _bb = new ByteBuffer(Bun._wasmPtrToSlice(resp_ptr));
+ //console.log(resp_ptr, Bun.ptr_slice[0], Bun.ptr_slice[1], new Uint8Array(Bun.memory.buffer, Bun.ptr_slice[0], Bun.ptr_slice[1] + 82));
+ //console.log(_bb);
const response = decodeScanResult(_bb);
Bun.wasm_exports.bun_free(input_ptr);
- scratch = bb.data;
+ Bun.scratch = bb.data;
return response;
}
}
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts
index f1d5b5f62..3ec03e213 100644
--- a/src/api/schema.d.ts
+++ b/src/api/schema.d.ts
@@ -579,6 +579,7 @@ export interface Scan {
export interface ScanResult {
exports: string[];
imports: ScannedImport[];
+ errors: Message[];
}
export interface ScannedImport {
diff --git a/src/api/schema.js b/src/api/schema.js
index 4931cd716..6fb4b1d8d 100644
--- a/src/api/schema.js
+++ b/src/api/schema.js
@@ -2058,6 +2058,9 @@ function decodeScanResult(bb) {
var length = bb.readVarUint();
var values = (result["imports"] = Array(length));
for (var i = 0; i < length; i++) values[i] = decodeScannedImport(bb);
+ var length = bb.readVarUint();
+ var values = (result["errors"] = Array(length));
+ for (var i = 0; i < length; i++) values[i] = decodeMessage(bb);
return result;
}
@@ -2087,6 +2090,19 @@ function encodeScanResult(message, bb) {
} else {
throw new Error('Missing required field "imports"');
}
+
+ var value = message["errors"];
+ if (value != null) {
+ var values = value,
+ n = values.length;
+ bb.writeVarUint(n);
+ for (var i = 0; i < n; i++) {
+ value = values[i];
+ encodeMessage(value, bb);
+ }
+ } else {
+ throw new Error('Missing required field "errors"');
+ }
}
function decodeScannedImport(bb) {
diff --git a/src/api/schema.peechy b/src/api/schema.peechy
index dc8e312a5..8185733c9 100644
--- a/src/api/schema.peechy
+++ b/src/api/schema.peechy
@@ -377,6 +377,7 @@ message Scan {
struct ScanResult {
string[] exports;
ScannedImport[] imports;
+ Message[] errors;
}
struct ScannedImport {
@@ -414,7 +415,7 @@ struct TransformResponse {
enum MessageLevel {
err = 1;
- warn =2;
+ warn = 2;
note = 3;
info = 4;
debug = 5;
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 93b526d47..ae63af87a 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -2117,17 +2117,22 @@ pub const Api = struct {
/// imports
imports: []const ScannedImport,
+ /// errors
+ errors: []const Message,
+
pub fn decode(reader: anytype) anyerror!ScanResult {
var this = std.mem.zeroes(ScanResult);
this.exports = try reader.readArray([]const u8);
this.imports = try reader.readArray(ScannedImport);
+ this.errors = try reader.readArray(Message);
return this;
}
pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
try writer.writeArray([]const u8, this.exports);
try writer.writeArray(ScannedImport, this.imports);
+ try writer.writeArray(Message, this.errors);
}
};
diff --git a/src/main_wasm.zig b/src/main_wasm.zig
index e7fd55d80..6d3ba2466 100644
--- a/src/main_wasm.zig
+++ b/src/main_wasm.zig
@@ -604,26 +604,44 @@ export fn scan(opts_array: u64) u64 {
parser.options.ts = loader.isTypeScript();
parser.options.features.top_level_await = true;
const result = parser.parse() catch unreachable;
- var scan_result = std.mem.zeroes(Api.ScanResult);
- var output = std.ArrayList(u8).init(default_allocator);
- var output_writer = output.writer();
- const Encoder = ApiWriter(@TypeOf(output_writer));
+ if (log.errors == 0) {
+ var scan_result = std.mem.zeroes(Api.ScanResult);
+ var output = std.ArrayList(u8).init(default_allocator);
+ var output_writer = output.writer();
+ const Encoder = ApiWriter(@TypeOf(output_writer));
+
+ if (result == .ast) {
+ var scanned_imports = allocator.alloc(Api.ScannedImport, result.ast.import_records.len) catch unreachable;
+ var scanned_i: usize = 0;
+ for (result.ast.import_records.slice()) |import_record| {
+ if (import_record.kind == .internal) continue;
+ scanned_imports[scanned_i] = Api.ScannedImport{ .path = import_record.path.text, .kind = import_record.kind.toAPI() };
+ scanned_i += 1;
+ }
- if (result == .ast) {
- var scanned_imports = allocator.alloc(Api.ScannedImport, result.ast.import_records.len) catch unreachable;
- var scanned_i: usize = 0;
- for (result.ast.import_records.slice()) |import_record| {
- if (import_record.kind == .internal) continue;
- scanned_imports[scanned_i] = Api.ScannedImport{ .path = import_record.path.text, .kind = import_record.kind.toAPI() };
- scanned_i += 1;
+ scan_result = Api.ScanResult{
+ .exports = result.ast.named_exports.keys(),
+ .imports = scanned_imports[0..scanned_i],
+ .errors = (log.toAPI(allocator) catch unreachable).msgs,
+ };
}
- scan_result = Api.ScanResult{ .exports = result.ast.named_exports.keys(), .imports = scanned_imports[0..scanned_i] };
+ var encoder = Encoder.init(output_writer);
+ scan_result.encode(&encoder) catch unreachable;
+ return @as(u64, @bitCast([2]u32{ @intFromPtr(output.items.ptr), output.items.len }));
+ } else {
+ var output = std.ArrayList(u8).init(default_allocator);
+ var output_writer = output.writer();
+ const Encoder = ApiWriter(@TypeOf(output_writer));
+ var scan_result = Api.ScanResult{
+ .exports = &.{},
+ .imports = &.{},
+ .errors = (log.toAPI(allocator) catch unreachable).msgs,
+ };
+ var encoder = Encoder.init(output_writer);
+ scan_result.encode(&encoder) catch unreachable;
+ return @as(u64, @bitCast([2]u32{ @intFromPtr(output.items.ptr), output.items.len }));
}
-
- var encoder = Encoder.init(output_writer);
- scan_result.encode(&encoder) catch unreachable;
- return @as(u64, @bitCast([2]u32{ @intFromPtr(output.items.ptr), output.items.len }));
}
// pub fn main() anyerror!void {}
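
To make the bigint-related changes above easier to follow: on the Zig side, scan() and transform() now return a pointer/length pair packed into a single u64 via @as(u64, @bitCast([2]u32{ ptr, len })), and on the JS side bun-wasm's _wasmPtrToSlice splits that value back apart through a shared ArrayBuffer. A standalone sketch of the unpacking, with illustrative names:

```ts
// Mirrors ptr_converter / ptr_float / ptr_slice in packages/bun-wasm/index.ts:
// one buffer viewed as a single 64-bit lane and as two 32-bit lanes.
const converter = new ArrayBuffer(16);
const u64View = new BigUint64Array(converter);
const u32View = new Uint32Array(converter);

function wasmPtrToSlice(packed: bigint, memory: WebAssembly.Memory): Uint8Array {
  u64View[0] = packed;       // the packed u64 returned by the wasm export
  const ptr = u32View[0];    // low 32 bits: byte offset into wasm memory (little-endian host assumed)
  const len = u32View[1];    // high 32 bits: slice length
  return new Uint8Array(memory.buffer, ptr, len);
}
```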