-rw-r--r--  src/bundler.zig                  13
-rw-r--r--  src/bundler/bundle_v2.zig        14
-rw-r--r--  src/js_lexer.zig                166
-rw-r--r--  src/js_parser.zig                51
-rw-r--r--  src/options.zig                  83
-rw-r--r--  src/resolver/tsconfig_json.zig   10
-rw-r--r--  src/string_immutable.zig        102
7 files changed, 367 insertions, 72 deletions
diff --git a/src/bundler.zig b/src/bundler.zig
index 874bbff3a..86a5779b8 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -564,6 +564,10 @@ pub const Bundler = struct {
try this.runEnvLoader();
+ if (this.env.isProduction()) {
+ this.options.jsx.setProduction(this.allocator, true);
+ }
+
js_ast.Expr.Data.Store.create(this.allocator);
js_ast.Stmt.Data.Store.create(this.allocator);
@@ -583,15 +587,8 @@ pub const Bundler = struct {
if (this.options.define.dots.get("NODE_ENV")) |NODE_ENV| {
if (NODE_ENV.len > 0 and NODE_ENV[0].data.value == .e_string and NODE_ENV[0].data.value.e_string.eqlComptime("production")) {
this.options.production = true;
- this.options.jsx.development = false;
-
- if (this.options.jsx.import_source.ptr == options.JSX.Pragma.Defaults.ImportSourceDev) {
- this.options.jsx.import_source = options.JSX.Pragma.Defaults.ImportSource;
- }
- if (options.JSX.Pragma.Defaults.ImportSource == this.options.jsx.import_source.ptr or
- strings.eqlComptime(this.options.jsx.import_source, comptime options.JSX.Pragma.Defaults.ImportSource) or strings.eqlComptime(this.options.jsx.package_name, "react"))
- {
+ if (strings.eqlComptime(this.options.jsx.package_name, "react")) {
if (this.options.jsx_optimization_inline == null) {
this.options.jsx_optimization_inline = true;
}
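
The production check above now only asks whether the configured JSX package is "react"; the import-source switching it used to do inline moved into JSX.Pragma.setProduction (see the options.zig hunk below). A minimal sketch of the resulting gating, using a hypothetical standalone helper rather than the real Bundler fields:

const std = @import("std");

// Hypothetical restatement of the gating above: inline-JSX optimization is
// only considered when NODE_ENV resolves to "production" and the configured
// JSX package is plain "react".
fn shouldInlineJsx(node_env: []const u8, jsx_package_name: []const u8) bool {
    return std.mem.eql(u8, node_env, "production") and
        std.mem.eql(u8, jsx_package_name, "react");
}

test "inline JSX optimization gating (sketch)" {
    try std.testing.expect(shouldInlineJsx("production", "react"));
    try std.testing.expect(!shouldInlineJsx("development", "react"));
    try std.testing.expect(!shouldInlineJsx("production", "preact"));
}
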
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 76cc18877..5784eeaa9 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -391,6 +391,8 @@ pub const BundleV2 = struct {
bundler.options.mark_bun_builtins_as_external = bundler.options.platform.isBun();
bundler.resolver.opts.mark_bun_builtins_as_external = bundler.options.platform.isBun();
+ var this = generator;
+
defer allocator.destroy(generator);
generator.* = BundleV2{
.bundler = bundler,
@@ -429,18 +431,6 @@ pub const BundleV2 = struct {
var batch = ThreadPoolLib.Batch{};
- var this = generator;
-
- if (this.bundler.env.isProduction()) {
- this.bundler.options.jsx.development = false;
- }
-
- if (!this.bundler.options.jsx.development) {
- this.bundler.options.jsx.import_source = std.fmt.allocPrint(allocator, "{s}/jsx-runtime", .{generator.bundler.options.jsx.classic_import_source}) catch unreachable;
- }
-
- this.bundler.resolver.opts.jsx = this.bundler.options.jsx;
-
try pool.start(this);
{
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index f5e00d1ae..95460ee05 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -36,6 +36,26 @@ fn notimpl() noreturn {
pub var emptyJavaScriptString = ([_]u16{0});
+pub const JSXPragma = struct {
+ _jsx: js_ast.Span = .{},
+ _jsxFrag: js_ast.Span = .{},
+ _jsxRuntime: js_ast.Span = .{},
+ _jsxImportSource: js_ast.Span = .{},
+
+ pub fn jsx(this: *const JSXPragma) ?js_ast.Span {
+ return if (this._jsx.text.len > 0) this._jsx else null;
+ }
+ pub fn jsxFrag(this: *const JSXPragma) ?js_ast.Span {
+ return if (this._jsxFrag.text.len > 0) this._jsxFrag else null;
+ }
+ pub fn jsxRuntime(this: *const JSXPragma) ?js_ast.Span {
+ return if (this._jsxRuntime.text.len > 0) this._jsxRuntime else null;
+ }
+ pub fn jsxImportSource(this: *const JSXPragma) ?js_ast.Span {
+ return if (this._jsxImportSource.text.len > 0) this._jsxImportSource else null;
+ }
+};
+
pub const JSONOptions = struct {
/// Enable JSON-specific warnings/errors
is_json: bool = false,
@@ -127,8 +147,8 @@ fn NewLexer_(
all_original_comments: ?[]js_ast.G.Comment = null,
code_point: CodePoint = -1,
identifier: []const u8 = "",
- jsx_factory_pragma_comment: ?js_ast.Span = null,
- jsx_fragment_pragma_comment: ?js_ast.Span = null,
+ jsx_pragma: JSXPragma = .{},
+ bun_pragma: bool = false,
source_mapping_url: ?js_ast.Span = null,
number: f64 = 0.0,
rescan_close_brace_as_template_token: bool = false,
@@ -167,8 +187,7 @@ fn NewLexer_(
.code_point = self.code_point,
.identifier = self.identifier,
.regex_flags_start = self.regex_flags_start,
- .jsx_factory_pragma_comment = self.jsx_factory_pragma_comment,
- .jsx_fragment_pragma_comment = self.jsx_fragment_pragma_comment,
+ .jsx_pragma = self.jsx_pragma,
.source_mapping_url = self.source_mapping_url,
.number = self.number,
.rescan_close_brace_as_template_token = self.rescan_close_brace_as_template_token,
@@ -1819,10 +1838,6 @@ fn NewLexer_(
if (comptime is_json)
return;
- // TODO: @jsx annotations
- if (lexer.has_pure_comment_before)
- return;
-
var rest = text[0..end_comment_text];
const end = rest.ptr + rest.len;
@@ -1848,20 +1863,35 @@ fn NewLexer_(
for (@as([strings.ascii_vector_size]u8, vec), 0..) |c, i| {
switch (c) {
'@', '#' => {
+ const chunk = rest[i + 1 ..];
if (!lexer.has_pure_comment_before) {
- if (strings.indexOf(rest[i + 1 ..], "__PURE__")) |pure_i| {
- const pure_prefix = rest[i + 1 ..][0..pure_i];
- const pure_suffix = rest[i + 1 ..][pure_i + "__PURE__".len ..];
- const has_prefix = pure_prefix.len == 0 or (strings.isAllASCII(pure_prefix) and !isIdentifierStart(pure_prefix[pure_prefix.len - 1]));
-
- // TODO: unicode whitespace for pure comments
- // This misses non-ascii whitespace, but that's fine for now
- const has_suffix = pure_suffix.len == 0 or !isIdentifierContinue(pure_suffix[0]);
- lexer.has_pure_comment_before = has_prefix and has_suffix;
-
- // TODO: handle JSX annotations
- if (lexer.has_pure_comment_before)
- return;
+ if (strings.hasPrefixWithWordBoundary(chunk, "__PURE__")) {
+ lexer.has_pure_comment_before = true;
+ continue;
+ }
+ }
+
+ if (strings.hasPrefixWithWordBoundary(chunk, "bun")) {
+ lexer.bun_pragma = true;
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsx")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsx", chunk)) |span| {
+ lexer.jsx_pragma._jsx = span;
+ }
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsxFrag")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsxFrag", chunk)) |span| {
+ lexer.jsx_pragma._jsxFrag = span;
+ }
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsxRuntime")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsxRuntime", chunk)) |span| {
+ lexer.jsx_pragma._jsxRuntime = span;
+ }
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsxImportSource")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsxImportSource", chunk)) |span| {
+ lexer.jsx_pragma._jsxImportSource = span;
+ }
+ } else if (i == 2 and strings.hasPrefixComptime(chunk, " sourceMappingURL=")) {
+ if (PragmaArg.scan(.no_space_first, lexer.start + i + 1, " sourceMappingURL=", chunk)) |span| {
+ lexer.source_mapping_url = span;
}
}
},
@@ -1883,20 +1913,36 @@ fn NewLexer_(
rest = rest[1..];
switch (c) {
'@', '#' => {
+ const chunk = rest;
+ const i = @ptrToInt(chunk.ptr) - @ptrToInt(text.ptr);
if (!lexer.has_pure_comment_before) {
- if (strings.indexOf(rest, "__PURE__")) |pure_i| {
- const pure_prefix = rest[0..pure_i];
- const pure_suffix = rest[pure_i + "__PURE__".len ..];
- const has_prefix = pure_prefix.len == 0 or (strings.isAllASCII(pure_prefix) and !isIdentifierStart(pure_prefix[pure_prefix.len - 1]));
-
- // TODO: unicode whitespace for pure comments
- // This misses non-ascii whitespace, but that's fine for now
- const has_suffix = pure_suffix.len == 0 or !isIdentifierContinue(pure_suffix[0]);
- lexer.has_pure_comment_before = (has_prefix and has_suffix);
-
- // TODO: handle JSX annotations
- if (lexer.has_pure_comment_before)
- return;
+ if (strings.hasPrefixWithWordBoundary(chunk, "__PURE__")) {
+ lexer.has_pure_comment_before = true;
+ continue;
+ }
+ }
+
+ if (strings.hasPrefixWithWordBoundary(chunk, "bun")) {
+ lexer.bun_pragma = true;
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsx")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsx", chunk)) |span| {
+ lexer.jsx_pragma._jsx = span;
+ }
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsxFrag")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsxFrag", chunk)) |span| {
+ lexer.jsx_pragma._jsxFrag = span;
+ }
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsxRuntime")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsxRuntime", chunk)) |span| {
+ lexer.jsx_pragma._jsxRuntime = span;
+ }
+ } else if (strings.hasPrefixWithWordBoundary(chunk, "jsxImportSource")) {
+ if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsxImportSource", chunk)) |span| {
+ lexer.jsx_pragma._jsxImportSource = span;
+ }
+ } else if (i == 2 and strings.hasPrefixComptime(chunk, " sourceMappingURL=")) {
+ if (PragmaArg.scan(.no_space_first, lexer.start + i + 1, " sourceMappingURL=", chunk)) |span| {
+ lexer.source_mapping_url = span;
}
}
},
@@ -3121,6 +3167,58 @@ pub fn isLatin1Identifier(comptime Buffer: type, name: Buffer) bool {
return true;
}
+pub const PragmaArg = enum {
+ no_space_first,
+ skip_space_first,
+
+ pub fn scan(kind: PragmaArg, offset_: usize, pragma: string, text_: string) ?js_ast.Span {
+ var text = text_[pragma.len..];
+ var iter = strings.CodepointIterator.init(text);
+
+ var cursor = strings.CodepointIterator.Cursor{};
+ if (!iter.next(&cursor)) {
+ return null;
+ }
+
+ const start: u32 = brk: {
+ // One or more whitespace characters
+ if (kind == .skip_space_first) {
+ if (!isWhitespace(cursor.c)) {
+ return null;
+ }
+
+ while (iter.next(&cursor)) {
+ if (!isWhitespace(cursor.c)) {
+ break;
+ }
+ }
+
+ break :brk cursor.i;
+ }
+
+ break :brk 0;
+ };
+
+ var end = cursor.i -| @as(u32, cursor.width);
+
+ while (!isWhitespace(cursor.c)) : (end = cursor.i -| @as(u32, cursor.width)) {
+ if (!iter.next(&cursor)) {
+ break;
+ }
+ }
+
+ return js_ast.Span{
+ .range = logger.Range{
+ .len = @intCast(i32, end) - @intCast(i32, start),
+ .loc = logger.Loc{
+ .start = @intCast(i32, start + @intCast(u32, offset_) + @intCast(u32, pragma.len)),
+ },
+ },
+ .text = text[start..end],
+ };
+ }
+};
+
fn skipToInterestingCharacterInMultilineComment(text_: []const u8) ?u32 {
var text = text_;
const star = @splat(strings.ascii_vector_size, @as(u8, '*'));
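
The lexer now collects @jsx, @jsxFrag, @jsxRuntime, @jsxImportSource, and @bun pragmas from leading comments instead of returning as soon as a __PURE__ annotation is seen. A rough, ASCII-only sketch of the argument scan PragmaArg.scan performs; scanPragmaArg here is a hypothetical simplification that returns only the argument text, while the real version also records a source range and handles Unicode whitespace:

const std = @import("std");

// Given the comment text starting at the pragma word (e.g. "jsx h"), skip the
// word, require at least one space, then take the following run of
// non-whitespace characters as the pragma argument.
fn scanPragmaArg(pragma: []const u8, chunk: []const u8) ?[]const u8 {
    if (!std.mem.startsWith(u8, chunk, pragma)) return null;
    var rest = chunk[pragma.len..];
    if (rest.len == 0 or rest[0] != ' ') return null;
    while (rest.len > 0 and rest[0] == ' ') rest = rest[1..];
    var end: usize = 0;
    while (end < rest.len and rest[end] != ' ' and rest[end] != '\t') end += 1;
    if (end == 0) return null;
    return rest[0..end];
}

test "JSX pragma comments (sketch)" {
    // /** @jsx h */ overrides the configured factory with "h".
    try std.testing.expectEqualStrings("h", scanPragmaArg("jsx", "jsx h").?);
    try std.testing.expectEqualStrings("preact", scanPragmaArg("jsxImportSource", "jsxImportSource preact").?);
    // A pragma with no argument yields nothing.
    try std.testing.expect(scanPragmaArg("jsxFrag", "jsxFrag") == null);
}
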
diff --git a/src/js_parser.zig b/src/js_parser.zig
index ebb7a82d7..9d585c619 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -108,13 +108,15 @@ const JSXImport = enum {
jsxs: ?LocRef = null,
Fragment: ?LocRef = null,
createElement: ?LocRef = null,
+ factory_name: []const u8 = "createElement",
+ fragment_name: []const u8 = "Fragment",
pub fn get(this: *const Symbols, name: []const u8) ?Ref {
if (strings.eqlComptime(name, "jsx")) return if (this.jsx) |jsx| jsx.ref.? else null;
if (strings.eqlComptime(name, "jsxDEV")) return if (this.jsxDEV) |jsx| jsx.ref.? else null;
if (strings.eqlComptime(name, "jsxs")) return if (this.jsxs) |jsxs| jsxs.ref.? else null;
- if (strings.eqlComptime(name, "Fragment")) return if (this.Fragment) |Fragment| Fragment.ref.? else null;
- if (strings.eqlComptime(name, "createElement")) return if (this.createElement) |createElement| createElement.ref.? else null;
+ if (strings.eql(name, this.fragment_name)) return if (this.Fragment) |Fragment| Fragment.ref.? else null;
+ if (strings.eql(name, this.factory_name)) return if (this.createElement) |createElement| createElement.ref.? else null;
return null;
}
@@ -3948,7 +3950,7 @@ pub const Parser = struct {
&before,
&p.jsx_imports,
null,
- "jsx",
+ "",
false,
) catch unreachable;
}
@@ -3960,7 +3962,7 @@ pub const Parser = struct {
&before,
&p.jsx_imports,
null,
- "React",
+ "",
false,
) catch unreachable;
}
@@ -5889,6 +5891,28 @@ fn NewParser_(
p.module_scope = p.current_scope;
p.has_es_module_syntax = p.esm_import_keyword.len > 0 or p.esm_export_keyword.len > 0 or p.top_level_await_keyword.len > 0;
+ if (p.lexer.jsx_pragma.jsx()) |factory| {
+ p.options.jsx.factory = options.JSX.Pragma.memberListToComponentsIfDifferent(p.allocator, p.options.jsx.factory, factory.text) catch unreachable;
+ }
+
+ if (p.lexer.jsx_pragma.jsxFrag()) |fragment| {
+ p.options.jsx.fragment = options.JSX.Pragma.memberListToComponentsIfDifferent(p.allocator, p.options.jsx.fragment, fragment.text) catch unreachable;
+ }
+
+ if (p.lexer.jsx_pragma.jsxImportSource()) |import_source| {
+ p.options.jsx.import_source = try p.allocator.dupe(u8, import_source.text);
+ p.options.jsx.classic_import_source = options.JSX.Pragma.parsePackageName(p.options.jsx.import_source);
+ }
+
+ if (p.lexer.jsx_pragma.jsxRuntime()) |runtime| {
+ if (options.JSX.RuntimeMap.get(runtime.text)) |jsx_runtime| {
+ p.options.jsx.runtime = jsx_runtime;
+ } else {
+ // make this a warning instead of an error because we don't support "preserve" right now
+ try p.log.addRangeWarningFmt(p.source, runtime.range, p.allocator, "Unsupported JSX runtime: \"{s}\"", .{runtime.text});
+ }
+ }
+
// ECMAScript modules are always interpreted as strict mode. This has to be
// done before "hoistSymbols" because strict mode can alter hoisting (!).
if (p.esm_import_keyword.len > 0) {
@@ -5973,6 +5997,17 @@ fn NewParser_(
p.recordUsage(p.runtime_imports.__HMRClient.?.ref);
}
+ // "React.createElement" and "createElement" become:
+ // import { createElement } from 'react';
+ // "Foo.Bar.createElement" becomes:
+ // import { Bar } from 'foo';
+ // Usages become Bar.createElement
+ if (p.options.jsx.fragment.len > 0)
+ p.jsx_imports.fragment_name = p.options.jsx.fragment[if (p.options.jsx.fragment.len > 1) 1 else 0];
+
+ if (p.options.jsx.factory.len > 0)
+ p.jsx_imports.factory_name = p.options.jsx.factory[if (p.options.jsx.factory.len > 1) 1 else 0];
+
switch (comptime jsx_transform_type) {
.react => {
if (!p.options.bundle) {
@@ -16627,9 +16662,15 @@ fn NewParser_(
inline else => |field| {
const ref: Ref = brk: {
if (@field(jsx_imports, @tagName(field)) == null) {
+ const symbol_name = switch (kind) {
+ .createElement => p.options.jsx.factory[p.options.jsx.factory.len - 1],
+ .Fragment => p.options.jsx.fragment[p.options.jsx.fragment.len - 1],
+ else => @tagName(field),
+ };
+
const loc_ref = LocRef{
.loc = loc,
- .ref = p.newSymbol(.other, @tagName(field)) catch unreachable,
+ .ref = p.newSymbol(.other, symbol_name) catch unreachable,
};
p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable;
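
With the pragma overrides applied, the generated import uses the configured factory/fragment member list rather than hard-coded "createElement"/"Fragment" names. The comment in the hunk above spells out the mapping; the sketch below (importedName is a hypothetical helper, not code from the diff) shows the index choice it implies:

const std = @import("std");

// Mirrors `factory[if (factory.len > 1) 1 else 0]` above:
//   ["createElement"]               -> import { createElement } from "react"
//   ["React", "createElement"]      -> import { createElement } from "react"
//   ["Foo", "Bar", "createElement"] -> import { Bar } from "foo"; usages become Bar.createElement
fn importedName(factory: []const []const u8) []const u8 {
    return factory[if (factory.len > 1) 1 else 0];
}

test "JSX factory import name (sketch)" {
    try std.testing.expectEqualStrings("createElement", importedName(&[_][]const u8{"createElement"}));
    try std.testing.expectEqualStrings("createElement", importedName(&[_][]const u8{ "React", "createElement" }));
    try std.testing.expectEqualStrings("Bar", importedName(&[_][]const u8{ "Foo", "Bar", "createElement" }));
}
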
diff --git a/src/options.zig b/src/options.zig
index feadc08d7..f8149a0d2 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -884,6 +884,7 @@ pub const ESMConditions = struct {
pub const JSX = struct {
pub const RuntimeMap = bun.ComptimeStringMap(JSX.Runtime, .{
+ .{ "classic", JSX.Runtime.classic },
.{ "react", JSX.Runtime.classic },
.{ "react-jsx", JSX.Runtime.automatic },
.{ "react-jsxDEV", JSX.Runtime.automatic },
@@ -935,6 +936,36 @@ pub const JSX = struct {
return strings.eqlComptime(pragma.package_name, "react") or strings.eqlComptime(pragma.package_name, "@emotion/jsx") or strings.eqlComptime(pragma.package_name, "@emotion/react");
}
+ pub fn setImportSource(pragma: *Pragma, allocator: std.mem.Allocator, suffix: []const u8) void {
+ strings.concatIfNeeded(
+ allocator,
+ &pragma.import_source,
+ &[_]string{
+ pragma.package_name,
+ suffix,
+ },
+ &.{
+ Defaults.ImportSource,
+ Defaults.ImportSourceDev,
+ },
+ ) catch unreachable;
+ }
+
+ pub fn setProduction(pragma: *Pragma, allocator: std.mem.Allocator, is_production: bool) void {
+ pragma.development = !is_production;
+ const package_name = parsePackageName(pragma.import_source);
+ pragma.package_name = package_name;
+ pragma.classic_import_source = package_name;
+
+ if (is_production) {
+ pragma.setImportSource(allocator, "/jsx-runtime");
+ pragma.jsx = "jsx";
+ } else {
+ pragma.setImportSource(allocator, "/jsx-dev-runtime");
+ pragma.jsx = "jsxDEV";
+ }
+ }
+
pub const Defaults = struct {
pub const Factory = &[_]string{"createElement"};
pub const Fragment = &[_]string{"Fragment"};
@@ -950,36 +981,38 @@ pub const JSX = struct {
// saves an allocation for the majority case
pub fn memberListToComponentsIfDifferent(allocator: std.mem.Allocator, original: []const string, new: string) ![]const string {
var splitter = std.mem.split(u8, new, ".");
+ const count = strings.countChar(new, '.') + 1;
var needs_alloc = false;
- var count: usize = 0;
+ var current_i: usize = 0;
while (splitter.next()) |str| {
- const i = (splitter.index orelse break);
- count = i;
- if (i > original.len) {
+ if (str.len == 0) continue;
+ if (current_i >= original.len) {
needs_alloc = true;
break;
}
- if (!strings.eql(original[i], str)) {
+ if (!strings.eql(original[current_i], str)) {
needs_alloc = true;
break;
}
+ current_i += 1;
}
if (!needs_alloc) {
return original;
}
- var out = try allocator.alloc(string, count + 1);
+ var out = try allocator.alloc(string, count);
splitter = std.mem.split(u8, new, ".");
var i: usize = 0;
while (splitter.next()) |str| {
+ if (str.len == 0) continue;
out[i] = str;
i += 1;
}
- return out;
+ return out[0..i];
}
pub fn fromApi(jsx: api.Api.Jsx, allocator: std.mem.Allocator) !Pragma {
@@ -1066,6 +1099,7 @@ pub fn definesFromTransformOptions(
platform: Platform,
loader: ?*DotEnv.Loader,
framework_env: ?*const Env,
+ NODE_ENV: ?string,
) !*defines.Define {
var input_user_define = _input_define orelse std.mem.zeroes(Api.StringMap);
@@ -1105,8 +1139,35 @@ pub fn definesFromTransformOptions(
}
}
- if (input_user_define.keys.len == 0) {
- try user_defines.put(DefaultUserDefines.NodeEnv.Key, DefaultUserDefines.NodeEnv.Value);
+ if (NODE_ENV) |node_env| {
+ if (node_env.len > 0) {
+ var quoted_node_env: string = "";
+ if ((strings.startsWithChar(node_env, '"') and strings.endsWithChar(node_env, '"')) or
+ (strings.startsWithChar(node_env, '\'') and strings.endsWithChar(node_env, '\'')))
+ {
+ quoted_node_env = node_env;
+ } else {
+ // avoid allocating if we can
+ if (strings.eqlComptime(node_env, "production")) {
+ quoted_node_env = "\"production\"";
+ } else if (strings.eqlComptime(node_env, "development")) {
+ quoted_node_env = "\"development\"";
+ } else if (strings.eqlComptime(node_env, "test")) {
+ quoted_node_env = "\"test\"";
+ } else {
+ quoted_node_env = try std.fmt.allocPrint(allocator, "\"{s}\"", .{node_env});
+ }
+ }
+
+ _ = try user_defines.getOrPutValue(
+ "process.env.NODE_ENV",
+ quoted_node_env,
+ );
+ _ = try user_defines.getOrPutValue(
+ "process.env.BUN_ENV",
+ quoted_node_env,
+ );
+ }
}
if (hmr) {
@@ -1379,6 +1440,10 @@ pub const BundleOptions = struct {
this.platform,
loader_,
env,
+ if (loader_) |e|
+ e.map.get("BUN_ENV") orelse e.map.get("NODE_ENV")
+ else
+ null,
);
this.defines_loaded = true;
}
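
memberListToComponentsIfDifferent now counts components up front and skips empty segments, so a pragma value like "Foo.Bar.createElement" produces the member list the parser later indexes into. A simplified sketch of that splitting, without the reuse-the-original-slice fast path; memberListFromPragma is a hypothetical stand-in:

const std = @import("std");

// Split a pragma value on "." into its member components, skipping empty
// segments (e.g. a stray trailing dot). The real function also returns the
// original list untouched when the components already match, saving an
// allocation in the common case.
fn memberListFromPragma(allocator: std.mem.Allocator, new: []const u8) ![]const []const u8 {
    var count: usize = 0;
    var it = std.mem.split(u8, new, ".");
    while (it.next()) |part| {
        if (part.len == 0) continue;
        count += 1;
    }

    var out = try allocator.alloc([]const u8, count);
    it = std.mem.split(u8, new, ".");
    var i: usize = 0;
    while (it.next()) |part| {
        if (part.len == 0) continue;
        out[i] = part;
        i += 1;
    }
    return out;
}

test "member list splitting (sketch)" {
    const list = try memberListFromPragma(std.testing.allocator, "Foo.Bar.createElement");
    defer std.testing.allocator.free(list);
    try std.testing.expectEqual(@as(usize, 3), list.len);
    try std.testing.expectEqualStrings("Foo", list[0]);
    try std.testing.expectEqualStrings("createElement", list[2]);
}
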
diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig
index dbed959fd..68d3f28c9 100644
--- a/src/resolver/tsconfig_json.zig
+++ b/src/resolver/tsconfig_json.zig
@@ -104,8 +104,9 @@ pub const TSConfigJSON = struct {
log: *logger.Log,
source: logger.Source,
json_cache: *cache.Json,
- is_jsx_development: bool,
+ is_jsx_development_: bool,
) anyerror!?*TSConfigJSON {
+ var is_jsx_development = is_jsx_development_;
// Unfortunately "tsconfig.json" isn't actually JSON. It's some other
// format that appears to be defined by the implementation details of the
// TypeScript compiler.
@@ -160,7 +161,8 @@ pub const TSConfigJSON = struct {
if (options.JSX.RuntimeMap.get(str)) |runtime| {
result.jsx.runtime = runtime;
if (runtime == .automatic) {
- result.jsx.development = strings.eqlComptime(str, "react-jsxDEV");
+ result.jsx.setProduction(allocator, !strings.eqlComptime(str, "react-jsxDEV"));
+ is_jsx_development = result.jsx.development;
result.jsx_flags.insert(.development);
}
@@ -178,9 +180,9 @@ pub const TSConfigJSON = struct {
result.jsx_flags.insert(.runtime);
} else {
if (is_jsx_development) {
- result.jsx.import_source = std.fmt.allocPrint(allocator, "{s}/jsx-dev-runtime", .{str}) catch unreachable;
+ result.jsx.setImportSource(allocator, "/jsx-dev-runtime");
} else {
- result.jsx.import_source = std.fmt.allocPrint(allocator, "{s}/jsx-runtime", .{str}) catch unreachable;
+ result.jsx.setImportSource(allocator, "/jsx-runtime");
}
}
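
Assuming setImportSource concatenates the package name with the given suffix (as defined in the options.zig hunk above), the tsconfig path should end up with the usual automatic-runtime entry points. expectedImportSource below is a hypothetical helper that only illustrates the expected results; it is not code from the diff:

const std = @import("std");

// Dev builds import from "<package>/jsx-dev-runtime", production builds from
// "<package>/jsx-runtime".
fn expectedImportSource(allocator: std.mem.Allocator, package: []const u8, development: bool) ![]const u8 {
    const suffix: []const u8 = if (development) "/jsx-dev-runtime" else "/jsx-runtime";
    return std.mem.concat(allocator, u8, &[_][]const u8{ package, suffix });
}

test "jsxImportSource resolution (sketch)" {
    const a = std.testing.allocator;
    const dev = try expectedImportSource(a, "preact", true);
    defer a.free(dev);
    try std.testing.expectEqualStrings("preact/jsx-dev-runtime", dev);

    const prod = try expectedImportSource(a, "preact", false);
    defer a.free(prod);
    try std.testing.expectEqualStrings("preact/jsx-runtime", prod);
}
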
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index a611b302e..44d64dc49 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -4318,3 +4318,105 @@ pub fn leftHasAnyInRight(to_check: []const string, against: []const string) bool
}
return false;
}
+
+pub fn hasPrefixWithWordBoundary(input: []const u8, comptime prefix: []const u8) bool {
+ if (hasPrefixComptime(input, prefix)) {
+ if (input.len == prefix.len) return true;
+
+ const next = input[prefix.len..];
+ var bytes: [4]u8 = .{
+ next[0],
+ if (next.len > 1) next[1] else 0,
+ if (next.len > 2) next[2] else 0,
+ if (next.len > 3) next[3] else 0,
+ };
+
+ if (!bun.js_lexer.isIdentifierContinue(decodeWTF8RuneT(&bytes, wtf8ByteSequenceLength(next[0]), i32, -1))) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+pub fn concatWithLength(
+ allocator: std.mem.Allocator,
+ args: []const string,
+ length: usize,
+) !string {
+ var out = try allocator.alloc(u8, length);
+ var remain = out;
+ for (args) |arg| {
+ @memcpy(remain.ptr, arg.ptr, arg.len);
+ remain = remain[arg.len..];
+ }
+ std.debug.assert(remain.len == 0);
+ return out;
+}
+
+pub fn concat(
+ allocator: std.mem.Allocator,
+ args: []const string,
+) !string {
+ var length: usize = 0;
+ for (args) |arg| {
+ length += arg.len;
+ }
+ return concatWithLength(allocator, args, length);
+}
+
+pub fn concatIfNeeded(
+ allocator: std.mem.Allocator,
+ dest: *[]const u8,
+ args: []const string,
+ interned_strings_to_check: []const string,
+) !void {
+ const total_length: usize = brk: {
+ var length: usize = 0;
+ for (args) |arg| {
+ length += arg.len;
+ }
+ break :brk length;
+ };
+
+ if (total_length == 0) {
+ dest.* = "";
+ return;
+ }
+
+ if (total_length < 1024) {
+ var stack = std.heap.stackFallback(1024, allocator);
+ const stack_copy = concatWithLength(stack.get(), args, total_length) catch unreachable;
+ for (interned_strings_to_check) |interned| {
+ if (eqlLong(stack_copy, interned, true)) {
+ dest.* = interned;
+ return;
+ }
+ }
+ }
+
+ const is_needed = brk: {
+ var out = dest.*;
+ var remain = out;
+
+ for (args) |arg| {
+ if (arg.len > remain.len) {
+ break :brk true;
+ }
+
+ if (eqlLong(remain[0..arg.len], arg, true)) {
+ remain = remain[arg.len..];
+ } else {
+ break :brk true;
+ }
+ }
+
+ break :brk false;
+ };
+
+ if (!is_needed) return;
+
+ var buf = try allocator.alloc(u8, total_length);
+ dest.* = buf;
+ var remain = buf[0..];
+ for (args) |arg| {
+ @memcpy(remain.ptr, arg.ptr, arg.len);
+ remain = remain[arg.len..];
+ }
+ std.debug.assert(remain.len == 0);
+}
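
hasPrefixWithWordBoundary is what keeps "@jsx" from matching the start of "@jsxFrag": the prefix must be followed by end-of-input or by a character that cannot continue an identifier. An ASCII-only sketch of that rule (the real function above decodes a WTF-8 code point before checking):

const std = @import("std");

// Word-boundary prefix check restricted to ASCII identifier characters.
fn hasPrefixWithWordBoundaryAscii(input: []const u8, prefix: []const u8) bool {
    if (!std.mem.startsWith(u8, input, prefix)) return false;
    if (input.len == prefix.len) return true;
    const next = input[prefix.len];
    const continues_identifier = (next >= 'a' and next <= 'z') or
        (next >= 'A' and next <= 'Z') or
        (next >= '0' and next <= '9') or next == '_' or next == '$';
    return !continues_identifier;
}

test "word-boundary prefix (sketch)" {
    try std.testing.expect(hasPrefixWithWordBoundaryAscii("jsx h", "jsx"));
    try std.testing.expect(hasPrefixWithWordBoundaryAscii("bun", "bun"));
    try std.testing.expect(!hasPrefixWithWordBoundaryAscii("jsxFrag Fragment", "jsx"));
}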