author:    2023-02-24 15:53:26 +0200
committer: 2023-02-24 05:53:26 -0800
commit:    e887a064fb63347b4a4b21c282c1db01dfee98b1
tree:      6270a7ce5527ea06d709d4b92e14623518e0f5b5
parent:    6e4908e51793d82d3b6924b2ede9a02f1e95bf37
prefer `bun.copy()` over `std.mem.copy()` (#2152)
55 files changed, 546 insertions, 572 deletions
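The change swaps call sites of `std.mem.copy()` for `bun.copy()`, whose rewritten implementation (see the `src/bun.zig` hunk below) asserts `dest.len >= src.len` when assertions are enabled, returns early when the two slices share a pointer, and copies forwards or backwards depending on pointer order so overlapping ranges are handled correctly before falling back to `@memcpy`. The sketch below is a simplified, scalar illustration of those semantics, not the vectorized code from the patch; the standalone `copy` helper, its test, and the buffer values are only for illustration (it uses `@ptrToInt`, matching the Zig version the patch targets).

```zig
const std = @import("std");

// Simplified sketch of the semantics the patch's bun.copy() provides:
// copy src into dest (dest.len >= src.len), tolerating overlapping
// slices by choosing the copy direction from the pointer order.
// The real implementation adds vector- and word-sized fast paths.
fn copy(comptime T: type, dest: []T, src: []const T) void {
    std.debug.assert(dest.len >= src.len);
    if (src.ptr == dest.ptr) return;

    if (@ptrToInt(dest.ptr) < @ptrToInt(src.ptr)) {
        // dest starts before src: copy forwards so overlapping source
        // elements are read before they are overwritten.
        var i: usize = 0;
        while (i < src.len) : (i += 1) dest[i] = src[i];
    } else {
        // dest starts after src: copy backwards for the same reason.
        var i: usize = src.len;
        while (i > 0) {
            i -= 1;
            dest[i] = src[i];
        }
    }
}

test "overlapping ranges inside one buffer" {
    var buf = "abcdef".*;
    // A plain forward copy (std.mem.copy) would clobber the source here.
    copy(u8, buf[2..], buf[0..4]);
    try std.testing.expectEqualStrings("ababcd", &buf);
}
```

Because only `dest.len >= src.len` is required, many call sites in the diff also drop the explicit destination slicing, e.g. `bun.copy(u8, buf, this.slice())` where the old code wrote `std.mem.copy(u8, buf[0..this.len], this.slice())`.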
@@ -147,8 +147,8 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \ $(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \ -DCMAKE_AR=$(AR) \ -DCMAKE_RANLIB=$(which llvm-15-ranlib || which llvm-ranlib) - - + + CMAKE_FLAGS = $(CMAKE_FLAGS_WITHOUT_RELEASE) -DCMAKE_BUILD_TYPE=Release @@ -719,7 +719,7 @@ wasm: api build-obj-wasm-small .PHONY: build-obj-safe build-obj-safe: - $(ZIG) build obj -Doptimize=ReleaseSafe + $(ZIG) build obj -Doptimize=ReleaseSafe -Dcpu="$(CPU_TARGET)" UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY UWS_CXX_FLAGS = $(UWS_CC_FLAGS) -std=$(CXX_VERSION) -fno-exceptions @@ -1076,11 +1076,11 @@ release-bin-dir: .PHONY: dev-obj dev-obj: - $(ZIG) build obj --prominent-compile-errors -freference-trace + $(ZIG) build obj --prominent-compile-errors -freference-trace -Dcpu="$(CPU_TARGET)" .PHONY: dev-obj-linux dev-obj-linux: - $(ZIG) build obj -Dtarget=x86_64-linux-gnu + $(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)" .PHONY: dev dev: mkdir-dev dev-obj bun-link-lld-debug @@ -317,7 +317,7 @@ pub fn build(b: *Build) !void { obj.linkLibC(); obj.strip = false; - obj.bundle_compiler_rt = false; + obj.bundle_compiler_rt = true; obj.omit_frame_pointer = optimize != .Debug; if (b.option(bool, "for-editor", "Do not emit bin, just check for errors") orelse false) { diff --git a/src/allocators.zig b/src/allocators.zig index f7d05b849..b004fa926 100644 --- a/src/allocators.zig +++ b/src/allocators.zig @@ -391,13 +391,13 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type instance.backing_buf[instance.backing_buf_used - 1] = 0; }, []const u8, []u8, [:0]const u8, [:0]u8 => { - std.mem.copy(u8, instance.backing_buf[start .. instance.backing_buf_used - 1], _value); + bun.copy(u8, instance.backing_buf[start .. 
instance.backing_buf_used - 1], _value); instance.backing_buf[instance.backing_buf_used - 1] = 0; }, else => { var remainder = instance.backing_buf[start..]; for (_value) |val| { - std.mem.copy(u8, remainder, val); + bun.copy(u8, remainder, val); remainder = remainder[val.len..]; } remainder[0] = 0; @@ -411,12 +411,12 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type switch (comptime AppendType) { EmptyType => {}, []const u8, []u8, [:0]const u8, [:0]u8 => { - std.mem.copy(u8, value_buf, _value); + bun.copy(u8, value_buf, _value); }, else => { var remainder = value_buf; for (_value) |val| { - std.mem.copy(u8, remainder, val); + bun.copy(u8, remainder, val); remainder = remainder[val.len..]; } }, @@ -683,7 +683,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_ const start = instance.key_list_buffer_used; instance.key_list_buffer_used += key.len; slice = instance.key_list_buffer[start..instance.key_list_buffer_used]; - std.mem.copy(u8, slice, key); + bun.copy(u8, slice, key); } else { slice = try self.map.allocator.dupe(u8, key); } diff --git a/src/builder.zig b/src/builder.zig deleted file mode 100644 index 10c81aaf8..000000000 --- a/src/builder.zig +++ /dev/null @@ -1,33 +0,0 @@ -const Allocator = @import("std").mem.Allocator; -const assert = @import("std").debug.assert; -const copy = @import("std").mem.copy; -const io = @import("bun").AsyncIO; -pub fn Builder(comptime Type: type) type { - return struct { - const This = @This(); - - len: usize = 0, - cap: usize = 0, - ptr: ?[*]Type = null, - - pub fn count(this: *This, slice: Type) void { - this.cap += slice.len; - } - - pub fn allocate(this: *This, allocator: Allocator) !void { - var slice = try allocator.alloc(Type, this.cap); - this.ptr = slice.ptr; - this.len = 0; - } - - pub fn append(this: *This, item: Type) *const Type { - assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first - var result = &this.ptr.?[this.len]; - result.* = item; - this.len += 1; - assert(this.len <= this.cap); - return result; - } - }; -} diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index 05684902b..54d54f644 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -391,7 +391,7 @@ pub const ZigString = extern struct { return ""; } - std.mem.copy(u8, buf[0..this.len], this.slice()); + bun.copy(u8, buf, this.slice()); buf[this.len] = 0; return bun.cstring(buf[0..this.len]); } diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig index c87a80571..f9a9a3467 100644 --- a/src/bun.js/bindings/exports.zig +++ b/src/bun.js/bindings/exports.zig @@ -466,7 +466,7 @@ pub const ZigStackTrace = extern struct { var remain_buf = source_line_buf[0..]; var i: usize = 0; while (source_lines_iter.next()) |source| { - std.mem.copy(u8, remain_buf, source.text); + bun.copy(u8, remain_buf, source.text); const copied_line = remain_buf[0..source.text.len]; remain_buf = remain_buf[source.text.len..]; source_lines[i] = .{ .text = copied_line, .line = source.line }; diff --git a/src/bun.js/node/buffer.zig b/src/bun.js/node/buffer.zig index 8ede45f5d..5a7d64955 100644 --- a/src/bun.js/node/buffer.zig +++ b/src/bun.js/node/buffer.zig @@ -78,7 +78,7 @@ pub const BufferVectorized = struct { const minimum_contents = contents; while (buf.len >= contents.len) { const min_len = @min(contents.len, buf.len); - std.mem.copy(u8, buf[0..min_len], contents[0..min_len]); + bun.copy(u8, buf, 
contents[0..min_len]); if (buf.len <= contents.len) { break; } @@ -88,7 +88,7 @@ pub const BufferVectorized = struct { while (buf.len > 0) { const to_fill = @min(minimum_contents.len, buf.len); - std.mem.copy(u8, buf[0..to_fill], minimum_contents[0..to_fill]); + bun.copy(u8, buf, minimum_contents[0..to_fill]); buf = buf[to_fill..]; } } diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig index 4478337b5..ebb81f928 100644 --- a/src/bun.js/webcore.zig +++ b/src/bun.js/webcore.zig @@ -675,7 +675,7 @@ pub const Crypto = struct { 0 => {}, // 512 bytes or less we reuse from the same cache as UUID generation. 1...JSC.RareData.EntropyCache.size / 8 => { - std.mem.copy(u8, slice, globalThis.bunVM().rareData().entropySlice(slice.len)); + bun.copy(u8, slice, globalThis.bunVM().rareData().entropySlice(slice.len)); }, else => { bun.rand(slice); diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index 7ba7198c7..58a1dcac8 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -1026,21 +1026,15 @@ pub const Fetch = struct { url = ZigURL.parse(getAllocator(ctx).dupe(u8, request.url) catch unreachable); url_proxy_buffer = url.href; } else { - var total_len = request.url.len + proxy_url_zig.len; - - const allocator = getAllocator(ctx); - - var buffer = allocator.alloc(u8, total_len) catch { + var buffer = getAllocator(ctx).alloc(u8, request.url.len + proxy_url_zig.len) catch { JSC.JSError(bun.default_allocator, "Out of memory", .{}, ctx, exception); return null; }; + @memcpy(buffer.ptr, request.url.ptr, request.url.len); + var proxy_url_slice = buffer[request.url.len..]; + @memcpy(proxy_url_slice.ptr, proxy_url_zig.ptr, proxy_url_zig.len); - var url_slice = buffer[0..request.url.len]; - std.mem.copy(u8, url_slice, request.url); - var proxy_url_slice = buffer[request.url.len..buffer.len]; - std.mem.copy(u8, proxy_url_slice, proxy_url_zig.ptr[0..proxy_url_zig.len]); - - url = ZigURL.parse(url_slice); + url = ZigURL.parse(buffer[0..request.url.len]); proxy = ZigURL.parse(proxy_url_slice); url_proxy_buffer = buffer; } @@ -1167,21 +1161,15 @@ pub const Fetch = struct { url = ZigURL.parse(url_slice.slice()); url_proxy_buffer = url.href; } else { - const allocator = getAllocator(ctx); - - var total_len = url_zig.len + proxy_url_zig.len; - var buffer = allocator.alloc(u8, total_len) catch { + var buffer = getAllocator(ctx).alloc(u8, url_zig.len + proxy_url_zig.len) catch { JSC.JSError(bun.default_allocator, "Out of memory", .{}, ctx, exception); return null; }; + @memcpy(buffer.ptr, url_zig.ptr, url_zig.len); + var proxy_url_slice = buffer[url_zig.len..]; + @memcpy(proxy_url_slice.ptr, proxy_url_zig.ptr, proxy_url_zig.len); - var url_slice = buffer[0..url_zig.len]; - - std.mem.copy(u8, url_slice, url_zig.ptr[0..url_zig.len]); - var proxy_url_slice = buffer[url_zig.len..buffer.len]; - std.mem.copy(u8, proxy_url_slice, proxy_url_zig.ptr[0..proxy_url_zig.len]); - - url = ZigURL.parse(url_slice); + url = ZigURL.parse(buffer[0..url_zig.len]); proxy = ZigURL.parse(proxy_url_slice); url_proxy_buffer = buffer; } diff --git a/src/bun.zig b/src/bun.zig index 5d1aac9b5..df3d1a72f 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -385,38 +385,80 @@ pub inline fn range(comptime min: anytype, comptime max: anytype) [max - min]usi } pub fn copy(comptime Type: type, dest: []Type, src: []const Type) void { - std.debug.assert(dest.len >= src.len); - var input = std.mem.sliceAsBytes(src); - var output = std.mem.sliceAsBytes(dest); - var input_end = input.ptr + input.len; 
- const output_end = output.ptr + output.len; - - if (@ptrToInt(input.ptr) <= @ptrToInt(output.ptr) and @ptrToInt(output_end) <= @ptrToInt(input_end)) { - // // input is overlapping with output - if (input.len > strings.ascii_vector_size) { - const input_end_vectorized = input.ptr + input.len - (input.len % strings.ascii_vector_size); - while (input.ptr != input_end_vectorized) { - const input_vec = @as(@Vector(strings.ascii_vector_size, u8), input[0..strings.ascii_vector_size].*); - output[0..strings.ascii_vector_size].* = input_vec; + if (comptime Environment.allow_assert) std.debug.assert(dest.len >= src.len); + if (src.ptr == dest.ptr) return; + + var input: []const u8 = std.mem.sliceAsBytes(src); + var output: []u8 = std.mem.sliceAsBytes(dest); + + if (@ptrToInt(output.ptr) < @ptrToInt(input.ptr)) { + const output_end = output.ptr + input.len; + + // |--- dest ---| + // |--- src ---| + if (@ptrToInt(input.ptr) < @ptrToInt(output_end)) brk: { + while (input.len >= strings.ascii_vector_size) { + const vec = @as(@Vector(strings.ascii_vector_size, u8), input[0..strings.ascii_vector_size].*); + output[0..strings.ascii_vector_size].* = vec; input = input[strings.ascii_vector_size..]; output = output[strings.ascii_vector_size..]; + if (@ptrToInt(input.ptr) >= @ptrToInt(output_end)) break :brk; } - } - while (input.len >= @sizeOf(usize)) { - output[0..@sizeOf(usize)].* = input[0..@sizeOf(usize)].*; - input = input[@sizeOf(usize)..]; - output = output[@sizeOf(usize)..]; - } + while (input.len >= @sizeOf(usize)) { + output[0..@sizeOf(usize)].* = input[0..@sizeOf(usize)].*; + input = input[@sizeOf(usize)..]; + output = output[@sizeOf(usize)..]; + if (@ptrToInt(input.ptr) >= @ptrToInt(output_end)) break :brk; + } - while (input.ptr != input_end) { - output[0] = input[0]; - input = input[1..]; - output = output[1..]; + while (input.len > 0) { + output[0] = input[0]; + input = input[1..]; + output = output[1..]; + if (@ptrToInt(input.ptr) >= @ptrToInt(output_end)) break :brk; + } } } else { - @memcpy(output.ptr, input.ptr, input.len); + var input_end = input.ptr + input.len; + + // |--- src ---| + // |--- dest ---| + if (@ptrToInt(output.ptr) < @ptrToInt(input_end)) brk: { + while (input.len >= strings.ascii_vector_size) { + const input_start = input.len - strings.ascii_vector_size; + const output_start = output.len - strings.ascii_vector_size; + const vec = @as(@Vector(strings.ascii_vector_size, u8), input[input_start..][0..strings.ascii_vector_size].*); + output[output_start..][0..strings.ascii_vector_size].* = vec; + input = input[0..input_start]; + output = output[0..output_start]; + input_end -= strings.ascii_vector_size; + if (@ptrToInt(output.ptr) >= @ptrToInt(input_end)) break :brk; + } + + while (input.len >= @sizeOf(usize)) { + const input_start = input.len - @sizeOf(usize); + const output_start = output.len - @sizeOf(usize); + output[output_start..][0..@sizeOf(usize)].* = input[input_start..][0..@sizeOf(usize)].*; + input = input[0..input_start]; + output = output[0..output_start]; + input_end -= @sizeOf(usize); + if (@ptrToInt(output.ptr) >= @ptrToInt(input_end)) break :brk; + } + + while (input.len >= @sizeOf(usize)) { + const input_start = input.len - 1; + const output_start = output.len - 1; + output[output_start] = input[input_start]; + input = input[0..input_start]; + output = output[0..output_start]; + input_end -= 1; + if (@ptrToInt(output.ptr) >= @ptrToInt(input_end)) break :brk; + } + } } + + @memcpy(output.ptr, input.ptr, input.len); } pub const hasCloneFn = 
std.meta.trait.multiTrait(.{ std.meta.trait.isContainer, std.meta.trait.hasFn("clone") }); diff --git a/src/bundler.zig b/src/bundler.zig index e1b791b45..a43d29c54 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -1115,8 +1115,8 @@ pub const Bundler = struct { .wasm, .file, .napi => { var hashed_name = try bundler.linker.getHashedFilename(file_path, null); var pathname = try bundler.allocator.alloc(u8, hashed_name.len + file_path.name.ext.len); - std.mem.copy(u8, pathname, hashed_name); - std.mem.copy(u8, pathname[hashed_name.len..], file_path.name.ext); + bun.copy(u8, pathname, hashed_name); + bun.copy(u8, pathname[hashed_name.len..], file_path.name.ext); const dir = if (bundler.options.output_dir_handle) |output_handle| output_handle.fd else 0; output_file.value = .{ @@ -1617,16 +1617,18 @@ pub const Bundler = struct { if (strings.eqlComptime(absolute_pathname_pathname.ext, ".entry")) { const trail_dir = absolute_pathname.dirWithTrailingSlash(); - var len: usize = trail_dir.len; - std.mem.copy(u8, tmp_buildfile_buf2[0..len], trail_dir); + var len = trail_dir.len; - std.mem.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname_pathname.base); + bun.copy(u8, &tmp_buildfile_buf2, trail_dir); + bun.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname_pathname.base); len += absolute_pathname_pathname.base.len; - std.mem.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname.ext); + bun.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname.ext); len += absolute_pathname.ext.len; - std.debug.assert(len > 0); + + if (comptime Environment.allow_assert) std.debug.assert(len > 0); + const decoded_entry_point_path = tmp_buildfile_buf2[0..len]; - break :brk (try bundler.resolver.resolve(bundler.fs.top_level_dir, decoded_entry_point_path, .entry_point)); + break :brk try bundler.resolver.resolve(bundler.fs.top_level_dir, decoded_entry_point_path, .entry_point); } } @@ -1694,7 +1696,7 @@ pub const Bundler = struct { var __entry = bundler.allocator.alloc(u8, "./".len + entry.len) catch unreachable; __entry[0] = '.'; __entry[1] = '/'; - std.mem.copy(u8, __entry[2..__entry.len], entry); + bun.copy(u8, __entry[2..__entry.len], entry); entry = __entry; } diff --git a/src/bundler/entry_points.zig b/src/bundler/entry_points.zig index 9b52596f5..a501bcddb 100644 --- a/src/bundler/entry_points.zig +++ b/src/bundler/entry_points.zig @@ -85,9 +85,9 @@ pub const ClientEntryPoint = struct { var joined_base_and_dir_parts = [_]string{ original_path.dir, original_path.base }; var generated_path = Fs.FileSystem.instance.absBuf(&joined_base_and_dir_parts, outbuffer); - std.mem.copy(u8, outbuffer[generated_path.len..], ".entry"); + bun.copy(u8, outbuffer[generated_path.len..], ".entry"); generated_path = outbuffer[0 .. generated_path.len + ".entry".len]; - std.mem.copy(u8, outbuffer[generated_path.len..], original_path.ext); + bun.copy(u8, outbuffer[generated_path.len..], original_path.ext); return outbuffer[0 .. generated_path.len + original_path.ext.len]; } @@ -99,7 +99,7 @@ pub const ClientEntryPoint = struct { original_ext = original_path.ext[entry_i + "entry".len ..]; } - std.mem.copy(u8, outbuffer[generated_path.len..], original_ext); + bun.copy(u8, outbuffer[generated_path.len..], original_ext); return outbuffer[0 .. 
generated_path.len + original_ext.len]; } @@ -299,7 +299,7 @@ pub const MacroEntryPoint = struct { macro_label_: string, ) !void { const dir_to_use: string = import_path.dirWithTrailingSlash(); - std.mem.copy(u8, entry.code_buffer[0..macro_label_.len], macro_label_); + bun.copy(u8, &entry.code_buffer, macro_label_); const macro_label = entry.code_buffer[0..macro_label_.len]; const code = try std.fmt.bufPrint( diff --git a/src/bundler/generate_node_modules_bundle.zig b/src/bundler/generate_node_modules_bundle.zig index 321777a20..69216ae72 100644 --- a/src/bundler/generate_node_modules_bundle.zig +++ b/src/bundler/generate_node_modules_bundle.zig @@ -334,7 +334,7 @@ pub const magic_bytes = "#!/usr/bin/env bun\n\n"; // Then, you add that number to initial_header.len const initial_header = brk: { var buf = std.mem.zeroes([magic_bytes.len + 5]u8); - // std.mem.copy(u8, &buf, magic_bytes); + // bun.copy(u8, &buf, magic_bytes); // var remainder = buf[magic_bytes.len..]; // // Write an invalid byte offset to be updated after we finish generating the code // std.mem.writeIntNative(u32, remainder[0 .. remainder.len - 1], 0); @@ -454,8 +454,8 @@ pub fn generate( var i: u16 = 0; inner: for (bundle_keys, 0..) |name, k| { - std.mem.copy(u8, &Bundler.tmp_buildfile_buf, name); - std.mem.copy(u8, Bundler.tmp_buildfile_buf[name.len..], "/package.json"); + bun.copy(u8, &Bundler.tmp_buildfile_buf, name); + bun.copy(u8, Bundler.tmp_buildfile_buf[name.len..], "/package.json"); const package_json_import = Bundler.tmp_buildfile_buf[0 .. name.len + "/package.json".len]; const result = bundler.resolver.resolve(bundler.fs.top_level_dir, package_json_import, .stmt) catch |err| { generator.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving always bundled module \"{s}\"", .{ @errorName(err), name }) catch unreachable; @@ -680,7 +680,7 @@ pub fn generate( Output.prettyErrorln( \\<r><red>error<r>: no dependencies to bundle! \\ - \\"bun bun" currently only bundles dependencies in node_modules. + \\"bun bun" currently only bundles dependencies in node_modules. \\ , .{}, @@ -969,9 +969,9 @@ const BundledModuleData = struct { var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path; - std.mem.copy(u8, buf_to_use, pkg.name); + bun.copy(u8, buf_to_use, pkg.name); buf_to_use[pkg.name.len] = '/'; - std.mem.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); + bun.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1]; return BundledModuleData{ .import_path = import_path, @@ -1005,9 +1005,9 @@ const BundledModuleData = struct { var buf_to_use: []u8 = if (is_main) &normalized_package_path2 else &normalized_package_path; - std.mem.copy(u8, buf_to_use, pkg.name); + bun.copy(u8, buf_to_use, pkg.name); buf_to_use[pkg.name.len] = '/'; - std.mem.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); + bun.copy(u8, buf_to_use[pkg.name.len + 1 ..], import_path); package_path = buf_to_use[0 .. pkg.name.len + import_path.len + 1]; return BundledModuleData{ .import_path = import_path, diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index c804316dc..92d2d5b3a 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -97,7 +97,7 @@ pub const BuildCommand = struct { }, .move => |value| { // const primary = f.input.text[from_path.len..]; - // std.mem.copy(u8, filepath_buf[2..], primary); + // bun.copy(u8, filepath_buf[2..], primary); // rel_path = filepath_buf[0 .. 
primary.len + 2]; rel_path = value.pathname; diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index af369fb8b..0c76a9c01 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -1251,7 +1251,7 @@ pub const CreateCommand = struct { var entry_point_file_path_base = filesystem.absBuf(&entry_point_file_parts, &bun_path_buf); for (file_extensions_to_try) |ext| { - std.mem.copy(u8, bun_path_buf[entry_point_file_path_base.len..], ext); + bun.copy(u8, bun_path_buf[entry_point_file_path_base.len..], ext); entry_point_path = bun_path_buf[0 .. entry_point_file_path_base.len + ext.len]; std.fs.accessAbsolute(entry_point_path, .{}) catch continue; found_file = true; @@ -1303,7 +1303,7 @@ pub const CreateCommand = struct { // const head_i: usize = std.mem.indexOf(u8, outfile, "<head>") orelse break :inject_css; // if (std.mem.indexOf(u8, outfile, "/src/index.css") != null) break :inject_css; - // std.mem.copy(u8, bun_path_buf[destination.len + "/src/index".len ..], ".css"); + // bun.copy(u8, bun_path_buf[destination.len + "/src/index".len ..], ".css"); // var index_css_file_path = bun_path_buf[0 .. destination.len + "/src/index.css".len]; // std.fs.accessAbsolute(index_css_file_path, .{}) catch break :inject_css; // var list = std.ArrayList(u8).fromOwnedSlice(ctx.allocator, outfile); @@ -1761,9 +1761,9 @@ pub const Example = struct { } } - std.mem.copy(u8, &home_dir_buf, entry.name); + bun.copy(u8, &home_dir_buf, entry.name); home_dir_buf[entry.name.len] = std.fs.path.sep; - std.mem.copy(u8, home_dir_buf[entry.name.len + 1 ..], "package.json"); + bun.copy(u8, home_dir_buf[entry.name.len + 1 ..], "package.json"); home_dir_buf[entry.name.len + 1 + "package.json".len] = 0; var path: [:0]u8 = home_dir_buf[0 .. entry.name.len + 1 + "package.json".len :0]; diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig index 392414686..27e921526 100644 --- a/src/cli/install_completions_command.zig +++ b/src/cli/install_completions_command.zig @@ -395,8 +395,8 @@ pub const InstallCompletionsCommand = struct { // $ZDOTDIR/.zlogout if (bun.getenvZ("ZDOTDIR")) |zdot_dir| { - std.mem.copy(u8, &zshrc_filepath, zdot_dir); - std.mem.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshrc"); + bun.copy(u8, &zshrc_filepath, zdot_dir); + bun.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshrc"); zshrc_filepath[zdot_dir.len + "/.zshrc".len] = 0; var filepath = zshrc_filepath[0 .. zdot_dir.len + "/.zshrc".len :0]; break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .mode = .read_write }) catch break :first; @@ -405,8 +405,8 @@ pub const InstallCompletionsCommand = struct { second: { if (bun.getenvZ("HOME")) |zdot_dir| { - std.mem.copy(u8, &zshrc_filepath, zdot_dir); - std.mem.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshrc"); + bun.copy(u8, &zshrc_filepath, zdot_dir); + bun.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshrc"); zshrc_filepath[zdot_dir.len + "/.zshrc".len] = 0; var filepath = zshrc_filepath[0 .. zdot_dir.len + "/.zshrc".len :0]; break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .mode = .read_write }) catch break :second; @@ -415,8 +415,8 @@ pub const InstallCompletionsCommand = struct { third: { if (bun.getenvZ("HOME")) |zdot_dir| { - std.mem.copy(u8, &zshrc_filepath, zdot_dir); - std.mem.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshenv"); + bun.copy(u8, &zshrc_filepath, zdot_dir); + bun.copy(u8, zshrc_filepath[zdot_dir.len..], "/.zshenv"); zshrc_filepath[zdot_dir.len + "/.zshenv".len] = 0; var filepath = zshrc_filepath[0 .. 
zdot_dir.len + "/.zshenv".len :0]; break :zshrc std.fs.openFileAbsoluteZ(filepath, .{ .mode = .read_write }) catch break :third; diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index a3d3a4cb6..ec9936fbf 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -173,10 +173,10 @@ pub const PackageManagerCommand = struct { defer directories.deinit(); while (iterator.nextNodeModulesFolder()) |node_modules| { const path = try ctx.allocator.alloc(u8, node_modules.relative_path.len); - std.mem.copy(u8, path, node_modules.relative_path); + bun.copy(u8, path, node_modules.relative_path); const dependencies = try ctx.allocator.alloc(DependencyID, node_modules.dependencies.len); - std.mem.copy(PackageID, dependencies, node_modules.dependencies); + bun.copy(PackageID, dependencies, node_modules.dependencies); const folder = NodeModulesFolder{ .relative_path = @ptrCast(stringZ, path), @@ -310,9 +310,8 @@ fn printNodeModulesFolderStructure( for (directory.dependencies, 0..) |dependency_id, index| { const package_name_ = lockfile.buffers.dependencies.items[dependency_id].name.slice(string_bytes); - const package_name = allocator.alloc(u8, package_name_.len) catch unreachable; + const package_name = allocator.dupe(u8, package_name_) catch unreachable; defer allocator.free(package_name); - std.mem.copy(u8, package_name, package_name_); var possible_path = std.fmt.allocPrint(allocator, "{s}/{s}/node_modules", .{ directory.relative_path, package_name }) catch unreachable; defer allocator.free(possible_path); diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 0da28b21a..8c5b98b0f 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -228,17 +228,17 @@ pub const RunCommand = struct { log("Script: \"{s}\"", .{combined_script}); if (passthrough.len > 0) { - var combined_script_len: usize = script.len; + var combined_script_len = script.len; for (passthrough) |p| { combined_script_len += p.len + 1; } var combined_script_buf = try allocator.alloc(u8, combined_script_len); - std.mem.copy(u8, combined_script_buf, script); + bun.copy(u8, combined_script_buf, script); var remaining_script_buf = combined_script_buf[script.len..]; for (passthrough) |part| { var p = part; remaining_script_buf[0] = ' '; - std.mem.copy(u8, remaining_script_buf[1..], p); + bun.copy(u8, remaining_script_buf[1..], p); remaining_script_buf = remaining_script_buf[p.len + 1 ..]; } combined_script = combined_script_buf; @@ -701,7 +701,7 @@ pub const RunCommand = struct { const value = entry.value_ptr.*; if (value.kind(&this_bundler.fs.fs) == .file) { if (!has_copied) { - std.mem.copy(u8, &path_buf, value.dir); + bun.copy(u8, &path_buf, value.dir); dir_slice = path_buf[0..value.dir.len]; if (!strings.endsWithChar(value.dir, std.fs.path.sep)) { dir_slice = path_buf[0 .. value.dir.len + 1]; @@ -710,7 +710,7 @@ pub const RunCommand = struct { } const base = value.base(); - std.mem.copy(u8, path_buf[dir_slice.len..], base); + bun.copy(u8, path_buf[dir_slice.len..], base); path_buf[dir_slice.len + base.len] = 0; var slice = path_buf[0 .. 
dir_slice.len + base.len :0]; std.os.accessZ(slice, std.os.X_OK) catch continue; @@ -1001,7 +1001,7 @@ pub const RunCommand = struct { ctx.debug.silent, )) return false; - std.mem.copy(u8, temp_script_buffer, "post"); + temp_script_buffer[0.."post".len].* = "post".*; if (scripts.get(temp_script_buffer)) |postscript| { if (!try runPackageScript( diff --git a/src/deps/diffz/DiffMatchPatch.zig b/src/deps/diffz/DiffMatchPatch.zig index a85bc950d..4603aee1b 100644 --- a/src/deps/diffz/DiffMatchPatch.zig +++ b/src/deps/diffz/DiffMatchPatch.zig @@ -23,6 +23,7 @@ const DiffMatchPatch = @This(); const std = @import("std"); +const bun = @import("bun"); const testing = std.testing; const ArrayListUnmanaged = std.ArrayListUnmanaged; const DiffList = ArrayListUnmanaged(Diff); @@ -817,8 +818,8 @@ fn diffCleanupMerge(allocator: std.mem.Allocator, diffs: *DiffList) DiffError!vo var nt = try allocator.alloc(u8, diffs.items[ii].text.len + common_length); // try diffs.items[pointer - count_delete - count_insert - 1].text.append(allocator, text_insert.items[0..common_length]); - std.mem.copy(u8, nt, diffs.items[ii].text); - std.mem.copy(u8, nt[diffs.items[ii].text.len..], text_insert.items[0..common_length]); + bun.copy(u8, nt, diffs.items[ii].text); + bun.copy(u8, nt[diffs.items[ii].text.len..], text_insert.items[0..common_length]); // allocator.free(diffs.items[ii].text); diffs.items[ii].text = nt; @@ -870,8 +871,8 @@ fn diffCleanupMerge(allocator: std.mem.Allocator, diffs: *DiffList) DiffError!vo var nt = try allocator.alloc(u8, diffs.items[pointer - 1].text.len + diffs.items[pointer].text.len); // try diffs.items[pointer - count_delete - count_insert - 1].text.append(allocator, text_insert.items[0..common_length]); - std.mem.copy(u8, nt, diffs.items[pointer - 1].text); - std.mem.copy(u8, nt[diffs.items[pointer - 1].text.len..], diffs.items[pointer].text); + bun.copy(u8, nt, diffs.items[pointer - 1].text); + bun.copy(u8, nt[diffs.items[pointer - 1].text.len..], diffs.items[pointer].text); // allocator.free(diffs.items[pointer - 1].text); diffs.items[pointer - 1].text = nt; diff --git a/src/env_loader.zig b/src/env_loader.zig index 999ed9c12..41fb25fe5 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -911,9 +911,9 @@ pub const Map = struct { var i: usize = 0; while (it.next()) |pair| : (i += 1) { const env_buf = try arena.allocSentinel(u8, pair.key_ptr.len + pair.value_ptr.len + 1, 0); - std.mem.copy(u8, env_buf, pair.key_ptr.*); + bun.copy(u8, env_buf, pair.key_ptr.*); env_buf[pair.key_ptr.len] = '='; - std.mem.copy(u8, env_buf[pair.key_ptr.len + 1 ..], pair.value_ptr.*); + bun.copy(u8, env_buf[pair.key_ptr.len + 1 ..], pair.value_ptr.*); envp_buf[i] = env_buf.ptr; } std.debug.assert(i == envp_count); diff --git a/src/exact_size_matcher.zig b/src/exact_size_matcher.zig index 176f8afb7..5d47cee98 100644 --- a/src/exact_size_matcher.zig +++ b/src/exact_size_matcher.zig @@ -65,8 +65,7 @@ pub fn ExactSizeMatcher(comptime max_bytes: usize) type { pub fn case(comptime str: []const u8) T { if (str.len < max_bytes) { var bytes = std.mem.zeroes([max_bytes]u8); - const slice_bytes = std.mem.sliceAsBytes(str); - std.mem.copy(u8, &bytes, slice_bytes); + bytes[0..str.len].* = str[0..str.len].*; return std.mem.readIntNative(T, &bytes); } else if (str.len == max_bytes) { return std.mem.readIntNative(T, str[0..str.len]); diff --git a/src/fs.zig b/src/fs.zig index c1cd6d061..ac5175784 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -59,8 +59,8 @@ pub const BytecodeCacheFetcher = struct { .Unknown => { var 
basename_buf: [512]u8 = undefined; var pathname = Fs.PathName.init(sourcename); - std.mem.copy(u8, &basename_buf, pathname.base); - std.mem.copy(u8, basename_buf[pathname.base.len..], ".bytecode"); + bun.copy(u8, &basename_buf, pathname.base); + bun.copy(u8, basename_buf[pathname.base.len..], ".bytecode"); const basename = basename_buf[0 .. pathname.base.len + ".bytecode".len]; if (fs.fetchCacheFile(basename)) |cache_file| { @@ -162,7 +162,7 @@ pub const FileSystem = struct { // This makes path resolution more reliable if (!std.fs.path.isSep(_top_level_dir[_top_level_dir.len - 1])) { const tld = try allocator.alloc(u8, _top_level_dir.len + 1); - std.mem.copy(u8, tld, _top_level_dir); + bun.copy(u8, tld, _top_level_dir); tld[tld.len - 1] = std.fs.path.sep; // if (!isBrowser) { // allocator.free(_top_level_dir); @@ -1256,10 +1256,10 @@ pub const Path = struct { return new_path; } else { var buf = try allocator.alloc(u8, this.text.len + this.pretty.len + 2); - std.mem.copy(u8, buf, this.text); + bun.copy(u8, buf, this.text); buf.ptr[this.text.len] = 0; var new_pretty = buf[this.text.len + 1 ..]; - std.mem.copy(u8, buf[this.text.len + 1 ..], this.pretty); + bun.copy(u8, buf[this.text.len + 1 ..], this.pretty); var new_path = Fs.Path.init(buf[0..this.text.len]); buf.ptr[buf.len - 1] = 0; new_path.pretty = new_pretty; diff --git a/src/http.zig b/src/http.zig index 5a2847357..167e2658f 100644 --- a/src/http.zig +++ b/src/http.zig @@ -494,8 +494,8 @@ pub const RequestContext = struct { // Is it the index file? if (relative_unrooted_path.len == 0) { - // std.mem.copy(u8, &tmp_buildfile_buf, relative_unrooted_path); - // std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/" + // bun.copy(u8, &tmp_buildfile_buf, relative_unrooted_path); + // bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/" // Search for /index.html if (this.bundler.options.routes.single_page_app_routing and this.bundler.options.routes.single_page_app_fd != 0) @@ -520,8 +520,8 @@ pub const RequestContext = struct { while (_file == null and relative_unrooted_path.len > 1) { // When no extension is provided, it might be html if (extension.len == 0) { - std.mem.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]); - std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], ".html"); + bun.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]); + bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], ".html"); if (public_dir.openFile(tmp_buildfile_buf[0 .. relative_unrooted_path.len + ".html".len], .{})) |file| { _file = file; @@ -531,12 +531,12 @@ pub const RequestContext = struct { var _path: []u8 = undefined; if (relative_unrooted_path[relative_unrooted_path.len - 1] == '/') { - std.mem.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0 .. relative_unrooted_path.len - 1]); - std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len - 1 ..], "/index.html"); + bun.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0 .. relative_unrooted_path.len - 1]); + bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len - 1 ..], "/index.html"); _path = tmp_buildfile_buf[0 .. 
relative_unrooted_path.len - 1 + "/index.html".len]; } else { - std.mem.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]); - std.mem.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/index.html"); + bun.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]); + bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/index.html"); _path = tmp_buildfile_buf[0 .. relative_unrooted_path.len + "/index.html".len]; } diff --git a/src/http/url_path.zig b/src/http/url_path.zig index 24bcea76b..ba8bee297 100644 --- a/src/http/url_path.zig +++ b/src/http/url_path.zig @@ -69,7 +69,7 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath { possibly_encoded_pathname.len, )]; - std.mem.copy(u8, possibly_encoded_pathname, possibly_encoded_pathname_[0..possibly_encoded_pathname.len]); + bun.copy(u8, possibly_encoded_pathname, possibly_encoded_pathname_[0..possibly_encoded_pathname.len]); var clone = possibly_encoded_pathname[0..possibly_encoded_pathname.len]; var fbs = std.io.fixedBufferStream( diff --git a/src/http_client_async.zig b/src/http_client_async.zig index 28a80700d..7060c3ed2 100644 --- a/src/http_client_async.zig +++ b/src/http_client_async.zig @@ -1051,22 +1051,33 @@ const os = std.os; // lowercase hash header names so that we can be sure pub fn hashHeaderName(name: string) u64 { var hasher = std.hash.Wyhash.init(0); - var remain: string = name; - var buf: [32]u8 = undefined; - var buf_slice: []u8 = buf[0..32]; + var remain = name; + var buf: [hasher.buf.len]u8 = undefined; while (remain.len > 0) { const end = @min(hasher.buf.len, remain.len); - hasher.update(strings.copyLowercase(remain[0..end], buf_slice)); + hasher.update(strings.copyLowercase(remain[0..end], &buf)); remain = remain[end..]; } return hasher.final(); } -const host_header_hash = hashHeaderName("Host"); -const connection_header_hash = hashHeaderName("Connection"); +pub fn hashHeaderConst(comptime name: string) u64 { + var hasher = std.hash.Wyhash.init(0); + var remain = name; + var buf: [hasher.buf.len]u8 = undefined; + + while (remain.len > 0) { + const end = @min(hasher.buf.len, remain.len); + + hasher.update(std.ascii.lowerString(&buf, remain[0..end])); + remain = remain[end..]; + } + + return hasher.final(); +} pub const Encoding = enum { identity, @@ -1084,16 +1095,11 @@ pub const Encoding = enum { } }; -const content_encoding_hash = hashHeaderName("Content-Encoding"); -const transfer_encoding_header = hashHeaderName("Transfer-Encoding"); - const host_header_name = "Host"; const content_length_header_name = "Content-Length"; -const content_length_header_hash = hashHeaderName("Content-Length"); const connection_header = picohttp.Header{ .name = "Connection", .value = "keep-alive" }; const connection_closing_header = picohttp.Header{ .name = "Connection", .value = "close" }; const accept_header = picohttp.Header{ .name = "Accept", .value = "*/*" }; -const accept_header_hash = hashHeaderName("Accept"); const accept_encoding_no_compression = "identity"; const accept_encoding_compression = "gzip, deflate"; @@ -1105,11 +1111,7 @@ const accept_encoding_header = if (FeatureFlags.disable_compression_in_http_clie else accept_encoding_header_compression; -const accept_encoding_header_hash = hashHeaderName("Accept-Encoding"); - const user_agent_header = picohttp.Header{ .name = "User-Agent", .value = Global.user_agent }; -const user_agent_header_hash = hashHeaderName("User-Agent"); -const location_header_hash = hashHeaderName("Location"); pub fn 
headerStr(this: *const HTTPClient, ptr: Api.StringPointer) string { return this.header_buf[ptr.offset..][0..ptr.length]; @@ -1394,31 +1396,33 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request { // Skip host and connection header // we manage those switch (hash) { - connection_header_hash, - content_length_header_hash, + hashHeaderConst("Connection"), + hashHeaderConst("Content-Length"), => continue, - hashHeaderName("if-modified-since") => { + hashHeaderConst("if-modified-since") => { if (this.force_last_modified and this.if_modified_since.len == 0) { this.if_modified_since = this.headerStr(header_values[i]); } }, - host_header_hash => { + hashHeaderConst(host_header_name) => { override_host_header = true; }, - accept_header_hash => { + hashHeaderConst("Accept") => { override_accept_header = true; }, + hashHeaderConst("User-Agent") => { + override_user_agent = true; + }, + hashHeaderConst("Accept-Encoding") => { + override_accept_encoding = true; + }, else => {}, } - override_user_agent = override_user_agent or hash == user_agent_header_hash; - - override_accept_encoding = override_accept_encoding or hash == accept_encoding_header_hash; - - request_headers_buf[header_count] = (picohttp.Header{ + request_headers_buf[header_count] = .{ .name = name, .value = this.headerStr(header_values[i]), - }); + }; // header_name_hashes[header_count] = hash; @@ -1450,7 +1454,7 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request { } if (!override_host_header) { - request_headers_buf[header_count] = picohttp.Header{ + request_headers_buf[header_count] = .{ .name = host_header_name, .value = this.url.host, }; @@ -1463,7 +1467,7 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request { } if (body_len > 0) { - request_headers_buf[header_count] = picohttp.Header{ + request_headers_buf[header_count] = .{ .name = content_length_header_name, .value = std.fmt.bufPrint(&this.request_content_len_buf, "{d}", .{body_len}) catch "0", }; @@ -2496,11 +2500,11 @@ pub fn handleResponseMetadata( var pretend_304 = false; for (response.headers, 0..) 
|header, header_i| { switch (hashHeaderName(header.name)) { - content_length_header_hash => { + hashHeaderConst("Content-Length") => { const content_length = std.fmt.parseInt(@TypeOf(this.state.body_size), header.value, 10) catch 0; this.state.body_size = content_length; }, - content_encoding_hash => { + hashHeaderConst("Content-Encoding") => { if (strings.eqlComptime(header.value, "gzip")) { this.state.encoding = Encoding.gzip; this.state.content_encoding_i = @truncate(u8, header_i); @@ -2511,7 +2515,7 @@ pub fn handleResponseMetadata( return error.UnsupportedContentEncoding; } }, - transfer_encoding_header => { + hashHeaderConst("Transfer-Encoding") => { if (strings.eqlComptime(header.value, "gzip")) { this.state.transfer_encoding = Encoding.gzip; } else if (strings.eqlComptime(header.value, "deflate")) { @@ -2524,17 +2528,17 @@ pub fn handleResponseMetadata( return error.UnsupportedTransferEncoding; } }, - location_header_hash => { + hashHeaderConst("Location") => { location = header.value; }, - hashHeaderName("Connection") => { + hashHeaderConst("Connection") => { if (response.status_code >= 200 and response.status_code <= 299) { if (!strings.eqlComptime(header.value, "keep-alive")) { this.state.allow_keepalive = false; } } }, - hashHeaderName("Last-Modified") => { + hashHeaderConst("Last-Modified") => { pretend_304 = this.force_last_modified and response.status_code > 199 and response.status_code < 300 and this.if_modified_since.len > 0 and strings.eql(this.if_modified_since, header.value); }, @@ -2585,15 +2589,15 @@ pub fn handleResponseMetadata( if (is_protocol_relative) { if (is_http) { url_buf.data[0.."http".len].* = "http".*; - std.mem.copy(u8, url_buf.data["http".len..], location); + bun.copy(u8, url_buf.data["http".len..], location); url_buf_len += "http".len; } else { url_buf.data[0.."https".len].* = "https".*; - std.mem.copy(u8, url_buf.data["https".len..], location); + bun.copy(u8, url_buf.data["https".len..], location); url_buf_len += "https".len; } } else { - std.mem.copy(u8, &url_buf.data, location); + bun.copy(u8, &url_buf.data, location); } this.url = URL.parse(url_buf.data[0..url_buf_len]); @@ -2612,11 +2616,11 @@ pub fn handleResponseMetadata( if (strings.eqlComptime(protocol_name, "http")) { url_buf.data[0.."http:".len].* = "http:".*; - std.mem.copy(u8, url_buf.data["http:".len..], location); + bun.copy(u8, url_buf.data["http:".len..], location); url_buf_len += "http:".len; } else { url_buf.data[0.."https:".len].* = "https:".*; - std.mem.copy(u8, url_buf.data["https:".len..], location); + bun.copy(u8, url_buf.data["https:".len..], location); url_buf_len += "https:".len; } diff --git a/src/install/bin.zig b/src/install/bin.zig index f19d8a6ad..3b90544e0 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -330,7 +330,7 @@ pub const Bin = extern struct { return; }; const rel = Path.relative(from, to); - std.mem.copy(u8, remain, rel); + bun.copy(u8, remain, rel); remain = remain[rel.len..]; remain[0] = std.fs.path.sep; remain = remain[1..]; @@ -342,7 +342,7 @@ pub const Bin = extern struct { return; } - std.mem.copy(u8, &target_buf, this.global_bin_path); + bun.copy(u8, &target_buf, this.global_bin_path); from_remain = target_buf[this.global_bin_path.len..]; from_remain[0] = std.fs.path.sep; from_remain = from_remain[1..]; @@ -358,7 +358,7 @@ pub const Bin = extern struct { } const name = this.package_name.slice(); - std.mem.copy(u8, remain, name); + bun.copy(u8, remain, name); remain = remain[name.len..]; remain[0] = std.fs.path.sep; remain = remain[1..]; @@ 
-379,7 +379,7 @@ pub const Bin = extern struct { if (strings.hasPrefixComptime(target, "./")) { target = target["./".len..]; } - std.mem.copy(u8, remain, target); + bun.copy(u8, remain, target); remain = remain[target.len..]; remain[0] = 0; const target_len = @ptrToInt(remain.ptr) - @ptrToInt(&dest_buf); @@ -389,7 +389,7 @@ pub const Bin = extern struct { // we need to use the unscoped package name here // this is why @babel/parser would fail to link const unscoped_name = unscopedPackageName(name); - std.mem.copy(u8, from_remain, unscoped_name); + bun.copy(u8, from_remain, unscoped_name); from_remain = from_remain[unscoped_name.len..]; from_remain[0] = 0; var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; @@ -401,7 +401,7 @@ pub const Bin = extern struct { if (strings.hasPrefixComptime(target, "./")) { target = target["./".len..]; } - std.mem.copy(u8, remain, target); + bun.copy(u8, remain, target); remain = remain[target.len..]; remain[0] = 0; const target_len = @ptrToInt(remain.ptr) - @ptrToInt(&dest_buf); @@ -409,7 +409,7 @@ pub const Bin = extern struct { var target_path: [:0]u8 = dest_buf[0..target_len :0]; var name_to_use = this.bin.value.named_file[0].slice(this.string_buf); - std.mem.copy(u8, from_remain, name_to_use); + bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; @@ -431,7 +431,7 @@ pub const Bin = extern struct { if (strings.hasPrefixComptime(target, "./")) { target = target["./".len..]; } - std.mem.copy(u8, remain, target); + bun.copy(u8, remain, target); remain = remain[target.len..]; remain[0] = 0; const target_len = @ptrToInt(remain.ptr) - @ptrToInt(&dest_buf); @@ -439,7 +439,7 @@ pub const Bin = extern struct { var target_path: [:0]u8 = dest_buf[0..target_len :0]; var name_to_use = name_in_terminal.slice(this.string_buf); - std.mem.copy(u8, from_remain, name_to_use); + bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; @@ -455,7 +455,7 @@ pub const Bin = extern struct { var parts = [_][]const u8{ name, target }; - std.mem.copy(u8, remain, target); + bun.copy(u8, remain, target); remain = remain[target.len..]; var dir = std.fs.Dir{ .fd = this.package_installed_node_modules }; @@ -484,7 +484,7 @@ pub const Bin = extern struct { switch (entry.kind) { std.fs.IterableDir.Entry.Kind.SymLink, std.fs.IterableDir.Entry.Kind.File => { target_buf_remain = prev_target_buf_remain; - std.mem.copy(u8, target_buf_remain, entry.name); + bun.copy(u8, target_buf_remain, entry.name); target_buf_remain = target_buf_remain[entry.name.len..]; target_buf_remain[0] = 0; var from_path: [:0]u8 = target_buf[0 .. 
@ptrToInt(target_buf_remain.ptr) - @ptrToInt(&target_buf) :0]; @@ -535,7 +535,7 @@ pub const Bin = extern struct { } const name = this.package_name.slice(); - std.mem.copy(u8, remain, name); + bun.copy(u8, remain, name); remain = remain[name.len..]; remain[0] = std.fs.path.sep; remain = remain[1..]; @@ -554,7 +554,7 @@ pub const Bin = extern struct { // we need to use the unscoped package name here // this is why @babel/parser would fail to link const unscoped_name = unscopedPackageName(name); - std.mem.copy(u8, from_remain, unscoped_name); + bun.copy(u8, from_remain, unscoped_name); from_remain = from_remain[unscoped_name.len..]; from_remain[0] = 0; var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; @@ -563,7 +563,7 @@ pub const Bin = extern struct { }, .named_file => { var name_to_use = this.bin.value.named_file[0].slice(this.string_buf); - std.mem.copy(u8, from_remain, name_to_use); + bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; @@ -585,13 +585,13 @@ pub const Bin = extern struct { if (strings.hasPrefix(target, "./")) { target = target[2..]; } - std.mem.copy(u8, remain, target); + bun.copy(u8, remain, target); remain = remain[target.len..]; remain[0] = 0; remain = remain[1..]; var name_to_use = name_in_terminal.slice(this.string_buf); - std.mem.copy(u8, from_remain, name_to_use); + bun.copy(u8, from_remain, name_to_use); from_remain = from_remain[name_to_use.len..]; from_remain[0] = 0; var dest_path: [:0]u8 = target_buf[0 .. @ptrToInt(from_remain.ptr) - @ptrToInt(&target_buf) :0]; @@ -607,7 +607,7 @@ pub const Bin = extern struct { var parts = [_][]const u8{ name, target }; - std.mem.copy(u8, remain, target); + bun.copy(u8, remain, target); remain = remain[target.len..]; var dir = std.fs.Dir{ .fd = this.package_installed_node_modules }; @@ -636,7 +636,7 @@ pub const Bin = extern struct { switch (entry.kind) { std.fs.IterableDir.Entry.Kind.SymLink, std.fs.IterableDir.Entry.Kind.File => { target_buf_remain = prev_target_buf_remain; - std.mem.copy(u8, target_buf_remain, entry.name); + bun.copy(u8, target_buf_remain, entry.name); target_buf_remain = target_buf_remain[entry.name.len..]; target_buf_remain[0] = 0; var to_path = if (!link_global) diff --git a/src/install/bit_set.zig b/src/install/bit_set.zig index 711c4603f..661a3a4bf 100644 --- a/src/install/bit_set.zig +++ b/src/install/bit_set.zig @@ -31,8 +31,9 @@ //! allocator, in order to save space. const std = @import("std"); -const assert = std.debug.assert; const Allocator = std.mem.Allocator; +const bun = @import("bun"); +const Environment = bun.Environment; /// Returns the optimal static bit set type for the specified number /// of elements. The returned type will perform no allocations, @@ -85,7 +86,7 @@ pub fn IntegerBitSet(comptime size: u16) type { /// Returns true if the bit at the specified index /// is present in the set, false otherwise. pub fn isSet(self: Self, index: usize) bool { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); return (self.mask & maskBit(index)) != 0; } @@ -97,7 +98,7 @@ pub fn IntegerBitSet(comptime size: u16) type { /// Changes the value of the specified bit of the bit /// set to match the passed boolean. 
pub fn setValue(self: *Self, index: usize, value: bool) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); if (MaskInt == u0) return; const bit = maskBit(index); const new_bit = bit & std.math.boolMask(MaskInt, value); @@ -106,15 +107,17 @@ pub fn IntegerBitSet(comptime size: u16) type { /// Adds a specific bit to the bit set pub fn set(self: *Self, index: usize) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); self.mask |= maskBit(index); } /// Changes the value of all bits in the specified range to /// match the passed boolean. pub fn setRangeValue(self: *Self, range: Range, value: bool) void { - assert(range.end <= bit_length); - assert(range.start <= range.end); + if (comptime Environment.allow_assert) { + std.debug.assert(range.end <= bit_length); + std.debug.assert(range.start <= range.end); + } if (range.start == range.end) return; if (MaskInt == u0) return; @@ -137,7 +140,7 @@ pub fn IntegerBitSet(comptime size: u16) type { /// Removes a specific bit from the bit set pub fn unset(self: *Self, index: usize) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); // Workaround for #7953 if (MaskInt == u0) return; self.mask &= ~maskBit(index); @@ -145,7 +148,7 @@ pub fn IntegerBitSet(comptime size: u16) type { /// Flips a specific bit in the bit set pub fn toggle(self: *Self, index: usize) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); self.mask ^= maskBit(index); } @@ -400,7 +403,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { /// Returns true if the bit at the specified index /// is present in the set, false otherwise. pub fn isSet(self: *const Self, index: usize) bool { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); if (num_masks == 0) return false; // doesn't compile in this case return (self.masks[maskIndex(index)] & maskBit(index)) != 0; } @@ -417,7 +420,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { /// Changes the value of the specified bit of the bit /// set to match the passed boolean. pub fn setValue(self: *Self, index: usize, value: bool) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); if (num_masks == 0) return; // doesn't compile in this case const bit = maskBit(index); const mask_index = maskIndex(index); @@ -427,7 +430,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { /// Adds a specific bit to the bit set pub fn set(self: *Self, index: usize) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); if (num_masks == 0) return; // doesn't compile in this case self.masks[maskIndex(index)] |= maskBit(index); } @@ -435,8 +438,10 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { /// Changes the value of all bits in the specified range to /// match the passed boolean. 
pub fn setRangeValue(self: *Self, range: Range, value: bool) void { - assert(range.end <= bit_length); - assert(range.start <= range.end); + if (comptime Environment.allow_assert) { + std.debug.assert(range.end <= bit_length); + std.debug.assert(range.start <= range.end); + } if (range.start == range.end) return; if (num_masks == 0) return; @@ -479,14 +484,14 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { /// Removes a specific bit from the bit set pub fn unset(self: *Self, index: usize) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); if (num_masks == 0) return; // doesn't compile in this case self.masks[maskIndex(index)] &= ~maskBit(index); } /// Flips a specific bit in the bit set pub fn toggle(self: *Self, index: usize) void { - assert(index < bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < bit_length); if (num_masks == 0) return; // doesn't compile in this case self.masks[maskIndex(index)] ^= maskBit(index); } @@ -702,7 +707,7 @@ pub const DynamicBitSetUnmanaged = struct { const old_allocation = (self.masks - 1)[0..(self.masks - 1)[0]]; if (new_masks == 0) { - assert(new_len == 0); + if (comptime Environment.allow_assert) std.debug.assert(new_len == 0); allocator.free(old_allocation); self.masks = empty_masks_ptr; self.bit_length = 0; @@ -765,7 +770,7 @@ pub const DynamicBitSetUnmanaged = struct { const num_masks = numMasks(self.bit_length); var copy = Self{}; try copy.resize(new_allocator, self.bit_length, false); - std.mem.copy(MaskInt, copy.masks[0..num_masks], self.masks[0..num_masks]); + bun.copy(MaskInt, copy.masks, self.masks[0..num_masks]); return copy; } @@ -777,7 +782,7 @@ pub const DynamicBitSetUnmanaged = struct { /// Returns true if the bit at the specified index /// is present in the set, false otherwise. pub fn isSet(self: Self, index: usize) bool { - assert(index < self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length); return (self.masks[maskIndex(index)] & maskBit(index)) != 0; } @@ -795,7 +800,7 @@ pub const DynamicBitSetUnmanaged = struct { /// Changes the value of the specified bit of the bit /// set to match the passed boolean. pub fn setValue(self: *Self, index: usize, value: bool) void { - assert(index < self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length); const bit = maskBit(index); const mask_index = maskIndex(index); const new_bit = bit & std.math.boolMask(MaskInt, value); @@ -804,15 +809,15 @@ pub const DynamicBitSetUnmanaged = struct { /// Adds a specific bit to the bit set pub fn set(self: *Self, index: usize) void { - assert(index < self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length); self.masks[maskIndex(index)] |= maskBit(index); } /// Changes the value of all bits in the specified range to /// match the passed boolean. 
pub fn setRangeValue(self: *Self, range: Range, value: bool) void { - assert(range.end <= self.bit_length); - assert(range.start <= range.end); + if (comptime Environment.allow_assert) std.debug.assert(range.end <= self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(range.start <= range.end); if (range.start == range.end) return; const start_mask_index = maskIndex(range.start); @@ -854,13 +859,13 @@ pub const DynamicBitSetUnmanaged = struct { /// Removes a specific bit from the bit set pub fn unset(self: *Self, index: usize) void { - assert(index < self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length); self.masks[maskIndex(index)] &= ~maskBit(index); } /// Flips a specific bit in the bit set pub fn toggle(self: *Self, index: usize) void { - assert(index < self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length); self.masks[maskIndex(index)] ^= maskBit(index); } @@ -868,7 +873,7 @@ pub const DynamicBitSetUnmanaged = struct { /// in the toggles bit set. Both sets must have the /// same bit_length. pub fn toggleSet(self: *Self, toggles: Self) void { - assert(toggles.bit_length == self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(toggles.bit_length == self.bit_length); const num_masks = numMasks(self.bit_length); for (&self.masks[0..num_masks], 0..) |*mask, i| { mask.* ^= toggles.masks[i]; @@ -911,7 +916,7 @@ pub const DynamicBitSetUnmanaged = struct { /// set if the corresponding bits were set in either input. /// The two sets must both be the same bit_length. pub fn setUnion(self: *Self, other: Self) void { - assert(other.bit_length == self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length); const num_masks = numMasks(self.bit_length); for (&self.masks[0..num_masks], 0..) |*mask, i| { mask.* |= other.masks[i]; @@ -923,7 +928,7 @@ pub const DynamicBitSetUnmanaged = struct { /// set if the corresponding bits were set in both inputs. /// The two sets must both be the same bit_length. pub fn setIntersection(self: *Self, other: Self) void { - assert(other.bit_length == self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length); const num_masks = numMasks(self.bit_length); for (&self.masks[0..num_masks], 0..) |*mask, i| { mask.* &= other.masks[i]; @@ -931,7 +936,7 @@ pub const DynamicBitSetUnmanaged = struct { } pub fn setExcludeTwo(self: *Self, other: Self, third: Self) void { - assert(other.bit_length == self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length); const num_masks = numMasks(self.bit_length); for (&self.masks[0..num_masks], 0..) |*mask, i| { mask.* &= ~other.masks[i]; @@ -940,7 +945,7 @@ pub const DynamicBitSetUnmanaged = struct { } pub fn setExclude(self: *Self, other: Self) void { - assert(other.bit_length == self.bit_length); + if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length); const num_masks = numMasks(self.bit_length); for (&self.masks[0..num_masks], 0..) 
|*mask, i| { mask.* &= ~other.masks[i]; diff --git a/src/install/install.zig b/src/install/install.zig index 07bd06692..0c280ccaa 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -81,7 +81,6 @@ const String = Semver.String; const GlobalStringBuilder = @import("../string_builder.zig"); const SlicedString = Semver.SlicedString; const Repository = @import("./repository.zig").Repository; -const StructBuilder = @import("../builder.zig"); const Bin = @import("./bin.zig").Bin; const Dependency = @import("./dependency.zig"); const Behavior = @import("./dependency.zig").Behavior; @@ -799,7 +798,7 @@ const PackageInstall = struct { var total: usize = 0; var read: usize = 0; - std.mem.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ ".bun-tag"); + bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ ".bun-tag"); this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len] = 0; const bun_tag_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len :0]; defer this.destination_dir_subpath_buf[this.destination_dir_subpath.len] = 0; @@ -861,7 +860,7 @@ const PackageInstall = struct { var total: usize = 0; var read: usize = 0; - std.mem.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ "package.json"); + bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ "package.json"); this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len] = 0; var package_json_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len :0]; defer this.destination_dir_subpath_buf[this.destination_dir_subpath.len] = 0; @@ -987,7 +986,7 @@ const PackageInstall = struct { std.os.mkdirat(destination_dir_.dir.fd, entry.path, 0o755) catch {}; }, .File => { - std.mem.copy(u8, &stackpath, entry.path); + bun.copy(u8, &stackpath, entry.path); stackpath[entry.path.len] = 0; var path: [:0]u8 = stackpath[0..entry.path.len :0]; var basename: [:0]u8 = stackpath[entry.path.len - entry.basename.len .. entry.path.len :0]; @@ -1681,7 +1680,7 @@ pub const PackageManager = struct { const index = @truncate(DependencyID, this.lockfile.buffers.dependencies.items.len); this.lockfile.buffers.dependencies.append(this.allocator, cloned_dependency) catch unreachable; this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable; - if (Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len); + if (comptime Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len); if (is_main) { this.enqueueDependencyWithMainAndSuccessFn( index, @@ -1812,15 +1811,15 @@ pub const PackageManager = struct { ) void { if (Output.isEmojiEnabled()) { if (is_first) { - std.mem.copy(u8, &this.progress_name_buf, emoji); - std.mem.copy(u8, this.progress_name_buf[emoji.len..], name); + bun.copy(u8, &this.progress_name_buf, emoji); + bun.copy(u8, this.progress_name_buf[emoji.len..], name); node.name = this.progress_name_buf[0 .. 
emoji.len + name.len]; } else { - std.mem.copy(u8, this.progress_name_buf[emoji.len..], name); + bun.copy(u8, this.progress_name_buf[emoji.len..], name); node.name = this.progress_name_buf[0 .. emoji.len + name.len]; } } else { - std.mem.copy(u8, &this.progress_name_buf, name); + bun.copy(u8, &this.progress_name_buf, name); node.name = this.progress_name_buf[0..name.len]; } } @@ -4256,7 +4255,7 @@ pub const PackageManager = struct { const len = std.mem.count(u8, native_packages, " "); if (len > 0) { var all = try allocator.alloc(PackageNameHash, this.native_bin_link_allowlist.len + len); - std.mem.copy(PackageNameHash, all, this.native_bin_link_allowlist); + bun.copy(PackageNameHash, all, this.native_bin_link_allowlist); var remain = all[this.native_bin_link_allowlist.len..]; var splitter = std.mem.split(u8, native_packages, " "); var i: usize = 0; @@ -4373,7 +4372,7 @@ pub const PackageManager = struct { if (cli.link_native_bins.len > 0) { var all = try allocator.alloc(PackageNameHash, this.native_bin_link_allowlist.len + cli.link_native_bins.len); - std.mem.copy(PackageNameHash, all, this.native_bin_link_allowlist); + bun.copy(PackageNameHash, all, this.native_bin_link_allowlist); var remain = all[this.native_bin_link_allowlist.len..]; for (cli.link_native_bins, 0..) |name, i| { remain[i] = String.Builder.stringHash(name); @@ -4527,7 +4526,7 @@ pub const PackageManager = struct { } var new_dependencies = try allocator.alloc(G.Property, dependencies.len + remaining - replacing); - std.mem.copy(G.Property, new_dependencies, dependencies); + bun.copy(G.Property, new_dependencies, dependencies); std.mem.set(G.Property, new_dependencies[dependencies.len..], G.Property{}); outer: for (updates) |*update| { @@ -4618,7 +4617,7 @@ pub const PackageManager = struct { current_package_json.* = JSAst.Expr.init(JSAst.E.Object, JSAst.E.Object{ .properties = JSAst.G.Property.List.init(root_properties) }, logger.Loc.Empty); } else if (needs_new_dependency_list) { var root_properties = try allocator.alloc(JSAst.G.Property, current_package_json.data.e_object.properties.len + 1); - std.mem.copy(JSAst.G.Property, root_properties, current_package_json.data.e_object.properties.slice()); + bun.copy(JSAst.G.Property, root_properties, current_package_json.data.e_object.properties.slice()); root_properties[root_properties.len - 1] = .{ .key = JSAst.Expr.init( JSAst.E.String, @@ -4705,7 +4704,7 @@ pub const PackageManager = struct { var fs = try Fs.FileSystem.init1(ctx.allocator, null); var original_cwd = std.mem.trimRight(u8, fs.top_level_dir, "/"); - std.mem.copy(u8, &cwd_buf, original_cwd); + bun.copy(u8, &cwd_buf, original_cwd); // Step 1. Find the nearest package.json directory // @@ -4736,7 +4735,7 @@ pub const PackageManager = struct { }; } - std.mem.copy(u8, &cwd_buf, original_cwd); + bun.copy(u8, &cwd_buf, original_cwd); cwd_buf[original_cwd.len] = 0; var real_cwd: [:0]u8 = cwd_buf[0..original_cwd.len :0]; std.os.chdirZ(real_cwd) catch {}; @@ -4749,8 +4748,8 @@ pub const PackageManager = struct { cwd_buf[fs.top_level_dir.len] = '/'; cwd_buf[fs.top_level_dir.len + 1] = 0; fs.top_level_dir = cwd_buf[0 .. 
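// The hunks above mechanically turn `std.mem.copy(u8, dest[0..src.len], src)` into
// `bun.copy(u8, dest, src)`, letting call sites pass the whole destination buffer.
// Below is only a sketch of what such a helper could look like, not the actual
// implementation in src/bun.zig: at minimum it has to tolerate dest.len > src.len,
// and (since later hunks also route std.mem.copyBackwards through it) stay correct
// when the two slices overlap, i.e. behave like memmove.
const std = @import("std");

pub fn copy(comptime T: type, dest: []T, src: []const T) void {
    std.debug.assert(dest.len >= src.len);
    if (src.len == 0) return;
    if (@ptrToInt(dest.ptr) <= @ptrToInt(src.ptr)) {
        // Destination starts at or before the source: a forward copy never reads
        // an element it has already overwritten.
        std.mem.copy(T, dest[0..src.len], src);
    } else {
        // Destination starts after the source (e.g. shifting items right in place):
        // copy back-to-front instead.
        std.mem.copyBackwards(T, dest[0..src.len], src);
    }
}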
fs.top_level_dir.len + 1]; - std.mem.copy(u8, &package_json_cwd_buf, fs.top_level_dir); - std.mem.copy(u8, package_json_cwd_buf[fs.top_level_dir.len..], "package.json"); + bun.copy(u8, &package_json_cwd_buf, fs.top_level_dir); + bun.copy(u8, package_json_cwd_buf[fs.top_level_dir.len..], "package.json"); var entries_option = try fs.fs.readDirectory(fs.top_level_dir, null); var options = Options{ @@ -5461,7 +5460,7 @@ pub const PackageManager = struct { buf2[path_.len] = 0; final_path = buf2[0..path_.len :0]; } else { - std.mem.copy(u8, &buf, cwd_); + bun.copy(u8, &buf, cwd_); buf[cwd_.len] = 0; final_path = buf[0..cwd_.len :0]; } @@ -5980,8 +5979,8 @@ pub const PackageManager = struct { var cwd = std.fs.cwd(); // This is not exactly correct var node_modules_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - std.mem.copy(u8, &node_modules_buf, "node_modules" ++ std.fs.path.sep_str); - var offset_buf: []u8 = node_modules_buf["node_modules/".len..]; + bun.copy(u8, &node_modules_buf, "node_modules" ++ std.fs.path.sep_str); + var offset_buf = node_modules_buf["node_modules/".len..]; const name_hashes = manager.lockfile.packages.items(.name_hash); for (updates) |update| { // If the package no longer exists in the updated lockfile, delete the directory @@ -5989,7 +5988,7 @@ pub const PackageManager = struct { // It does not handle nested dependencies // This is a quick & dirty cleanup intended for when deleting top-level dependencies if (std.mem.indexOfScalar(PackageNameHash, name_hashes, String.Builder.stringHash(update.name)) == null) { - std.mem.copy(u8, offset_buf, update.name); + bun.copy(u8, offset_buf, update.name); cwd.deleteTree(node_modules_buf[0 .. "node_modules/".len + update.name.len]) catch {}; } } @@ -6005,7 +6004,7 @@ pub const PackageManager = struct { // any symlinks which we are unable to open are assumed to be dangling // note that using access won't work here, because access doesn't resolve symlinks - std.mem.copy(u8, &node_modules_buf, entry.name); + bun.copy(u8, &node_modules_buf, entry.name); node_modules_buf[entry.name.len] = 0; var buf: [:0]u8 = node_modules_buf[0..entry.name.len :0]; @@ -6134,7 +6133,7 @@ pub const PackageManager = struct { const alias = this.lockfile.buffers.dependencies.items[dependency_id].name.slice(buf); const destination_dir_subpath: [:0]u8 = brk: { - std.mem.copy(u8, &this.destination_dir_subpath_buf, alias); + bun.copy(u8, &this.destination_dir_subpath_buf, alias); this.destination_dir_subpath_buf[alias.len] = 0; break :brk this.destination_dir_subpath_buf[0..alias.len :0]; }; diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 80f95ac83..c7009c57b 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -1,3 +1,5 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; const bun = @import("bun"); const string = bun.string; const Output = bun.Output; @@ -8,10 +10,10 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const C = bun.C; -const std = @import("std"); +const JSAst = bun.JSAst; const JSLexer = bun.js_lexer; -const logger = @import("bun").logger; +const logger = bun.logger; const js_parser = bun.js_parser; const Expr = @import("../js_ast.zig").Expr; @@ -31,19 +33,17 @@ const NodeModuleBundle = @import("../node_module_bundle.zig").NodeModuleBundle; const DotEnv = @import("../env_loader.zig"); const which = @import("../which.zig").which; const Run = @import("../bun_js.zig").Run; -const HeaderBuilder = 
@import("bun").HTTP.HeaderBuilder; +const HeaderBuilder = bun.HTTP.HeaderBuilder; const Fs = @import("../fs.zig"); const FileSystem = Fs.FileSystem; const Lock = @import("../lock.zig").Lock; -var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; -var path_buf2: [bun.MAX_PATH_BYTES]u8 = undefined; const URL = @import("../url.zig").URL; -const AsyncHTTP = @import("bun").HTTP.AsyncHTTP; -const HTTPChannel = @import("bun").HTTP.HTTPChannel; -const NetworkThread = @import("bun").HTTP.NetworkThread; +const AsyncHTTP = bun.HTTP.AsyncHTTP; +const HTTPChannel = bun.HTTP.HTTPChannel; +const NetworkThread = bun.HTTP.NetworkThread; const Integrity = @import("./integrity.zig").Integrity; -const clap = @import("bun").clap; +const clap = bun.clap; const ExtractTarball = @import("./extract_tarball.zig"); const Npm = @import("./npm.zig"); const Bitset = @import("./bit_set.zig").DynamicBitSetUnmanaged; @@ -58,33 +58,32 @@ const String = Semver.String; const GlobalStringBuilder = @import("../string_builder.zig"); const SlicedString = Semver.SlicedString; const Repository = @import("./repository.zig").Repository; -const StructBuilder = @import("../builder.zig"); const Bin = @import("./bin.zig").Bin; const Dependency = @import("./dependency.zig"); const Behavior = Dependency.Behavior; const FolderResolution = @import("./resolvers/folder_resolver.zig").FolderResolution; -const PackageManager = @import("./install.zig").PackageManager; -const ExternalSlice = @import("./install.zig").ExternalSlice; -const ExternalSliceAligned = @import("./install.zig").ExternalSliceAligned; -const PackageID = @import("./install.zig").PackageID; -const DependencyID = @import("./install.zig").DependencyID; -const Features = @import("./install.zig").Features; -const PackageInstall = @import("./install.zig").PackageInstall; -const PackageNameHash = @import("./install.zig").PackageNameHash; -const Aligner = @import("./install.zig").Aligner; -const ExternalStringMap = @import("./install.zig").ExternalStringMap; -const alignment_bytes_to_repeat_buffer = @import("./install.zig").alignment_bytes_to_repeat_buffer; +const Install = @import("./install.zig"); +const PackageManager = Install.PackageManager; +const ExternalSlice = Install.ExternalSlice; +const ExternalSliceAligned = Install.ExternalSliceAligned; +const PackageID = Install.PackageID; +const DependencyID = Install.DependencyID; +const Features = Install.Features; +const PackageInstall = Install.PackageInstall; +const PackageNameHash = Install.PackageNameHash; +const Aligner = Install.Aligner; +const ExternalStringMap = Install.ExternalStringMap; +const alignment_bytes_to_repeat_buffer = Install.alignment_bytes_to_repeat_buffer; +const initializeStore = Install.initializeStore; +const invalid_package_id = Install.invalid_package_id; +const ExternalStringList = Install.ExternalStringList; const Resolution = @import("./resolution.zig").Resolution; -const initializeStore = @import("./install.zig").initializeStore; -const invalid_package_id = @import("./install.zig").invalid_package_id; -const JSAst = bun.JSAst; -const Origin = @import("./install.zig").Origin; +const Origin = Install.Origin; const Crypto = @import("../sha.zig").Hashers; -pub const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8; -const zero_hash = std.mem.zeroes(MetaHash); - const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; +const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8; +const zero_hash = std.mem.zeroes(MetaHash); const NameHashMap = std.ArrayHashMapUnmanaged(u32, 
String, ArrayIdentityContext, false); // Serialized data @@ -93,16 +92,16 @@ format: FormatVersion = .v1, meta_hash: MetaHash = zero_hash, -packages: Lockfile.Package.List = Lockfile.Package.List{}, -buffers: Buffers = Buffers{}, +packages: Lockfile.Package.List = .{}, +buffers: Buffers = .{}, /// name -> PackageID || [*]PackageID /// Not for iterating. package_index: PackageIndex.Map, unique_packages: Bitset, string_pool: StringPool, -allocator: std.mem.Allocator, -scratch: Scratch = Scratch{}, +allocator: Allocator, +scratch: Scratch = .{}, scripts: Scripts = .{}, workspace_paths: NameHashMap = .{}, @@ -134,7 +133,7 @@ pub const Scripts = struct { this.postprepare.items.len) > 0; } - pub fn run(this: *Scripts, allocator: std.mem.Allocator, env: *DotEnv.Loader, silent: bool, comptime hook: []const u8) !void { + pub fn run(this: *Scripts, allocator: Allocator, env: *DotEnv.Loader, silent: bool, comptime hook: []const u8) !void { for (@field(this, hook).items) |entry| { std.debug.assert(Fs.FileSystem.instance_loaded); const cwd = Path.joinAbsString( @@ -148,7 +147,7 @@ pub const Scripts = struct { } } - pub fn deinit(this: *Scripts, allocator: std.mem.Allocator) void { + pub fn deinit(this: *Scripts, allocator: Allocator) void { this.preinstall.deinit(allocator); this.install.deinit(allocator); this.postinstall.deinit(allocator); @@ -179,7 +178,7 @@ pub const LoadFromDiskResult = union(Tag) { }; }; -pub fn loadFromDisk(this: *Lockfile, allocator: std.mem.Allocator, log: *logger.Log, filename: stringZ) LoadFromDiskResult { +pub fn loadFromDisk(this: *Lockfile, allocator: Allocator, log: *logger.Log, filename: stringZ) LoadFromDiskResult { std.debug.assert(FileSystem.instance_loaded); var file = std.io.getStdIn(); @@ -199,7 +198,7 @@ pub fn loadFromDisk(this: *Lockfile, allocator: std.mem.Allocator, log: *logger. 
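// The import rework above collapses a dozen `@import("./install.zig").X` lines into one
// module alias. Zig caches @import results, so this is purely a readability change; the
// resulting shape is:
const Install = @import("./install.zig");
const PackageManager = Install.PackageManager;
const PackageID = Install.PackageID;
const DependencyID = Install.DependencyID;
const invalid_package_id = Install.invalid_package_id;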
return this.loadFromBytes(buf, allocator, log); } -pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: std.mem.Allocator, log: *logger.Log) LoadFromDiskResult { +pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: Allocator, log: *logger.Log) LoadFromDiskResult { var stream = Stream{ .buffer = buf, .pos = 0 }; this.format = FormatVersion.current; @@ -324,7 +323,7 @@ pub const Tree = struct { const tree_id = this.depth_stack[depth_buf_len]; const name = this.dependencies[this.trees[tree_id].dependency_id].name.slice(string_buf); - std.mem.copy(u8, this.path_buf[path_written..], name); + bun.copy(u8, this.path_buf[path_written..], name); path_written += name.len; this.path_buf[path_written..][0.."/node_modules".len].* = (std.fs.path.sep_str ++ "node_modules").*; @@ -345,7 +344,7 @@ pub const Tree = struct { }; const Builder = struct { - allocator: std.mem.Allocator, + allocator: Allocator, name_hashes: []const PackageNameHash, list: ArrayList = .{}, resolutions: []const PackageID, @@ -840,7 +839,7 @@ pub const Printer = struct { var lockfile_path_buf2: [bun.MAX_PATH_BYTES]u8 = undefined; pub fn print( - allocator: std.mem.Allocator, + allocator: Allocator, log: *logger.Log, lockfile_path_: string, format: Format, @@ -856,7 +855,7 @@ pub const Printer = struct { lockfile_path_buf2[lockfile_path__.len] = 0; lockfile_path = lockfile_path_buf2[0..lockfile_path__.len :0]; } else { - std.mem.copy(u8, &lockfile_path_buf1, lockfile_path); + bun.copy(u8, &lockfile_path_buf1, lockfile_path); lockfile_path_buf1[lockfile_path_.len] = 0; lockfile_path = lockfile_path_buf1[0..lockfile_path_.len :0]; } @@ -907,7 +906,7 @@ pub const Printer = struct { } pub fn printWithLockfile( - allocator: std.mem.Allocator, + allocator: Allocator, lockfile: *Lockfile, format: Format, comptime Writer: type, @@ -1451,11 +1450,11 @@ inline fn strWithType(this: *Lockfile, comptime Type: type, slicable: Type) stri return slicable.slice(this.buffers.string_bytes.items); } -pub fn initEmpty(this: *Lockfile, allocator: std.mem.Allocator) !void { - this.* = Lockfile{ +pub fn initEmpty(this: *Lockfile, allocator: Allocator) !void { + this.* = .{ .format = Lockfile.FormatVersion.current, - .packages = Lockfile.Package.List{}, - .buffers = Buffers{}, + .packages = .{}, + .buffers = .{}, .package_index = PackageIndex.Map.initContext(allocator, .{}), .unique_packages = try Bitset.initFull(allocator, 0), .string_pool = StringPool.init(allocator), @@ -1574,7 +1573,7 @@ pub const Scratch = struct { duplicate_checker_map: DuplicateCheckerMap = undefined, dependency_list_queue: DependencyQueue = undefined, - pub fn init(allocator: std.mem.Allocator) Scratch { + pub fn init(allocator: Allocator) Scratch { return Scratch{ .dependency_list_queue = DependencyQueue.init(allocator), .duplicate_checker_map = DuplicateCheckerMap.init(allocator), @@ -1583,15 +1582,11 @@ pub const Scratch = struct { }; pub const StringBuilder = struct { - const Allocator = @import("std").mem.Allocator; - const assert = @import("std").debug.assert; - const copy = @import("std").mem.copy; - len: usize = 0, cap: usize = 0, off: usize = 0, ptr: ?[*]u8 = null, - lockfile: *Lockfile = undefined, + lockfile: *Lockfile, pub inline fn count(this: *StringBuilder, slice: string) void { if (String.canInline(slice)) return; @@ -1646,14 +1641,16 @@ pub const StringBuilder = struct { else => @compileError("Invalid type passed to StringBuilder"), } } - assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first + 
if (comptime Environment.allow_assert) { + std.debug.assert(this.len <= this.cap); // didn't count everything + std.debug.assert(this.ptr != null); // must call allocate first + } - copy(u8, this.ptr.?[this.len..this.cap], slice); + bun.copy(u8, this.ptr.?[this.len..this.cap], slice); const final_slice = this.ptr.?[this.len..this.cap][0..slice.len]; this.len += slice.len; - assert(this.len <= this.cap); + if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap); switch (Type) { String => { @@ -1679,19 +1676,21 @@ pub const StringBuilder = struct { } } - assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first + if (comptime Environment.allow_assert) { + std.debug.assert(this.len <= this.cap); // didn't count everything + std.debug.assert(this.ptr != null); // must call allocate first + } var string_entry = this.lockfile.string_pool.getOrPut(hash) catch unreachable; if (!string_entry.found_existing) { - copy(u8, this.ptr.?[this.len..this.cap], slice); + bun.copy(u8, this.ptr.?[this.len..this.cap], slice); const final_slice = this.ptr.?[this.len..this.cap][0..slice.len]; this.len += slice.len; string_entry.value_ptr.* = String.init(this.lockfile.buffers.string_bytes.items, final_slice); } - assert(this.len <= this.cap); + if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap); switch (Type) { String => { @@ -2003,7 +2002,7 @@ pub const Package = extern struct { } pub fn fromNPM( - allocator: std.mem.Allocator, + allocator: Allocator, lockfile: *Lockfile, log: *logger.Log, manifest: *const Npm.PackageManifest, @@ -2225,7 +2224,7 @@ pub const Package = extern struct { }; pub fn generate( - _: std.mem.Allocator, + _: Allocator, from_lockfile: *Lockfile, to_lockfile: *Lockfile, from: *Lockfile.Package, @@ -2286,7 +2285,7 @@ pub const Package = extern struct { pub fn parseMain( package: *Lockfile.Package, lockfile: *Lockfile, - allocator: std.mem.Allocator, + allocator: Allocator, log: *logger.Log, source: logger.Source, comptime features: Features, @@ -2297,7 +2296,7 @@ pub const Package = extern struct { pub fn parse( package: *Lockfile.Package, lockfile: *Lockfile, - allocator: std.mem.Allocator, + allocator: Allocator, log: *logger.Log, source: logger.Source, comptime ResolverContext: type, @@ -2331,7 +2330,7 @@ pub const Package = extern struct { fn parseDependency( lockfile: *Lockfile, - allocator: std.mem.Allocator, + allocator: Allocator, log: *logger.Log, source: logger.Source, comptime group: DependencyGroup, @@ -2455,7 +2454,7 @@ pub const Package = extern struct { fn processWorkspaceNamesArray( workspace_names_ptr: *[]string, - allocator: std.mem.Allocator, + allocator: Allocator, log: *logger.Log, arr: *JSAst.E.Array, source: *const logger.Source, @@ -2578,7 +2577,7 @@ pub const Package = extern struct { fn parseWithJSON( package: *Lockfile.Package, lockfile: *Lockfile, - allocator: std.mem.Allocator, + allocator: Allocator, log: *logger.Log, source: logger.Source, json: Expr, @@ -2833,7 +2832,7 @@ pub const Package = extern struct { try lockfile.buffers.resolutions.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); const total_len = lockfile.buffers.dependencies.items.len + total_dependencies_count; - if (Environment.allow_assert) std.debug.assert(lockfile.buffers.dependencies.items.len == lockfile.buffers.resolutions.items.len); + if (comptime Environment.allow_assert) std.debug.assert(lockfile.buffers.dependencies.items.len == 
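// The gated asserts above ("didn't count everything", "must call allocate first") come
// from a two-pass builder protocol: count every slice, allocate the backing buffer once,
// then append. A self-contained toy version of that protocol for illustration; it is not
// the lockfile StringBuilder itself.
const std = @import("std");

const ToyBuilder = struct {
    len: usize = 0,
    cap: usize = 0,
    ptr: ?[*]u8 = null,

    fn count(this: *ToyBuilder, slice: []const u8) void {
        this.cap += slice.len;
    }

    fn allocate(this: *ToyBuilder, allocator: std.mem.Allocator) !void {
        this.ptr = (try allocator.alloc(u8, this.cap)).ptr;
    }

    fn append(this: *ToyBuilder, slice: []const u8) []const u8 {
        std.debug.assert(this.ptr != null); // must call allocate first
        std.debug.assert(this.len + slice.len <= this.cap); // didn't count everything
        const out = this.ptr.?[this.len .. this.len + slice.len];
        std.mem.copy(u8, out, slice);
        this.len += slice.len;
        return out;
    }
};

test "count, then allocate, then append" {
    var b = ToyBuilder{};
    b.count("foo");
    b.count("bar");
    try b.allocate(std.testing.allocator);
    defer std.testing.allocator.free(b.ptr.?[0..b.cap]);
    _ = b.append("foo");
    try std.testing.expectEqualStrings("bar", b.append("bar"));
}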
lockfile.buffers.resolutions.items.len); const off = lockfile.buffers.dependencies.items.len; var package_dependencies = lockfile.buffers.dependencies.items.ptr[off..total_len]; @@ -2903,7 +2902,7 @@ pub const Package = extern struct { std.debug.assert(i == extern_strings.len); package.bin = .{ .tag = .map, - .value = .{ .map = @import("./install.zig").ExternalStringList.init(lockfile.buffers.extern_strings.items, extern_strings) }, + .value = .{ .map = ExternalStringList.init(lockfile.buffers.extern_strings.items, extern_strings) }, }; }, } @@ -3216,7 +3215,7 @@ pub const Package = extern struct { pub fn load( stream: *Stream, end: usize, - allocator: std.mem.Allocator, + allocator: Allocator, ) !Lockfile.Package.List { var reader = stream.reader(); @@ -3279,7 +3278,7 @@ const Buffers = struct { // node_modules_package_ids: PackageIDList = PackageIDList{}, string_bytes: StringBuffer = .{}, - pub fn deinit(this: *Buffers, allocator: std.mem.Allocator) void { + pub fn deinit(this: *Buffers, allocator: Allocator) void { this.trees.deinit(allocator); this.resolutions.deinit(allocator); this.dependencies.deinit(allocator); @@ -3287,7 +3286,7 @@ const Buffers = struct { this.string_bytes.deinit(allocator); } - pub fn preallocate(this: *Buffers, that: Buffers, allocator: std.mem.Allocator) !void { + pub fn preallocate(this: *Buffers, that: Buffers, allocator: Allocator) !void { try this.trees.ensureTotalCapacity(allocator, that.trees.items.len); try this.resolutions.ensureTotalCapacity(allocator, that.resolutions.items.len); try this.dependencies.ensureTotalCapacity(allocator, that.dependencies.items.len); @@ -3335,7 +3334,7 @@ const Buffers = struct { }; }; - pub fn readArray(stream: *Stream, allocator: std.mem.Allocator, comptime ArrayList: type) !ArrayList { + pub fn readArray(stream: *Stream, allocator: Allocator, comptime ArrayList: type) !ArrayList { const arraylist: ArrayList = undefined; const PointerType = std.meta.Child(@TypeOf(arraylist.items.ptr)); @@ -3401,7 +3400,7 @@ const Buffers = struct { } } - pub fn save(this: Buffers, allocator: std.mem.Allocator, comptime StreamType: type, stream: StreamType, comptime Writer: type, writer: Writer) !void { + pub fn save(this: Buffers, allocator: Allocator, comptime StreamType: type, stream: StreamType, comptime Writer: type, writer: Writer) !void { inline for (sizes.names) |name| { if (PackageManager.instance.options.log_level.isVerbose()) { Output.prettyErrorln("Saving {d} {s}", .{ @field(this, name).items.len, name }); @@ -3461,7 +3460,7 @@ const Buffers = struct { return error.@"Lockfile is missing resolution data"; } - pub fn load(stream: *Stream, allocator: std.mem.Allocator, log: *logger.Log) !Buffers { + pub fn load(stream: *Stream, allocator: Allocator, log: *logger.Log) !Buffers { var this = Buffers{}; var external_dependency_list_: std.ArrayListUnmanaged(Dependency.External) = std.ArrayListUnmanaged(Dependency.External){}; @@ -3571,7 +3570,7 @@ pub const Serializer = struct { pub fn load( lockfile: *Lockfile, stream: *Stream, - allocator: std.mem.Allocator, + allocator: Allocator, log: *logger.Log, ) !void { var reader = stream.reader(); @@ -3641,7 +3640,7 @@ pub fn hasMetaHashChanged(this: *Lockfile, print_name_version_string: bool) !boo this.meta_hash = try this.generateMetaHash(print_name_version_string); return !strings.eqlLong(&previous_meta_hash, &this.meta_hash, false); } -pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash { +fn generateMetaHash(this: *Lockfile, print_name_version_string: 
bool) !MetaHash { if (this.packages.len <= 1) return zero_hash; diff --git a/src/install/npm.zig b/src/install/npm.zig index 66f9d02cb..e7fa24b1a 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -180,10 +180,10 @@ pub const Registry = struct { const hashed = HTTPClient.hashHeaderName(header.name); switch (hashed) { - HTTPClient.hashHeaderName("last-modified") => { + HTTPClient.hashHeaderConst("last-modified") => { newly_last_modified = header.value; }, - HTTPClient.hashHeaderName("etag") => { + HTTPClient.hashHeaderConst("etag") => { new_etag = header.value; }, else => {}, @@ -193,7 +193,7 @@ pub const Registry = struct { var new_etag_buf: [64]u8 = undefined; if (new_etag.len < new_etag_buf.len) { - std.mem.copy(u8, &new_etag_buf, new_etag); + bun.copy(u8, &new_etag_buf, new_etag); new_etag = new_etag_buf[0..new_etag.len]; } diff --git a/src/install/repository.zig b/src/install/repository.zig index 557953d76..c63f1294e 100644 --- a/src/install/repository.zig +++ b/src/install/repository.zig @@ -79,7 +79,7 @@ pub const Repository = extern struct { buf: []const u8, repository: *const Repository, pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - if (Environment.allow_assert) std.debug.assert(formatter.label.len > 0); + if (comptime Environment.allow_assert) std.debug.assert(formatter.label.len > 0); try writer.writeAll(formatter.label); const repo = formatter.repository.repo.slice(formatter.buf); @@ -124,13 +124,13 @@ pub const Repository = extern struct { pub fn tryHTTPS(url: string) ?string { if (strings.hasPrefixComptime(url, "ssh://")) { final_path_buf[0.."https".len].* = "https".*; - std.mem.copy(u8, final_path_buf["https".len..], url["ssh".len..]); + bun.copy(u8, final_path_buf["https".len..], url["ssh".len..]); return final_path_buf[0..(url.len - "ssh".len + "https".len)]; } if (Dependency.isSCPLikePath(url)) { final_path_buf[0.."https://".len].* = "https://".*; var rest = final_path_buf["https://".len..]; - std.mem.copy(u8, rest, url); + bun.copy(u8, rest, url); if (strings.indexOfChar(rest, ':')) |colon| rest[colon] = '/'; return final_path_buf[0..(url.len + "https://".len)]; } diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig index c16f0c8ed..c9cc682aa 100644 --- a/src/install/resolvers/folder_resolver.zig +++ b/src/install/resolvers/folder_resolver.zig @@ -84,10 +84,9 @@ pub const FolderResolution = union(Tag) { if (strings.startsWithChar(normalized, '.')) { var tempcat: [bun.MAX_PATH_BYTES]u8 = undefined; - std.mem.copy(u8, &tempcat, normalized); - tempcat[normalized.len] = std.fs.path.sep; - std.mem.copy(u8, tempcat[normalized.len + 1 ..], "package.json"); - var parts = [_]string{ FileSystem.instance.top_level_dir, tempcat[0 .. normalized.len + 1 + "package.json".len] }; + bun.copy(u8, &tempcat, normalized); + tempcat[normalized.len..][0.."/package.json".len].* = (std.fs.path.sep_str ++ "package.json").*; + var parts = [_]string{ FileSystem.instance.top_level_dir, tempcat[0 .. normalized.len + "/package.json".len] }; abs = FileSystem.instance.absBuf(&parts, joined); rel = FileSystem.instance.relative(FileSystem.instance.top_level_dir, abs[0 .. 
abs.len - "/package.json".len]); } else { @@ -110,10 +109,9 @@ pub const FolderResolution = union(Tag) { }, else => {}, } - std.mem.copy(u8, remain, normalized); - remain[normalized.len] = std.fs.path.sep; - remain[normalized.len + 1 ..][0.."package.json".len].* = "package.json".*; - remain = remain[normalized.len + 1 + "package.json".len ..]; + bun.copy(u8, remain, normalized); + remain[normalized.len..][0.."/package.json".len].* = (std.fs.path.sep_str ++ "package.json").*; + remain = remain[normalized.len + "/package.json".len ..]; abs = joined[0 .. joined.len - remain.len]; // We store the folder name without package.json rel = abs[0 .. abs.len - "/package.json".len]; diff --git a/src/install/semver.zig b/src/install/semver.zig index f9e16968f..e96b1cddc 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -1,3 +1,5 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; const bun = @import("bun"); const string = bun.string; const Output = bun.Output; @@ -8,7 +10,7 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const C = bun.C; -const std = @import("std"); +const IdentityContext = @import("../identity_context.zig").IdentityContext; /// String type that stores either an offset/length into an external buffer or a string inline directly pub const String = extern struct { @@ -301,11 +303,6 @@ pub const String = extern struct { } pub const Builder = struct { - const Allocator = @import("std").mem.Allocator; - const assert = @import("std").debug.assert; - const copy = @import("std").mem.copy; - const IdentityContext = @import("../identity_context.zig").IdentityContext; - len: usize = 0, cap: usize = 0, ptr: ?[*]u8 = null, @@ -335,7 +332,7 @@ pub const String = extern struct { else &[_]u8{}; } - pub fn allocate(this: *Builder, allocator: std.mem.Allocator) !void { + pub fn allocate(this: *Builder, allocator: Allocator) !void { var ptr_ = try allocator.alloc(u8, this.cap); this.ptr = ptr_.ptr; } @@ -359,14 +356,16 @@ pub const String = extern struct { } } - assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first + if (comptime Environment.allow_assert) { + std.debug.assert(this.len <= this.cap); // didn't count everything + std.debug.assert(this.ptr != null); // must call allocate first + } - copy(u8, this.ptr.?[this.len..this.cap], slice_); + bun.copy(u8, this.ptr.?[this.len..this.cap], slice_); const final_slice = this.ptr.?[this.len..this.cap][0..slice_.len]; this.len += slice_.len; - assert(this.len <= this.cap); + if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap); switch (Type) { String => { @@ -392,14 +391,16 @@ pub const String = extern struct { else => @compileError("Invalid type passed to StringBuilder"), } } - assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first + if (comptime Environment.allow_assert) { + std.debug.assert(this.len <= this.cap); // didn't count everything + std.debug.assert(this.ptr != null); // must call allocate first + } - copy(u8, this.ptr.?[this.len..this.cap], slice_); + bun.copy(u8, this.ptr.?[this.len..this.cap], slice_); const final_slice = this.ptr.?[this.len..this.cap][0..slice_.len]; this.len += slice_.len; - assert(this.len <= this.cap); + if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap); switch (Type) { String => { @@ -425,19 +426,21 @@ pub const String = extern struct { } } - 
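// The folder_resolver.zig hunks above replace "copy the suffix, then poke the separator"
// with a single comptime-sized store such as
//   remain[normalized.len..][0.."/package.json".len].* = (std.fs.path.sep_str ++ "package.json").*;
// A standalone illustration of that idiom ("/" is used directly here instead of
// std.fs.path.sep_str so the expected string stays portable):
const std = @import("std");

test "append a literal suffix with one sized store" {
    var buf: [32]u8 = undefined;
    const dir = "pkgs";
    std.mem.copy(u8, &buf, dir);
    // "/package.json".len is comptime-known, so the left-hand side is a *[13]u8 and the
    // assignment is a single fixed-size copy rather than a byte-by-byte loop.
    buf[dir.len..][0.."/package.json".len].* = "/package.json".*;
    try std.testing.expectEqualStrings("pkgs/package.json", buf[0 .. dir.len + "/package.json".len]);
}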
assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first + if (comptime Environment.allow_assert) { + std.debug.assert(this.len <= this.cap); // didn't count everything + std.debug.assert(this.ptr != null); // must call allocate first + } var string_entry = this.string_pool.getOrPut(hash) catch unreachable; if (!string_entry.found_existing) { - copy(u8, this.ptr.?[this.len..this.cap], slice_); + bun.copy(u8, this.ptr.?[this.len..this.cap], slice_); const final_slice = this.ptr.?[this.len..this.cap][0..slice_.len]; this.len += slice_.len; string_entry.value_ptr.* = String.init(this.allocatedSlice(), final_slice); } - assert(this.len <= this.cap); + if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap); switch (Type) { String => { @@ -765,7 +768,7 @@ pub const Version = extern struct { pre = this.pre.value; } else { const pre_slice = this.pre.slice(slice); - std.mem.copy(u8, buf.*, pre_slice); + bun.copy(u8, buf.*, pre_slice); pre = String.init(buf.*, buf.*[0..pre_slice.len]); buf.* = buf.*[pre_slice.len..]; } @@ -774,7 +777,7 @@ pub const Version = extern struct { build = this.build.value; } else { const build_slice = this.build.slice(slice); - std.mem.copy(u8, buf.*, build_slice); + bun.copy(u8, buf.*, build_slice); build = String.init(buf.*, buf.*[0..build_slice.len]); buf.* = buf.*[build_slice.len..]; } @@ -811,11 +814,11 @@ pub const Version = extern struct { var multi_tag_warn = false; // TODO: support multiple tags - pub fn parse(allocator: std.mem.Allocator, sliced_string: SlicedString) TagResult { + pub fn parse(allocator: Allocator, sliced_string: SlicedString) TagResult { return parseWithPreCount(allocator, sliced_string, 0); } - pub fn parseWithPreCount(_: std.mem.Allocator, sliced_string: SlicedString, initial_pre_count: u32) TagResult { + pub fn parseWithPreCount(_: Allocator, sliced_string: SlicedString, initial_pre_count: u32) TagResult { var input = sliced_string.slice; var build_count: u32 = 0; var pre_count: u32 = initial_pre_count; @@ -929,7 +932,7 @@ pub const Version = extern struct { stopped_at: u32 = 0, }; - pub fn parse(sliced_string: SlicedString, allocator: std.mem.Allocator) ParseResult { + pub fn parse(sliced_string: SlicedString, allocator: Allocator) ParseResult { var input = sliced_string.slice; var result = ParseResult{}; @@ -1338,7 +1341,7 @@ pub const Query = struct { return lhs_next.eql(rhs_next); } - pub fn andRange(self: *List, allocator: std.mem.Allocator, range: Range) !void { + pub fn andRange(self: *List, allocator: Allocator, range: Range) !void { if (!self.head.range.hasLeft() and !self.head.range.hasRight()) { self.head.range = range; return; @@ -1359,7 +1362,7 @@ pub const Query = struct { pub const Group = struct { head: List = List{}, tail: ?*List = null, - allocator: std.mem.Allocator, + allocator: Allocator, input: string = "", flags: FlagsBitSet = FlagsBitSet.initEmpty(), @@ -1686,7 +1689,7 @@ pub const Query = struct { }; pub fn parse( - allocator: std.mem.Allocator, + allocator: Allocator, input: string, sliced: SlicedString, ) !Group { diff --git a/src/js_ast.zig b/src/js_ast.zig index 82032b058..dd5efd65b 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -103,14 +103,14 @@ pub fn NewBaseStore(comptime Union: anytype, comptime count: usize) type { var used = _self.overflow.allocator.dupe(*Block, _self.overflow.slice()) catch unreachable; var new_head = _self.overflow.allocator.create(Block) catch unreachable; - new_head.* = Block{}; + new_head.* = .{}; var 
to_move = _self.overflow.ptrs[0.._self.overflow.allocated][_self.overflow.used..]; if (to_move.len > 0) { to_move = to_move[1..]; } - std.mem.copyBackwards(*Block, _self.overflow.ptrs[1..], to_move); + bun.copy(*Block, _self.overflow.ptrs[1..], to_move); _self.overflow.ptrs[0] = new_head; _self.overflow.allocated = 1 + @truncate(Overflow.UsedSize, to_move.len); reset(); diff --git a/src/js_parser.zig b/src/js_parser.zig index 4a6fdac35..bc9d4e157 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -289,9 +289,9 @@ const JSXTag = struct { } var _name = try p.allocator.alloc(u8, name.len + 1 + member.len); - std.mem.copy(u8, _name, name); + bun.copy(u8, _name, name); _name[name.len] = '.'; - std.mem.copy(u8, _name[name.len + 1 .. _name.len], member); + bun.copy(u8, _name[name.len + 1 .. _name.len], member); name = _name; tag_range.len = member_range.loc.start + member_range.len - tag_range.loc.start; tag = p.newExpr(E.Dot{ .target = tag, .name = member, .name_loc = member_range.loc }, loc); @@ -3313,19 +3313,19 @@ pub const Parser = struct { var remaining_parts = _parts; if (before_len > 0) { const parts_to_copy = before.items; - std.mem.copy(js_ast.Part, remaining_parts, parts_to_copy); + bun.copy(js_ast.Part, remaining_parts, parts_to_copy); remaining_parts = remaining_parts[parts_to_copy.len..]; } if (parts_len > 0) { const parts_to_copy = parts.items; - std.mem.copy(js_ast.Part, remaining_parts, parts_to_copy); + bun.copy(js_ast.Part, remaining_parts, parts_to_copy); remaining_parts = remaining_parts[parts_to_copy.len..]; } if (after_len > 0) { const parts_to_copy = after.items; - std.mem.copy(js_ast.Part, remaining_parts, parts_to_copy); + bun.copy(js_ast.Part, remaining_parts, parts_to_copy); } parts_slice = _parts; @@ -4088,7 +4088,7 @@ fn NewParser_( for (parts_) |part| { if (part.tag == .none) { - std.mem.copy(Stmt, stmts_remain, part.stmts); + bun.copy(Stmt, stmts_remain, part.stmts); stmts_remain = stmts_remain[part.stmts.len..]; } } @@ -4582,12 +4582,8 @@ fn NewParser_( var clause_items = try allocator.alloc(js_ast.ClauseItem, imports.len); var stmts = try allocator.alloc(Stmt, 1 + if (additional_stmt != null) @as(usize, 1) else @as(usize, 0)); var declared_symbols = try allocator.alloc(js_ast.DeclaredSymbol, imports.len); - std.mem.copy(u8, namespace_identifier[0..suffix.len], suffix); - std.mem.copy( - u8, - namespace_identifier[suffix.len..namespace_identifier.len], - import_path_identifier[0..import_path_identifier.len], - ); + bun.copy(u8, namespace_identifier, suffix); + bun.copy(u8, namespace_identifier[suffix.len..], import_path_identifier); const namespace_ref = try p.newSymbol(.other, namespace_identifier); try p.module_scope.generated.append(allocator, namespace_ref); @@ -12739,7 +12735,7 @@ fn NewParser_( const end_tag = try JSXTag.parse(P, p); if (!strings.eql(end_tag.name, tag.name)) { - try p.log.addRangeErrorFmt(p.source, end_tag.range, p.allocator, "Expected closing tag </{s}> to match opening tag <{s}>", .{ + try p.log.addRangeErrorFmt(p.source, end_tag.range, p.allocator, "Expected closing tag \\</{s}> to match opening tag \\<{s}>", .{ end_tag.name, tag.name, }); @@ -13333,7 +13329,7 @@ fn NewParser_( p.source, tag.loc, p.allocator, - "<{s} /> is a void element and must not have \"children\"", + "\\<{s} /> is a void element and must not have \"children\"", .{tag.data.e_string.slice(p.allocator)}, ) catch {}; } @@ -13352,7 +13348,7 @@ fn NewParser_( if (e_.properties.len > 0) { if (e_.key) |key| { var props = p.allocator.alloc(G.Property, e_.properties.len 
+ 1) catch unreachable; - std.mem.copy(G.Property, props, e_.properties.slice()); + bun.copy(G.Property, props, e_.properties.slice()); props[props.len - 1] = G.Property{ .key = Expr{ .loc = key.loc, .data = keyExprData }, .value = key }; args[1] = p.newExpr(E.Object{ .properties = G.Property.List.init(props) }, expr.loc); } else { @@ -17565,7 +17561,7 @@ fn NewParser_( // are not allowed to assign to this symbol (it throws a TypeError). const name = p.symbols.items[class_name_ref.innerIndex()].original_name; var identifier = p.allocator.alloc(u8, name.len + 1) catch unreachable; - std.mem.copy(u8, identifier[1..identifier.len], name); + bun.copy(u8, identifier[1..identifier.len], name); identifier[0] = '_'; shadow_ref = p.newSymbol(Symbol.Kind.cconst, identifier) catch unreachable; p.recordDeclaredSymbol(shadow_ref) catch unreachable; @@ -17728,7 +17724,7 @@ fn NewParser_( )) catch unreachable; // O(N) class_body.items.len += 1; - std.mem.copyBackwards(G.Property, class_body.items[j + 1 .. class_body.items.len], class_body.items[j .. class_body.items.len - 1]); + bun.copy(G.Property, class_body.items[j + 1 ..], class_body.items[j .. class_body.items.len - 1]); class_body.items[j] = G.Property{ .key = ident }; j += 1; }, @@ -18649,8 +18645,8 @@ fn NewParser_( var export_name_string = export_name_string_remainder[0 .. named_export.key_ptr.len + "$$hmr_".len]; export_name_string_remainder = export_name_string_remainder[export_name_string.len..]; - std.mem.copy(u8, export_name_string, "$$hmr_"); - std.mem.copy(u8, export_name_string["$$hmr_".len..], named_export.key_ptr.*); + bun.copy(u8, export_name_string, "$$hmr_"); + bun.copy(u8, export_name_string["$$hmr_".len..], named_export.key_ptr.*); var name_ref = try p.declareSymbol(.other, logger.Loc.Empty, export_name_string); @@ -18892,7 +18888,7 @@ fn NewParser_( // const named_import = named_import_entry.value; // var buf = try p.allocator.alloc(u32, named_import.local_parts_with_uses.len + 1); // if (named_import.local_parts_with_uses.len > 0) { - // std.mem.copy(u32, buf, named_import.local_parts_with_uses); + // bun.copy(u32, buf, named_import.local_parts_with_uses); // } // buf[buf.len - 1] = @intCast(u32, i); // named_import_entry.value.local_parts_with_uses = buf; diff --git a/src/json_parser.zig b/src/json_parser.zig index 4c6843be4..956d23262 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -471,7 +471,7 @@ pub const PackageJSONVersionChecker = struct { p.found_name_buf.len, ); - std.mem.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]); + bun.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]); p.found_name = p.found_name_buf[0..len]; p.has_found_name = true; } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) { @@ -479,7 +479,7 @@ pub const PackageJSONVersionChecker = struct { value.data.e_string.data.len, p.found_version_buf.len, ); - std.mem.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]); + bun.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]); p.found_version = p.found_version_buf[0..len]; p.has_found_version = true; } @@ -918,7 +918,7 @@ fn expectPrintedJSON(_contents: string, expected: string) !void { Stmt.Data.Store.reset(); } var contents = default_allocator.alloc(u8, _contents.len + 1) catch unreachable; - std.mem.copy(u8, contents, _contents); + bun.copy(u8, contents, _contents); contents[contents.len - 1] = ';'; var log = logger.Log.init(default_allocator); defer log.msgs.deinit(); diff --git 
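// The js_ast.zig and js_parser.zig hunks above also route what used to be
// std.mem.copyBackwards through bun.copy. That substitution is only equivalent if the
// helper is overlap-aware, because these call sites shift elements to the right inside
// one buffer. The direction issue in isolation:
const std = @import("std");

test "right-shift inside one buffer must copy back-to-front" {
    var items = [_]u8{ 1, 2, 3, 4, 0 };
    // Open a hole at index 1 by moving items[1..4] one slot to the right. The destination
    // overlaps and starts after the source, so a naive forward copy would read bytes it
    // has already overwritten; copyBackwards (or a memmove-style helper) keeps it correct.
    std.mem.copyBackwards(u8, items[2..5], items[1..4]);
    items[1] = 9;
    try std.testing.expectEqualSlices(u8, &[_]u8{ 1, 9, 2, 3, 4 }, &items);
}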
a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig index 8d0e86d76..5b818a49b 100644 --- a/src/libarchive/libarchive.zig +++ b/src/libarchive/libarchive.zig @@ -450,7 +450,7 @@ pub const Archive = struct { break :brk __pathname[0..index]; }; var temp_buf: [1024]u8 = undefined; - std.mem.copy(u8, &temp_buf, path_to_use_); + bun.copy(u8, &temp_buf, path_to_use_); var path_to_use: string = temp_buf[0..path_to_use_.len]; if (!is_already_top_level) { temp_buf[path_to_use_.len] = std.fs.path.sep; diff --git a/src/linker.zig b/src/linker.zig index 8e4daf5b9..b8f1da526 100644 --- a/src/linker.zig +++ b/src/linker.zig @@ -802,7 +802,7 @@ pub const Linker = struct { if (result.ast.needs_runtime and (result.ast.runtime_import_record_id == null or import_records.len == 0)) { var new_import_records = try linker.allocator.alloc(ImportRecord, import_records.len + 1); - std.mem.copy(ImportRecord, new_import_records, import_records); + bun.copy(ImportRecord, new_import_records, import_records); new_import_records[new_import_records.len - 1] = ImportRecord{ .kind = .stmt, @@ -896,11 +896,11 @@ pub const Linker = struct { const basename = try linker.getHashedFilename(basepath, null); var dir = basepath.name.dirWithTrailingSlash(); var _pretty = try linker.allocator.alloc(u8, dir.len + basename.len + basepath.name.ext.len); - std.mem.copy(u8, _pretty, dir); + bun.copy(u8, _pretty, dir); var remaining_pretty = _pretty[dir.len..]; - std.mem.copy(u8, remaining_pretty, basename); + bun.copy(u8, remaining_pretty, basename); remaining_pretty = remaining_pretty[basename.len..]; - std.mem.copy(u8, remaining_pretty, basepath.name.ext); + bun.copy(u8, remaining_pretty, basepath.name.ext); pretty = _pretty; relative_name = try linker.allocator.dupe(u8, relative_name); } else { diff --git a/src/linux_c.zig b/src/linux_c.zig index 8f77db8c7..04faf210b 100644 --- a/src/linux_c.zig +++ b/src/linux_c.zig @@ -369,7 +369,7 @@ pub fn get_system_loadavg() [3]f64 { pub fn get_version(name_buffer: *[std.os.HOST_NAME_MAX]u8) []const u8 { const uts = std.os.uname(); const result = bun.sliceTo(&uts.version, 0); - std.mem.copy(u8, name_buffer, result); + bun.copy(u8, name_buffer, result); return name_buffer[0..result.len]; } @@ -377,7 +377,7 @@ pub fn get_version(name_buffer: *[std.os.HOST_NAME_MAX]u8) []const u8 { pub fn get_release(name_buffer: *[std.os.HOST_NAME_MAX]u8) []const u8 { const uts = std.os.uname(); const result = bun.sliceTo(&uts.release, 0); - std.mem.copy(u8, name_buffer, result); + bun.copy(u8, name_buffer, result); return name_buffer[0..result.len]; } diff --git a/src/logger.zig b/src/logger.zig index da06487b9..0da7789b4 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -653,7 +653,7 @@ pub const Log = struct { pub fn addVerbose(log: *Log, source: ?*const Source, loc: Loc, text: string) !void { @setCold(true); - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .verbose, .data = rangeData(source, Range{ .loc = loc }, text), }); @@ -780,13 +780,20 @@ pub const Log = struct { @setCold(true); if (!Kind.shouldPrint(.verbose, log.level)) return; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .verbose, .data = rangeData(source, Range{ .loc = loc }, text), .notes = notes, }); } + inline fn allocPrint(allocator: std.mem.Allocator, comptime fmt: string, args: anytype) !string { + return if (Output.enable_ansi_colors) + try std.fmt.allocPrint(allocator, Output.prettyFmt(fmt, true), args) + else + try std.fmt.allocPrint(allocator, Output.prettyFmt(fmt, false), args); + } + inline fn _addResolveErrorWithLevel( 
log: *Log, source: *const Source, @@ -799,7 +806,7 @@ pub const Log = struct { comptime is_error: bool, err: anyerror, ) !void { - const text = try std.fmt.allocPrint(allocator, fmt, args); + const text = try allocPrint(allocator, fmt, args); // TODO: fix this. this is stupid, it should be returned in allocPrint. const specifier = BabyString.in(text, args.@"0"); if (comptime is_error) { @@ -915,7 +922,7 @@ pub const Log = struct { pub fn addRangeError(log: *Log, source: ?*const Source, r: Range, text: string) !void { @setCold(true); log.errors += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .err, .data = rangeData(source, r, text), }); @@ -924,18 +931,18 @@ pub const Log = struct { pub fn addRangeErrorFmt(log: *Log, source: ?*const Source, r: Range, allocator: std.mem.Allocator, comptime text: string, args: anytype) !void { @setCold(true); log.errors += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .err, - .data = rangeData(source, r, std.fmt.allocPrint(allocator, text, args) catch unreachable), + .data = rangeData(source, r, allocPrint(allocator, text, args) catch unreachable), }); } pub fn addRangeErrorFmtWithNotes(log: *Log, source: ?*const Source, r: Range, allocator: std.mem.Allocator, notes: []Data, comptime text: string, args: anytype) !void { @setCold(true); log.errors += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .err, - .data = rangeData(source, r, std.fmt.allocPrint(allocator, text, args) catch unreachable), + .data = rangeData(source, r, allocPrint(allocator, text, args) catch unreachable), .notes = notes, }); } @@ -943,9 +950,9 @@ pub const Log = struct { pub fn addErrorFmt(log: *Log, source: ?*const Source, l: Loc, allocator: std.mem.Allocator, comptime text: string, args: anytype) !void { @setCold(true); log.errors += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .err, - .data = rangeData(source, Range{ .loc = l }, std.fmt.allocPrint(allocator, text, args) catch unreachable), + .data = rangeData(source, Range{ .loc = l }, allocPrint(allocator, text, args) catch unreachable), }); } @@ -953,7 +960,7 @@ pub const Log = struct { @setCold(true); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .warn, .data = rangeData(source, r, text), }); @@ -963,9 +970,9 @@ pub const Log = struct { @setCold(true); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .warn, - .data = rangeData(source, Range{ .loc = l }, std.fmt.allocPrint(allocator, text, args) catch unreachable), + .data = rangeData(source, Range{ .loc = l }, allocPrint(allocator, text, args) catch unreachable), }); } @@ -973,9 +980,9 @@ pub const Log = struct { @setCold(true); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .warn, - .data = rangeData(source, r, std.fmt.allocPrint(allocator, text, args) catch unreachable), + .data = rangeData(source, r, allocPrint(allocator, text, args) catch unreachable), }); } @@ -995,11 +1002,11 @@ pub const Log = struct { log.warnings += 1; var notes = try allocator.alloc(Data, 1); - notes[0] = rangeData(source, note_range, std.fmt.allocPrint(allocator, note_fmt, note_args) catch unreachable); + notes[0] = rangeData(source, note_range, allocPrint(allocator, note_fmt, note_args) catch unreachable); - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .warn, - .data = rangeData(source, r, std.fmt.allocPrint(allocator, fmt, args) catch unreachable), + .data = 
rangeData(source, r, allocPrint(allocator, fmt, args) catch unreachable), .notes = notes, }); } @@ -1008,7 +1015,7 @@ pub const Log = struct { @setCold(true); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .warn, .data = rangeData(source, Range{ .loc = l }, text), }); @@ -1017,7 +1024,7 @@ pub const Log = struct { pub fn addRangeDebug(log: *Log, source: ?*const Source, r: Range, text: string) !void { @setCold(true); if (!Kind.shouldPrint(.debug, log.level)) return; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .debug, .data = rangeData(source, r, text), }); @@ -1027,7 +1034,7 @@ pub const Log = struct { @setCold(true); if (!Kind.shouldPrint(.debug, log.level)) return; // log.de += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = Kind.debug, .data = rangeData(source, r, text), .notes = notes, @@ -1037,7 +1044,7 @@ pub const Log = struct { pub fn addRangeErrorWithNotes(log: *Log, source: ?*const Source, r: Range, text: string, notes: []Data) !void { @setCold(true); log.errors += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = Kind.err, .data = rangeData(source, r, text), .notes = notes, @@ -1048,7 +1055,7 @@ pub const Log = struct { @setCold(true); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; - try log.addMsg(Msg{ + try log.addMsg(.{ .kind = .warning, .data = rangeData(source, r, text), .notes = notes, @@ -1073,7 +1080,7 @@ pub const Log = struct { pub fn addError(self: *Log, _source: ?*const Source, loc: Loc, text: string) !void { @setCold(true); self.errors += 1; - try self.addMsg(Msg{ .kind = .err, .data = rangeData(_source, Range{ .loc = loc }, text) }); + try self.addMsg(.{ .kind = .err, .data = rangeData(_source, Range{ .loc = loc }, text) }); } pub fn printForLogLevel(self: *Log, to: anytype) !void { diff --git a/src/open.zig b/src/open.zig index a496e9138..853b51abd 100644 --- a/src/open.zig +++ b/src/open.zig @@ -350,7 +350,7 @@ pub const EditorContext = struct { var basename_buf: [512]u8 = undefined; var basename = std.fs.path.basename(id); if (strings.endsWith(basename, ".bun") and basename.len < 499) { - std.mem.copy(u8, &basename_buf, basename); + bun.copy(u8, &basename_buf, basename); basename_buf[basename.len..][0..3].* = ".js".*; basename = basename_buf[0 .. 
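// The allocPrint helper added to logger.zig above funnels every formatted message through
// Output.prettyFmt, so <cyan>/<d>/<r> markup is expanded into ANSI escapes only when
// Output.enable_ansi_colors is set. A runnable toy with the same shape; the comptime tag
// rewriting is replaced here by two hard-coded variants of the format string.
const std = @import("std");

var enable_ansi_colors: bool = false;

fn allocStatus(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    return if (enable_ansi_colors)
        try std.fmt.allocPrint(allocator, "\x1b[36m{s}\x1b[0m installed", .{name})
    else
        try std.fmt.allocPrint(allocator, "{s} installed", .{name});
}

test "the runtime flag picks which expansion gets allocated" {
    const msg = try allocStatus(std.testing.allocator, "left-pad");
    defer std.testing.allocator.free(msg);
    try std.testing.expectEqualStrings("left-pad installed", msg);
}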
basename.len + 3]; } diff --git a/src/options.zig b/src/options.zig index bf0c21d6b..addc67a8c 100644 --- a/src/options.zig +++ b/src/options.zig @@ -2045,7 +2045,7 @@ pub const EntryPoint = struct { var out = try allocator.alloc(u8, str.len + 2); out[0] = '.'; out[1] = '/'; - std.mem.copy(u8, out[2..], str); + bun.copy(u8, out[2..], str); return out; } } diff --git a/src/output.zig b/src/output.zig index e46e824e0..cb57343ad 100644 --- a/src/output.zig +++ b/src/output.zig @@ -419,7 +419,7 @@ pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_f }.log; } -// Valid colors: +// Valid "colors": // <black> // <blue> // <cyan> @@ -432,7 +432,7 @@ pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_f // <d> - dim // </r> - reset // <r> - reset -pub const ED = "\x1b["; +const ED = "\x1b["; pub const color_map = ComptimeStringMap(string, .{ &.{ "black", ED ++ "30m" }, &.{ "blue", ED ++ "34m" }, @@ -446,7 +446,7 @@ pub const color_map = ComptimeStringMap(string, .{ &.{ "white", ED ++ "37m" }, &.{ "yellow", ED ++ "33m" }, }); -pub const RESET = "\x1b[0m"; +const RESET: string = "\x1b[0m"; pub fn prettyFmt(comptime fmt: string, comptime is_enabled: bool) string { comptime var new_fmt: [fmt.len * 4]u8 = undefined; comptime var new_fmt_i: usize = 0; @@ -505,20 +505,11 @@ pub fn prettyFmt(comptime fmt: string, comptime is_enabled: bool) string { @compileError("Invalid color name passed: " ++ color_name); } }; - var orig = new_fmt_i; if (is_enabled) { - if (!is_reset) { - orig = new_fmt_i; - new_fmt_i += color_str.len; - std.mem.copy(u8, new_fmt[orig..new_fmt_i], color_str); - } - - if (is_reset) { - const reset_sequence = RESET; - orig = new_fmt_i; - new_fmt_i += reset_sequence.len; - std.mem.copy(u8, new_fmt[orig..new_fmt_i], reset_sequence); + for (if (is_reset) RESET else color_str) |ch| { + new_fmt[new_fmt_i] = ch; + new_fmt_i += 1; } } }, diff --git a/src/resolver/data_url.zig b/src/resolver/data_url.zig index 5404b0591..1ce6381fb 100644 --- a/src/resolver/data_url.zig +++ b/src/resolver/data_url.zig @@ -10,9 +10,7 @@ const default_allocator = bun.default_allocator; const C = bun.C; const std = @import("std"); -const assert = std.debug.assert; -const mem = std.mem; -const Allocator = mem.Allocator; +const Allocator = std.mem.Allocator; const ComptimeStringMap = @import("../comptime_string_map.zig").ComptimeStringMap; // https://github.com/Vexu/zuri/blob/master/src/zuri.zig#L61-L127 @@ -33,7 +31,7 @@ pub const PercentEncoding = struct { /// returns true if str starts with a valid path character or a percent encoded octet pub fn isPchar(str: []const u8) bool { - if (Environment.allow_assert) assert(str.len > 0); + if (comptime Environment.allow_assert) std.debug.assert(str.len > 0); return switch (str[0]) { 'a'...'z', 'A'...'Z', '0'...'9', '-', '.', '_', '~', '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '@' => true, '%' => str.len > 3 and isHex(str[1]) and isHex(str[2]), @@ -55,7 +53,7 @@ pub const PercentEncoding = struct { } if (ret == null) { ret = try allocator.alloc(u8, path.len); - mem.copy(u8, ret.?, path[0..i]); + bun.copy(u8, ret, path[0..i]); ret_index = i; } diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index 25b2ca673..0785e0493 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -60,12 +60,12 @@ pub const PackageJSON = struct { var hashy: [1024]u8 = undefined; std.mem.set(u8, &hashy, 0); var used: usize = 0; - std.mem.copy(u8, &hashy, package_json.name); + 
bun.copy(u8, &hashy, package_json.name); used = package_json.name.len; hashy[used] = '@'; used += 1; - std.mem.copy(u8, hashy[used..], package_json.version); + bun.copy(u8, hashy[used..], package_json.version); used += package_json.version.len; package_json.hash = std.hash.Murmur3_32.hash(hashy[0..used]); @@ -80,9 +80,8 @@ pub const PackageJSON = struct { if (strings.indexOf(parent, fs.FileSystem.instance.top_level_dir)) |i| { const relative_dir = parent[i + fs.FileSystem.instance.top_level_dir.len ..]; var out_dir = try allocator.alloc(u8, relative_dir.len + 2); - std.mem.copy(u8, out_dir[2..], relative_dir); - out_dir[0] = '.'; - out_dir[1] = '/'; + bun.copy(u8, out_dir[2..], relative_dir); + out_dir[0..2].* = ("." ++ std.fs.path.sep_str).*; return out_dir; } @@ -1346,8 +1345,8 @@ pub const ESModule = struct { } pub fn parseSubpath(subpath: *[]const u8, specifier: string, subpath_buf: []u8) void { - std.mem.copy(u8, subpath_buf[1..], specifier); subpath_buf[0] = '.'; + bun.copy(u8, subpath_buf[1..], specifier); subpath.* = subpath_buf[0 .. specifier.len + 1]; } }; diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig index 20f889c2c..c44c2597d 100644 --- a/src/resolver/resolve_path.zig +++ b/src/resolver/resolve_path.zig @@ -202,7 +202,7 @@ pub fn relativeToCommonPath( // We get here if `from` is the root // For example: from='/'; to='/foo' if (always_copy) { - std.mem.copy(u8, buf, normalized_to); + bun.copy(u8, buf, normalized_to); return buf[0..normalized_to.len]; } else { return normalized_to; @@ -213,10 +213,9 @@ pub fn relativeToCommonPath( const slice = normalized_to[common_path.len..]; if (always_copy) { - // We get here if `from` is the exact base path for `to`. // For example: from='/foo/bar'; to='/foo/bar/baz' - std.mem.copy(u8, buf, slice); + bun.copy(u8, buf, slice); return buf[0..slice.len]; } else { return slice; @@ -268,7 +267,7 @@ pub fn relativeToCommonPath( const start = out_slice.len; out_slice = buf[0 .. out_slice.len + tail.len]; - std.mem.copy(u8, out_slice[start..], tail); + bun.copy(u8, out_slice[start..], tail); } return buf[0..out_slice.len]; @@ -690,11 +689,7 @@ pub fn joinStringBuf(buf: []u8, _parts: anytype, comptime _platform: Platform) [ written += 1; } - std.mem.copy( - u8, - temp_buf[written..], - part, - ); + bun.copy(u8, temp_buf[written..], part); written += part.len; } @@ -752,7 +747,7 @@ inline fn _joinAbsStringBuf(comptime is_sentinel: bool, comptime ReturnType: typ } } - std.mem.copy(u8, &temp_buf, cwd); + bun.copy(u8, &temp_buf, cwd); out = cwd.len; for (parts) |_part| { @@ -767,12 +762,11 @@ inline fn _joinAbsStringBuf(comptime is_sentinel: bool, comptime ReturnType: typ out += 1; } - std.mem.copy(u8, temp_buf[out..], part); + bun.copy(u8, temp_buf[out..], part); out += part.len; } - const leading_separator: []const u8 = - if (_platform.leadingSeparatorIndex(temp_buf[0..out])) |i| + const leading_separator: []const u8 = if (_platform.leadingSeparatorIndex(temp_buf[0..out])) |i| temp_buf[0 .. 
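// The package_json.zig hunk above builds a "name@version" key in a fixed stack buffer and
// hashes it with Murmur3 to get the package hash. The same construction in isolation, with
// example name/version values:
const std = @import("std");

test "hash a name@version key assembled in a stack buffer" {
    var key: [1024]u8 = undefined;
    const name = "react";
    const version = "18.2.0";
    std.mem.copy(u8, &key, name);
    key[name.len] = '@';
    std.mem.copy(u8, key[name.len + 1 ..], version);
    const used = name.len + 1 + version.len;
    try std.testing.expectEqual(
        std.hash.Murmur3_32.hash("react@18.2.0"),
        std.hash.Murmur3_32.hash(key[0..used]),
    );
}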
i + 1] else "/"; @@ -785,7 +779,7 @@ inline fn _joinAbsStringBuf(comptime is_sentinel: bool, comptime ReturnType: typ true, ); - std.mem.copy(u8, buf[0..leading_separator.len], leading_separator); + bun.copy(u8, buf, leading_separator); if (comptime is_sentinel) { buf.ptr[result.len + leading_separator.len] = 0; diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 45c0262fd..c2e00c22c 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -295,8 +295,9 @@ pub const DebugLogs = struct { const len = d.indent.len(); if (len > 0) { var __text = d.notes.allocator.alloc(u8, text.len + len) catch unreachable; - std.mem.copy(u8, __text, d.indent.list.items); - std.mem.copy(u8, __text[len..__text.len], _text); + bun.copy(u8, __text, d.indent.list.items); + bun.copy(u8, __text[len..], _text); + text = __text; d.notes.allocator.free(_text); } @@ -593,7 +594,7 @@ pub const Resolver = struct { } prefixed_package_buf[0..bunFrameworkPackagePrefix.len].* = bunFrameworkPackagePrefix.*; - std.mem.copy(u8, prefixed_package_buf[bunFrameworkPackagePrefix.len..], package); + bun.copy(u8, prefixed_package_buf[bunFrameworkPackagePrefix.len..], package); const prefixed_name = prefixed_package_buf[0 .. bunFrameworkPackagePrefix.len + package.len]; return r._resolveFramework(prefixed_name, pair, preference, load_defines) catch |err| { switch (err) { @@ -692,7 +693,7 @@ pub const Resolver = struct { }; var out = try r.allocator.alloc(u8, chosen_dir.len + 1); - std.mem.copy(u8, out, chosen_dir); + bun.copy(u8, out, chosen_dir); out[out.len - 1] = '/'; pair.router.dir = out; pair.router.routes_enabled = true; @@ -2036,10 +2037,10 @@ pub const Resolver = struct { // Try to have a friendly error message if people forget the extension if (ends_with_star) { - std.mem.copy(u8, &load_as_file_buf, base); + bun.copy(u8, &load_as_file_buf, base); for (extension_order) |ext| { var file_name = load_as_file_buf[0 .. base.len + ext.len]; - std.mem.copy(u8, file_name[base.len..], ext); + bun.copy(u8, file_name[base.len..], ext); if (entries.get(file_name) != null) { if (r.debug_logs) |*debug| { const parts = [_]string{ package_json.name, package_subpath }; @@ -2063,10 +2064,10 @@ pub const Resolver = struct { if (r.dirInfoCached(abs_esm_path) catch null) |dir_info| { if (dir_info.getEntries()) |dir_entries| { const index = "index"; - std.mem.copy(u8, &load_as_file_buf, index); + bun.copy(u8, &load_as_file_buf, index); for (extension_order) |ext| { var file_name = load_as_file_buf[0 .. index.len + ext.len]; - std.mem.copy(u8, file_name[index.len..], ext); + bun.copy(u8, file_name[index.len..], ext); const index_query = dir_entries.get(file_name); if (index_query != null and index_query.?.entry.kind(&r.fs.fs) == .file) { missing_suffix = std.fmt.allocPrint(r.allocator, "/{s}", .{file_name}) catch unreachable; @@ -2268,7 +2269,7 @@ pub const Resolver = struct { } var i: i32 = 1; - std.mem.copy(u8, &dir_info_uncached_path_buf, _path); + bun.copy(u8, &dir_info_uncached_path_buf, _path); var path = dir_info_uncached_path_buf[0.._path.len]; _dir_entry_paths_to_resolve[0] = (DirEntryResolveQueueItem{ .result = top_result, .unsafe_path = path, .safe_path = "" }); @@ -2701,11 +2702,11 @@ pub const Resolver = struct { return true; } - std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned); + bun.copy(u8, &TemporaryBuffer.ExtensionPathBuf, cleaned); // If that failed, try adding implicit extensions for (this.extension_order) |ext| { - std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len .. 
cleaned.len + ext.len], ext); + bun.copy(u8, TemporaryBuffer.ExtensionPathBuf[cleaned.len..], ext); const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. cleaned.len + ext.len]; // if (r.debug_logs) |*debug| { // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}); @@ -2732,10 +2733,10 @@ pub const Resolver = struct { return true; } - std.mem.copy(u8, &TemporaryBuffer.ExtensionPathBuf, index_path); + bun.copy(u8, &TemporaryBuffer.ExtensionPathBuf, index_path); for (this.extension_order) |ext| { - std.mem.copy(u8, TemporaryBuffer.ExtensionPathBuf[index_path.len .. index_path.len + ext.len], ext); + bun.copy(u8, TemporaryBuffer.ExtensionPathBuf[index_path.len..], ext); const new_path = TemporaryBuffer.ExtensionPathBuf[0 .. index_path.len + ext.len]; // if (r.debug_logs) |*debug| { // debug.addNoteFmt("Checking for \"{s}\" ", .{new_path}); @@ -2805,7 +2806,7 @@ pub const Resolver = struct { switch (comptime kind) { .AbsolutePath => { BrowserMapPath.abs_to_rel_buf[0..2].* = "./".*; - std.mem.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path); + bun.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path); if (checker.checkPath(BrowserMapPath.abs_to_rel_buf[0 .. checker.input_path.len + 2])) { return checker.remapped; } @@ -2825,7 +2826,7 @@ pub const Resolver = struct { if (isInSamePackage) { BrowserMapPath.abs_to_rel_buf[0..2].* = "./".*; - std.mem.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path); + bun.copy(u8, BrowserMapPath.abs_to_rel_buf[2..], checker.input_path); if (checker.checkPath(BrowserMapPath.abs_to_rel_buf[0 .. checker.input_path.len + 2])) { return checker.remapped; @@ -2916,7 +2917,7 @@ pub const Resolver = struct { for (extension_order) |ext| { var base = TemporaryBuffer.ExtensionPathBuf[0 .. "index".len + ext.len]; base[0.."index".len].* = "index".*; - std.mem.copy(u8, base["index".len..base.len], ext); + bun.copy(u8, base["index".len..], ext); if (dir_info.getEntries()) |entries| { if (entries.get(base)) |lookup| { @@ -2966,7 +2967,7 @@ pub const Resolver = struct { // In order for our path handling logic to be correct, it must end with a trailing slash. var path = path_; if (!strings.endsWithChar(path_, std.fs.path.sep)) { - std.mem.copy(u8, &remap_path_trailing_slash, path); + bun.copy(u8, &remap_path_trailing_slash, path); remap_path_trailing_slash[path.len] = std.fs.path.sep; remap_path_trailing_slash[path.len + 1] = 0; path = remap_path_trailing_slash[0 .. path.len + 1]; @@ -3241,10 +3242,10 @@ pub const Resolver = struct { } // Try the path with extensions - std.mem.copy(u8, &load_as_file_buf, path); + bun.copy(u8, &load_as_file_buf, path); for (extension_order) |ext| { var buffer = load_as_file_buf[0 .. path.len + ext.len]; - std.mem.copy(u8, buffer[path.len..buffer.len], ext); + bun.copy(u8, buffer[path.len..], ext); const file_name = buffer[path.len - base.len .. 
buffer.len]; if (r.debug_logs) |*debug| { @@ -3295,7 +3296,7 @@ pub const Resolver = struct { if (strings.eqlComptime(ext, ".js") or strings.eqlComptime(ext, ".jsx")) { const segment = base[0..last_dot]; var tail = load_as_file_buf[path.len - base.len ..]; - std.mem.copy(u8, tail, segment); + bun.copy(u8, tail, segment); const exts = .{ ".ts", ".tsx" }; diff --git a/src/router.zig b/src/router.zig index c7dadcdd3..f4ea731d3 100644 --- a/src/router.zig +++ b/src/router.zig @@ -668,17 +668,13 @@ pub const Route = struct { if (public_dir.len > 0) { route_file_buf[0] = '/'; buf = buf[1..]; - std.mem.copy( - u8, - buf, - public_dir, - ); + bun.copy(u8, buf, public_dir); } buf[public_dir.len] = '/'; buf = buf[public_dir.len + 1 ..]; - std.mem.copy(u8, buf, base); + bun.copy(u8, buf, base); buf = buf[base.len..]; - std.mem.copy(u8, buf, extname); + bun.copy(u8, buf, extname); buf = buf[extname.len..]; break :brk route_file_buf[0 .. @ptrToInt(buf.ptr) - @ptrToInt(&route_file_buf)]; }; diff --git a/src/string_builder.zig b/src/string_builder.zig index 23e83917b..e3921568b 100644 --- a/src/string_builder.zig +++ b/src/string_builder.zig @@ -1,11 +1,11 @@ -const string = @import("string_types.zig").string; -const Allocator = @import("std").mem.Allocator; -const assert = @import("std").debug.assert; -const copy = @import("std").mem.copy; -const Env = @import("./env.zig"); +const std = @import("std"); +const Allocator = std.mem.Allocator; const bun = @import("bun"); +const Environment = bun.Environment; +const string = @import("string_types.zig").string; const StringBuilder = @This(); -const DebugHashTable = if (Env.allow_assert) std.AutoHashMapUnmanaged(u64, void) else void; + +const DebugHashTable = if (Environment.allow_assert) std.AutoHashMapUnmanaged(u64, void) else void; len: usize = 0, cap: usize = 0, @@ -15,7 +15,7 @@ debug_only_checker: DebugHashTable = DebugHashTable{}, pub fn count(this: *StringBuilder, slice: string) void { this.cap += slice.len; - if (comptime Env.allow_assert) { + if (comptime Environment.allow_assert) { _ = this.debug_only_checker.getOrPut(bun.default_allocator, bun.hash(slice)) catch unreachable; } } @@ -29,47 +29,39 @@ pub fn allocate(this: *StringBuilder, allocator: Allocator) !void { pub fn deinit(this: *StringBuilder, allocator: Allocator) void { if (this.ptr == null or this.cap == 0) return; allocator.free(this.ptr.?[0..this.cap]); - if (comptime Env.allow_assert) { + if (comptime Environment.allow_assert) { this.debug_only_checker.deinit(bun.default_allocator); this.debug_only_checker = .{}; } } pub fn append(this: *StringBuilder, slice: string) string { - if (comptime Env.allow_assert) { - assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first - } - - if (comptime Env.allow_assert) { - assert(this.debug_only_checker.contains(bun.hash(slice))); + if (comptime Environment.allow_assert) { + std.debug.assert(this.len <= this.cap); // didn't count everything + std.debug.assert(this.ptr != null); // must call allocate first + std.debug.assert(this.debug_only_checker.contains(bun.hash(slice))); } bun.copy(u8, this.ptr.?[this.len..this.cap], slice); const result = this.ptr.?[this.len..this.cap][0..slice.len]; this.len += slice.len; - if (Env.allow_assert) { - assert(this.len <= this.cap); - } + if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap); return result; } -const std = @import("std"); pub fn fmt(this: *StringBuilder, comptime str: string, args: anytype) string { - if 
(Env.allow_assert) { - assert(this.len <= this.cap); // didn't count everything - assert(this.ptr != null); // must call allocate first + if (comptime Environment.allow_assert) { + std.debug.assert(this.len <= this.cap); // didn't count everything + std.debug.assert(this.ptr != null); // must call allocate first } var buf = this.ptr.?[this.len..this.cap]; const out = std.fmt.bufPrint(buf, str, args) catch unreachable; this.len += out.len; - if (Env.allow_assert) { - assert(this.len <= this.cap); - } + if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap); return out; } @@ -80,7 +72,9 @@ pub fn fmtCount(this: *StringBuilder, comptime str: string, args: anytype) void pub fn allocatedSlice(this: *StringBuilder) []u8 { var ptr = this.ptr orelse return &[_]u8{}; - std.debug.assert(this.cap > 0); - std.debug.assert(this.len > 0); + if (comptime Environment.allow_assert) { + std.debug.assert(this.cap > 0); + std.debug.assert(this.len > 0); + } return ptr[0..this.cap]; } diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 2f9612c23..ee65d1224 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -269,8 +269,8 @@ pub const SplitIterator = struct { pub fn cat(allocator: std.mem.Allocator, first: string, second: string) !string { var out = try allocator.alloc(u8, first.len + second.len); - std.mem.copy(u8, out, first); - std.mem.copy(u8, out[first.len..], second); + bun.copy(u8, out, first); + bun.copy(u8, out[first.len..], second); return out; } @@ -367,18 +367,16 @@ pub const StringOrTinyString = struct { }; pub fn copyLowercase(in: string, out: []u8) string { - var in_slice: string = in; - var out_slice: []u8 = out[0..in.len]; + var in_slice = in; + var out_slice = out; - begin: while (out_slice.len > 0) { + begin: while (in_slice.len > 0) { for (in_slice, 0..) 
|c, i| { switch (c) { 'A'...'Z' => { - // @memcpy(out_slice.ptr, in_slice.ptr, i); - std.mem.copy(u8, out_slice, in_slice); + bun.copy(u8, out_slice, in_slice[0..i]); out_slice[i] = std.ascii.toLower(c); const end = i + 1; - if (end >= out_slice.len) break :begin; in_slice = in_slice[end..]; out_slice = out_slice[end..]; continue :begin; @@ -387,8 +385,7 @@ pub fn copyLowercase(in: string, out: []u8) string { } } - // @memcpy(out_slice.ptr, in_slice.ptr, in_slice.len); - std.mem.copy(u8, out_slice, in_slice); + bun.copy(u8, out_slice, in_slice); break :begin; } @@ -904,7 +901,7 @@ pub inline fn joinBuf(out: []u8, parts: anytype, comptime parts_len: usize) []u8 comptime var i: usize = 0; inline while (i < parts_len) : (i += 1) { const part = parts[i]; - std.mem.copy(u8, remain, part); + bun.copy(u8, remain, part); remain = remain[part.len..]; count += part.len; } @@ -2233,8 +2230,8 @@ pub fn escapeHTMLForUTF16Input(allocator: std.mem.Allocator, utf16: []const u16) } var buf = allocator.alloc(u16, first_16.len + second_16.len) catch unreachable; - std.mem.copy(u16, buf, first_16); - std.mem.copy(u16, buf[first_16.len..], second_16); + bun.copy(u16, buf, first_16); + bun.copy(u16, buf[first_16.len..], second_16); return Escaped(u16){ .allocated = buf }; }, @@ -4164,7 +4161,7 @@ pub fn cloneNormalizingSeparators( while (tokenized.next()) |token| { if (token.len == 0) continue; - std.mem.copy(u8, remain, token); + bun.copy(u8, remain, token); remain[token.len..][0] = std.fs.path.sep; remain = remain[token.len + 1 ..]; } diff --git a/src/string_joiner.zig b/src/string_joiner.zig index d6440008a..490989c9a 100644 --- a/src/string_joiner.zig +++ b/src/string_joiner.zig @@ -1,19 +1,15 @@ /// Rope-like data structure for joining many small strings into one big string. 
-const Joiner = @This(); - +const std = @import("std"); +const default_allocator = @import("bun").default_allocator; const string = @import("string_types.zig").string; -const Allocator = @import("std").mem.Allocator; -const assert = @import("std").debug.assert; -const copy = @import("std").mem.copy; -const Env = @import("./env.zig"); +const Allocator = std.mem.Allocator; const ObjectPool = @import("./pool.zig").ObjectPool; - -const default_allocator = @import("bun").default_allocator; +const Joiner = @This(); const Joinable = struct { offset: u31 = 0, needs_deinit: bool = false, - allocator: std.mem.Allocator = undefined, + allocator: Allocator = undefined, slice: []const u8 = "", pub const Pool = ObjectPool(Joinable, null, true, 4); @@ -22,12 +18,12 @@ const Joinable = struct { last_byte: u8 = 0, len: usize = 0, use_pool: bool = true, -node_allocator: std.mem.Allocator = undefined, +node_allocator: Allocator = undefined, head: ?*Joinable.Pool.Node = null, tail: ?*Joinable.Pool.Node = null, -pub fn done(this: *Joiner, allocator: std.mem.Allocator) ![]u8 { +pub fn done(this: *Joiner, allocator: Allocator) ![]u8 { if (this.head == null) { var out: []u8 = &[_]u8{}; return out; @@ -64,7 +60,7 @@ pub fn lastByte(this: *const Joiner) u8 { return 0; } -pub fn append(this: *Joiner, slice: string, offset: u32, allocator: ?std.mem.Allocator) void { +pub fn append(this: *Joiner, slice: string, offset: u32, allocator: ?Allocator) void { const data = slice[offset..]; this.len += @truncate(u32, data.len); @@ -91,5 +87,3 @@ pub fn append(this: *Joiner, slice: string, offset: u32, allocator: ?std.mem.All tail.next = new_tail; this.tail = new_tail; } - -const std = @import("std"); diff --git a/src/url.zig b/src/url.zig index 034a90ecf..1db48f24a 100644 --- a/src/url.zig +++ b/src/url.zig @@ -161,7 +161,7 @@ pub const URL = struct { var buf_i: usize = 0; for (path_parts[0..path_end]) |part| { - std.mem.copy(u8, buf[buf_i..], part); + bun.copy(u8, buf[buf_i..], part); buf_i += part.len; } return resolve_path.normalizeStringBuf(buf[0..buf_i], out, false, .loose, false); diff --git a/src/watcher.zig b/src/watcher.zig index f35c16bc0..0fe05fd48 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -696,7 +696,7 @@ pub fn NewWatcher(comptime ContextType: type) type { } else if (comptime Environment.isLinux) { // var file_path_to_use_ = std.mem.trimRight(u8, file_path_, "/"); // var buf: [bun.MAX_PATH_BYTES+1]u8 = undefined; - // std.mem.copy(u8, &buf, file_path_to_use_); + // bun.copy(u8, &buf, file_path_to_use_); // buf[file_path_to_use_.len] = 0; var buf = file_path_.ptr; var slice: [:0]const u8 = buf[0..file_path_.len :0]; @@ -778,7 +778,7 @@ pub fn NewWatcher(comptime ContextType: type) type { } else if (Environment.isLinux) { var file_path_to_use_ = std.mem.trimRight(u8, file_path_, "/"); var buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - std.mem.copy(u8, &buf, file_path_to_use_); + bun.copy(u8, &buf, file_path_to_use_); buf[file_path_to_use_.len] = 0; var slice: [:0]u8 = buf[0..file_path_to_use_.len :0]; index = try INotify.watchDir(slice); diff --git a/src/which.zig b/src/which.zig index 1bf2aba46..55dd7bfce 100644 --- a/src/which.zig +++ b/src/which.zig @@ -1,9 +1,9 @@ const std = @import("std"); const bun = @import("bun"); fn isValid(buf: *[bun.MAX_PATH_BYTES]u8, segment: []const u8, bin: []const u8) ?u16 { - std.mem.copy(u8, buf, segment); + bun.copy(u8, buf, segment); buf[segment.len] = std.fs.path.sep; - std.mem.copy(u8, buf[segment.len + 1 ..], bin); + bun.copy(u8, buf[segment.len + 1 ..], bin); 
buf[segment.len + 1 + bin.len ..][0] = 0; const filepath = buf[0 .. segment.len + 1 + bin.len :0]; if (!checkPath(filepath)) return null; @@ -23,7 +23,7 @@ pub fn which(buf: *[bun.MAX_PATH_BYTES]u8, path: []const u8, cwd: []const u8, bi // handle absolute paths if (std.fs.path.isAbsolute(bin)) { - std.mem.copy(u8, buf, bin); + bun.copy(u8, buf, bin); buf[bin.len] = 0; var binZ: [:0]u8 = buf[0..bin.len :0]; if (checkPath(binZ)) return binZ; diff --git a/test/bun.js/install/bun-link.test.ts b/test/bun.js/install/bun-link.test.ts index 137242cc4..88e124ec2 100644 --- a/test/bun.js/install/bun-link.test.ts +++ b/test/bun.js/install/bun-link.test.ts @@ -341,6 +341,7 @@ it("should link dependency without crashing", async () => { const err4 = await new Response(stderr4).text(); expect(err4).toContain(`error: FileNotFound installing ${link_name}`); expect(stdout4).toBeDefined(); - expect(await new Response(stdout4).text()).toBe(""); + const out4 = await new Response(stdout4).text(); + expect(out4.replace(/\[[0-9\.]+m?s\]/, "[]").split(/\r?\n/)).toEqual(["Failed to install 1 packages", "[] done", ""]); expect(await exited4).toBe(0); }); |
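
Every hunk in this change follows the same mechanical rewrite: a `std.mem.copy(T, dest, source)` call becomes `bun.copy(T, dest, source)`, and redundant sub-slicing of the destination (e.g. `buf[0..this.len]`) is dropped where the destination only needs to be at least as long as the source. The Zig sketch below is a hypothetical stand-in with the same call shape, included only to illustrate what such a helper could look like; it assumes memmove-style semantics (the copy stays correct when the two slices alias the same buffer) and is not the actual implementation shipped in Bun's src/bun.zig.

const std = @import("std");

/// Sketch of a copy helper with the same call shape as `bun.copy(T, dest, source)`.
/// Assumption: memmove-like behavior, i.e. correct even when `dest` and `source`
/// overlap. This mirrors the contract the rewritten call sites appear to rely on;
/// it is not Bun's real implementation.
pub fn copy(comptime T: type, dest: []T, source: []const T) void {
    std.debug.assert(dest.len >= source.len); // same precondition as std.mem.copy
    if (@ptrToInt(dest.ptr) <= @ptrToInt(source.ptr)) {
        // Destination starts at or before the source: copy forwards.
        for (source, 0..) |item, i| dest[i] = item;
    } else {
        // Destination starts after the source: copy backwards so overlapping
        // regions are not clobbered before they are read.
        var i: usize = source.len;
        while (i > 0) {
            i -= 1;
            dest[i] = source[i];
        }
    }
}

test "call shape matches the rewritten sites" {
    // Mirrors e.g. `bun.copy(u8, out[2..], str)` from src/options.zig above:
    // the destination slice may be longer than the source.
    var out: [16]u8 = undefined;
    out[0] = '.';
    out[1] = '/';
    const str = "foo/bar.ts";
    copy(u8, out[2..], str);
    try std.testing.expectEqualStrings("./foo/bar.ts", out[0 .. str.len + 2]);
}

Under that assumption, passing the whole buffer instead of a `buf[0..source.len]` sub-slice (as in the src/bun.js/bindings/bindings.zig hunk above) is purely cosmetic: only `source.len` elements are ever written either way.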