-rw-r--r--   Makefile                      |   9
-rw-r--r--   build.zig                     |  69
-rw-r--r--   misctools/http_bench.zig      |   4
m---------   src/deps/boringssl            |   0
-rw-r--r--   src/global.zig                |   3
-rw-r--r--   src/hop/hop.zig               | 235
-rw-r--r--   src/hop/schema.zig            | 472
-rw-r--r--   src/libarchive/libarchive.zig |  99
-rw-r--r--   src/runtime.version           |   2
9 files changed, 864 insertions(+), 29 deletions(-)
diff --git a/Makefile b/Makefile
index 504772899..c5a8f4eff 100644
--- a/Makefile
+++ b/Makefile
@@ -95,7 +95,7 @@ DEFAULT_LINKER_FLAGS =
JSC_BUILD_STEPS :=
ifeq ($(OS_NAME),linux)
JSC_BUILD_STEPS += jsc-check
-DEFAULT_LINKER_FLAGS= -lcrypto -pthread -ldl
+DEFAULT_LINKER_FLAGS= -pthread -ldl
endif
ifeq ($(OS_NAME),darwin)
JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
@@ -220,7 +220,7 @@ bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release
vendor-without-check: api analytics node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive
boringssl-build:
- cd $(DEPS_DIR)/boringssl && mkdir -p build && cd build && cmake -GNinja .. && ninja
+ cd $(DEPS_DIR)/boringssl && mkdir -p build && cd build && cmake -DCMAKE_BUILD_TYPE=Release -GNinja .. && ninja
boringssl-copy:
cp $(DEPS_DIR)/boringssl/build/ssl/libssl.a $(DEPS_DIR)/libssl.a
@@ -333,9 +333,8 @@ fetch-debug:
httpbench-debug:
- cd misctools; $(ZIG) build-obj ./http_bench.zig -fcompiler-rt -lc --main-pkg-path ../ --pkg-begin io ../$(IO_FILE) --pkg-end $(BORINGSSL_PACKAGE)
- $(CXX) ./misctools/http_bench.o -g -o ./misctools/http_bench $(DEFAULT_LINKER_FLAGS) -lc \
- src/deps/mimalloc/libmimalloc.a \
+ $(ZIG) build httpbench-obj
+ $(CXX) $(DEBUG_PACKAGE_DIR)/httpbench.o -g -o ./misctools/http_bench $(DEFAULT_LINKER_FLAGS) -lc \
src/deps/zlib/libz.a \
src/deps/libarchive.a \
src/deps/libssl.a \
diff --git a/build.zig b/build.zig
index 7e9af7b78..9508fd7e2 100644
--- a/build.zig
+++ b/build.zig
@@ -94,7 +94,7 @@ fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: *std.mem.Alloc
step.addPackage(http);
step.addPackage(network_thread);
}
-
+var output_dir: []const u8 = "";
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
var file = std.fs.cwd().openFile(filepath, .{ .read = true }) catch |err| {
const linux_only = "\nOn Linux, you'll need to compile libiconv manually and copy the .a file into src/deps.";
@@ -107,6 +107,7 @@ fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
}
var x64 = "x64";
+var mode: std.builtin.Mode = undefined;
pub fn build(b: *std.build.Builder) !void {
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
@@ -115,7 +116,7 @@ pub fn build(b: *std.build.Builder) !void {
var target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
- const mode = b.standardReleaseOptions();
+ mode = b.standardReleaseOptions();
var cwd_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const cwd: []const u8 = b.pathFromRoot(".");
@@ -156,7 +157,7 @@ pub fn build(b: *std.build.Builder) !void {
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
- const output_dir = b.pathFromRoot(output_dir_base);
+ output_dir = b.pathFromRoot(output_dir_base);
const bun_executable_name = if (mode == std.builtin.Mode.Debug) "bun-debug" else "bun";
if (target.getOsTag() == .wasi) {
@@ -296,10 +297,7 @@ pub fn build(b: *std.build.Builder) !void {
// if (target.getOsTag() == .macos) "-DUSE_FOUNDATION=1" else "",
// if (target.getOsTag() == .macos) "-DUSE_CF_RETAIN_PTR=1" else "",
// };
- const headers_step = b.step("headers-obj", "JSC headers Step #1");
- var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/bindgen.zig");
- headers_obj.setMainPkgPath(exe.main_pkg_path.?);
- defer headers_step.dependOn(&headers_obj.step);
+
{
b.default_step.dependOn(&b.addLog(
@@ -371,6 +369,8 @@ pub fn build(b: *std.build.Builder) !void {
}
}
+
+
{
var obj_step = b.step("obj", "Build Bun as a .o file");
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
@@ -412,22 +412,31 @@ pub fn build(b: *std.build.Builder) !void {
}
{
- headers_obj.setTarget(target);
+ const headers_step = b.step("headers-obj", "Build JavaScriptCore headers");
+ var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/bindgen.zig");
+ defer headers_step.dependOn(&headers_obj.step);
+ try configureObjectStep(headers_obj, target, exe.main_pkg_path.?);
+ }
- try addInternalPackages(headers_obj, b.allocator, target);
- addPicoHTTP(headers_obj, false);
+ {
+ const headers_step = b.step("httpbench-obj", "Build HTTPBench tool (object files)");
+ var headers_obj: *std.build.LibExeObjStep = b.addObject("httpbench", "misctools/http_bench.zig");
+ defer headers_step.dependOn(&headers_obj.step);
+ try configureObjectStep(headers_obj, target, exe.main_pkg_path.?);
+ }
- headers_obj.setOutputDir(output_dir);
- headers_obj.setBuildMode(mode);
- headers_obj.linkLibC();
- headers_obj.linkLibCpp();
- headers_obj.bundle_compiler_rt = true;
+ {
+ const headers_step = b.step("fetch-obj", "Build fetch (object files)");
+ var headers_obj: *std.build.LibExeObjStep = b.addObject("fetch", "misctools/fetch.zig");
+ defer headers_step.dependOn(&headers_obj.step);
+ try configureObjectStep(headers_obj, target, exe.main_pkg_path.?);
+ }
- if (target.getOsTag() == .linux) {
- // obj.want_lto = tar;
- headers_obj.link_emit_relocs = true;
- headers_obj.link_function_sections = true;
- }
+ {
+ const headers_step = b.step("tgz-obj", "Build tgz (object files)");
+ var headers_obj: *std.build.LibExeObjStep = b.addObject("tgz", "misctools/tgz.zig");
+ defer headers_step.dependOn(&headers_obj.step);
+ try configureObjectStep(headers_obj, target, exe.main_pkg_path.?);
}
} else {
b.default_step.dependOn(&exe.step);
@@ -470,3 +479,23 @@ pub fn build(b: *std.build.Builder) !void {
}
pub var original_make_fn: ?fn (step: *std.build.Step) anyerror!void = null;
+
+pub fn configureObjectStep(obj: *std.build.LibExeObjStep, target: anytype, main_pkg_path: []const u8) !void {
+ obj.setMainPkgPath(main_pkg_path);
+ obj.setTarget(target);
+
+ try addInternalPackages(obj, std.heap.page_allocator, target);
+ addPicoHTTP(obj, false);
+
+ obj.setOutputDir(output_dir);
+ obj.setBuildMode(mode);
+ obj.linkLibC();
+ obj.linkLibCpp();
+ obj.bundle_compiler_rt = true;
+
+ if (target.getOsTag() == .linux) {
+ // obj.want_lto = tar;
+ obj.link_emit_relocs = true;
+ obj.link_function_sections = true;
+ }
+}
\ No newline at end of file
diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig
index 1bdbea39c..10965f66a 100644
--- a/misctools/http_bench.zig
+++ b/misctools/http_bench.zig
@@ -168,8 +168,8 @@ pub const Arguments = struct {
}
};
-const NetworkThread = @import("../src/http/network_thread.zig");
-const HTTP = @import("../src/http/http_client_async.zig");
+const NetworkThread = @import("network_thread");
+const HTTP = @import("http");
var stdout_: std.fs.File = undefined;
var stderr_: std.fs.File = undefined;
diff --git a/src/deps/boringssl b/src/deps/boringssl
-Subproject commit b3ed071ecc4efb77afd0a025ea1078da19578bf
+Subproject commit 4f1fae3043f22d3a2a0c7fcd7d0244cd91b60bd
diff --git a/src/global.zig b/src/global.zig
index 6793054db..3407e837b 100644
--- a/src/global.zig
+++ b/src/global.zig
@@ -1,7 +1,8 @@
const std = @import("std");
pub const Environment = @import("env.zig");
-const use_mimalloc = !Environment.isTest and Environment.isNative;
+const use_mimalloc = false;
+
pub const default_allocator: *std.mem.Allocator = if (!use_mimalloc)
std.heap.c_allocator
diff --git a/src/hop/hop.zig b/src/hop/hop.zig
new file mode 100644
index 000000000..0c5439d1d
--- /dev/null
+++ b/src/hop/hop.zig
@@ -0,0 +1,235 @@
+const std = @import("std");
+const C = @import("../c.zig");
+const Schema = @import("./schema.zig");
+const Hop = Schema.Hop;
+
+const string = []const u8;
+
+
+pub fn cmpStringsAsc(ctx: void, a: string, b: string) bool {
+ return std.mem.order(u8, a, b) == .lt;
+}
+
+pub fn cmpStringsDesc(ctx: void, a: string, b: string) bool {
+ return std.mem.order(u8, a, b) == .gt;
+}
+
+const sort_asc = std.sort.asc(u8);
+const sort_desc = std.sort.desc(u8);
+
+pub fn sortAsc(in: []string) void {
+ std.sort.sort([]const u8, in, {}, cmpStringsAsc);
+}
+
+pub fn sortDesc(in: []string) void {
+ std.sort.sort([]const u8, in, {}, cmpStringsDesc);
+}
+
+const Library = struct {
+ pub const magic_bytes = "#!/usr/bin/env hop\n\n";
+ const Header = [magic_bytes.len + 5]u8;
+
+ archive: Hop.Archive,
+ allocator: *std.mem.Allocator,
+ metadata_bytes: []u8,
+ fd: ?std.os.fd_t,
+
+ pub const Builder = struct {
+ allocator: *std.mem.Allocator,
+ files: std.ArrayListUnmanaged(Hop.File),
+ metadata_bytes: std.ArrayListUnmanaged(u8),
+ destination: std.fs.File = undefined,
+
+ pub fn init(allocator: *std.mem.Allocator) Builder {
+ return Builder{
+ .allocator = allocator,
+ .metadata_bytes = .{},
+ .files = std.ArrayListUnmanaged(Hop.File){},
+ };
+ }
+
+ pub fn start(this: *Builder, file: std.fs.File) !void {
+ this.destination = file;
+ try file.seekTo(0);
+
+ // Write the header with 0 set as the content offset
+ try file.writeAll(magic_bytes ++ [5]u8{ 0, 0, 0, 0, '\n' });
+ }
+
+ const FileSorter = struct {
+ metadata: []const u8,
+ pub fn sortByName(this: FileSorter, lhs: Hop.File, rhs: Hop.File) bool {
+ return std.mem.order(u8, this.metadata[lhs.name.off..][0..lhs.name.len], this.metadata[rhs.name.off..][0..rhs.name.len]) == .lt;
+ }
+ };
+
+ pub fn done(this: *Builder) !Hop.Archive {
+ const metadata_offset = @truncate(u32, try this.destination.getPos());
+
+ var sorter = FileSorter{
+ .metadata = this.metadata_bytes.items,
+ };
+
+ std.sort.sort(Hop.File, this.files.items, sorter, FileSorter.sortByName);
+
+ var name_hashes = try this.allocator.alloc(u32, this.files.items.len);
+
+ for (this.files.items) |file, i| {
+ name_hashes[i] = file.name_hash;
+ }
+
+ var archive = Hop.Archive{
+ .version = 1,
+ .files = this.files.items,
+ .name_hashes = name_hashes,
+ .content_offset = metadata_offset,
+ .metadata = this.metadata_bytes.items,
+ };
+
+ var schema_writer = Schema.FileWriter.init(this.destination);
+ try archive.encode(&schema_writer);
+
+ var header: Header = undefined;
+ header[0..magic_bytes.len].* = magic_bytes.*;
+ std.mem.writeIntNative(u32, header[magic_bytes.len..][0..4], metadata_offset);
+ header[magic_bytes.len..][4] = '\n';
+ try this.destination.pwriteAll(&header, 0);
+
+ _ = C.fchmod(
+ this.destination.handle,
+            // chmod 1777 (the flags below OR together to 0o1777: a+rwx plus the sticky bit)
+ 0000010 | 0000100 | 0000001 | 0001000 | 0000040 | 0000004 | 0000002 | 0000400 | 0000200 | 0000020,
+ );
+
+ return archive;
+ }
+
+ pub fn appendMetadata(this: *Builder, bytes: []const u8) !Hop.StringPointer {
+ const off = @truncate(u32, this.metadata_bytes.items.len);
+
+        // Append a null byte after each string so that C APIs expecting sentinel-terminated pointers work without copying
+ try this.metadata_bytes.appendSlice(this.allocator, bytes);
+ try this.metadata_bytes.append(this.allocator, 0);
+ return Hop.StringPointer{
+ .off = off,
+ .len = @truncate(u32, bytes.len),
+ };
+ }
+
+ pub fn appendContent(this: *Builder, bytes: []const u8) !Hop.StringPointer {
+ const off = try this.destination.getPos();
+ try this.destination.writeAll(bytes);
+ return Hop.StringPointer{
+            .off = @truncate(u32, off),
+            .len = @truncate(u32, bytes.len),
+ };
+ }
+
+ pub fn appendContentFromDisk(this: *Builder, name: []const u8, in: std.fs.File) !void {
+ var stat = try in.stat();
+
+ _ = try this.destination.write("\n");
+ const off_in = try this.destination.getPos();
+ const written = try std.os.copy_file_range(in.handle, 0, this.destination.handle, off_in, stat.size, 0);
+ try this.destination.seekTo(off_in + written);
+ const end = try this.destination.getPos();
+ try this.appendFileMetadata(name, off_in, end, stat);
+ try this.destination.writeAll(&[_]u8{0});
+ }
+
+ pub fn appendFileMetadata(this: *Builder, name_buf: []const u8, start_pos: u64, end_pos: u64, stat: std.fs.File.Stat) !void {
+ const name = try this.appendMetadata(name_buf);
+ try this.files.append(
+ this.allocator,
+ Hop.File{
+ .name = name,
+ .name_hash = @truncate(u32, std.hash.Wyhash.hash(0, name_buf)),
+ .data = Schema.Hop.StringPointer{ .off = @truncate(u32, start_pos), .len = @truncate(u32, end_pos - start_pos) },
+ .chmod = @truncate(u32, stat.mode),
+ .mtime = @truncate(u32, @intCast(u128, @divFloor(stat.mtime, std.time.ns_per_s))),
+ .ctime = @truncate(u32, @intCast(u128, @divFloor(stat.ctime, std.time.ns_per_s))),
+ },
+ );
+ }
+
+ pub fn appendDirectoryRecursively(this: *Builder, dir: std.fs.Dir) !void {
+ var walker = try dir.walk(this.allocator);
+ defer walker.deinit();
+ while (try walker.next()) |entry_| {
+ const entry: std.fs.Dir.Walker.WalkerEntry = entry_;
+
+ if (entry.kind != .File) continue;
+
+ try this.appendContentFromDisk(entry.path, try entry.dir.openFile(entry.basename, .{ .read = true }));
+ }
+ }
+ };
+
+ pub fn extract(this: *Library, dest: std.fs.Dir, comptime verbose: bool) !void {
+ for (this.archive.files) |file| {
+ var name_slice = this.archive.metadata[file.name.off..][0..file.name.len :0];
+
+ var out = dest.createFileZ(name_slice, .{ .truncate = true }) catch brk: {
+ if (std.fs.path.dirname(name_slice)) |dirname| {
+ dest.makePath(dirname) catch |err2| {
+ std.log.err("error: {s} Failed to mkdir {s}\n", .{ @errorName(err2), dirname });
+ continue;
+ };
+ }
+
+ break :brk dest.createFileZ(name_slice, .{ .truncate = true }) catch |err2| {
+ std.log.err("error: {s} Failed to create file: {s}\n", .{ @errorName(err2), name_slice });
+ continue;
+ };
+ };
+
+ const written = try std.os.copy_file_range(this.fd.?, file.data.off, out.handle, 0, file.data.len, 0);
+ if (verbose) {
+ std.log.info("Extracted file: {s} ({d} bytes)\n", .{ name_slice, written });
+ }
+ }
+ }
+
+ pub fn load(
+ fd: std.os.fd_t,
+ allocator: *std.mem.Allocator,
+ ) !Library {
+ var file = std.fs.File{ .handle = fd };
+
+ var header_buf: Header = std.mem.zeroes(Header);
+        _ = file.pread(&header_buf, 0) catch |err| {
+ std.log.err("Archive is corrupt. Failed to read header: {s}", .{@errorName(err)});
+ return err;
+ };
+
+ const content_offset = std.mem.readIntNative(u32, header_buf[magic_bytes.len..][0..4]);
+
+ const end = file.getEndPos() catch |err| {
+ std.log.err("Unable to get archive end position {s}", .{@errorName(err)});
+ return error.IOError;
+ };
+
+ if (content_offset == 0 or std.math.maxInt(u32) == content_offset) {
+ std.log.err("Archive is corrupt. content_offset {d} is invalid", .{content_offset});
+ return error.CorruptArchive;
+ }
+
+ if (content_offset >= end) {
+ std.log.err("Archive is corrupt. content_offset is {d} greater than end of file", .{content_offset});
+ return error.CorruptArchive;
+ }
+
+ var metadata_buf = try allocator.alloc(u8, end - content_offset);
+        _ = file.preadAll(metadata_buf, content_offset) catch |err| {
+ std.log.err("Error reading archive metadata {s}", .{@errorName(err)});
+ return err;
+ };
+ var reader = Schema.Reader.init(metadata_buf, allocator);
+ var archive = Hop.Archive.decode(&reader) catch |err| {
+ std.log.err("Archive is corrupt. Failed to decode archive: {s}", .{@errorName(err)});
+ return err;
+ };
+
+ return Library{ .fd = fd, .archive = archive, .allocator = allocator, .metadata_bytes = metadata_buf };
+ }
+};
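
[editor's note] A minimal usage sketch of the Library/Builder API introduced above, for orientation only; it is not part of this commit. It assumes Library were exported as pub from src/hop/hop.zig, uses illustrative paths and file names, and follows the *std.mem.Allocator and .{ .read = true } conventions of the Zig toolchain this tree builds with.

const std = @import("std");
// Illustrative import: in this commit, Library is file-scope (not pub) inside src/hop/hop.zig.
const Library = @import("src/hop/hop.zig").Library;

pub fn main() !void {
    var allocator = std.heap.c_allocator;

    // Pack ./assets into an executable "hop" archive.
    var out = try std.fs.cwd().createFile("assets.hop", .{ .truncate = true });
    defer out.close();

    var builder = Library.Builder.init(allocator);
    try builder.start(out); // writes the magic header with a zeroed metadata offset

    var dir = try std.fs.cwd().openDir("assets", .{ .iterate = true });
    defer dir.close();
    try builder.appendDirectoryRecursively(dir); // copies file contents, records per-file metadata
    _ = try builder.done(); // encodes the metadata and patches the real offset back into the header

    // Load the archive back and extract it into ./extracted.
    var in = try std.fs.cwd().openFile("assets.hop", .{ .read = true });
    defer in.close();
    var library = try Library.load(in.handle, allocator);

    var dest = try std.fs.cwd().makeOpenPath("extracted", .{});
    defer dest.close();
    try library.extract(dest, true);
}

Because start() reserves the header with a zero offset and done() patches it in place via pwriteAll, the output remains a valid "#!/usr/bin/env hop" script for its entire lifetime.
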
diff --git a/src/hop/schema.zig b/src/hop/schema.zig
new file mode 100644
index 000000000..fd80e71f3
--- /dev/null
+++ b/src/hop/schema.zig
@@ -0,0 +1,472 @@
+const std = @import("std");
+
+pub const Reader = struct {
+ const Self = @This();
+ pub const ReadError = error{EOF};
+
+ buf: []u8,
+ remain: []u8,
+ allocator: *std.mem.Allocator,
+
+ pub fn init(buf: []u8, allocator: *std.mem.Allocator) Reader {
+ return Reader{
+ .buf = buf,
+ .remain = buf,
+ .allocator = allocator,
+ };
+ }
+
+ pub fn read(this: *Self, count: usize) ![]u8 {
+ const read_count = @minimum(count, this.remain.len);
+ if (read_count < count) {
+ return error.EOF;
+ }
+
+ var slice = this.remain[0..read_count];
+
+ this.remain = this.remain[read_count..];
+
+ return slice;
+ }
+
+ pub inline fn readAs(this: *Self, comptime T: type) !T {
+ if (!std.meta.trait.hasUniqueRepresentation(T)) {
+ @compileError(@typeName(T) ++ " must have unique representation.");
+ }
+
+ return std.mem.bytesAsValue(T, try this.read(@sizeOf(T)));
+ }
+
+ pub inline fn readByte(this: *Self) !u8 {
+ return (try this.read(1))[0];
+ }
+
+ pub fn readEnum(this: *Self, comptime Enum: type) !Enum {
+ const E = error{
+ /// An integer was read, but it did not match any of the tags in the supplied enum.
+ InvalidValue,
+ };
+ const type_info = @typeInfo(Enum).Enum;
+ const tag = try this.readInt(type_info.tag_type);
+
+ inline for (std.meta.fields(Enum)) |field| {
+ if (tag == field.value) {
+ return @field(Enum, field.name);
+ }
+ }
+
+ return E.InvalidValue;
+ }
+
+ pub inline fn readArray(this: *Self, comptime T: type) ![]const T {
+ const length = try this.readInt(u32);
+ if (length == 0) {
+ return &([_]T{});
+ }
+
+ switch (comptime T) {
+ u8 => {
+ return try this.read(length);
+ },
+ u16, u32, i8, i16, i32 => {
+ var i: u32 = 0;
+ var array = try this.allocator.alloc(T, length);
+ while (i < length) : (i += 1) {
+ array[i] = std.mem.readIntSliceNative(T, (try this.read(@sizeOf(T)))[0..@sizeOf(T)]);
+ }
+ return array;
+ },
+ [:0]const u8, []const u8 => {
+ var i: u32 = 0;
+ var array = try this.allocator.alloc(T, length);
+ while (i < length) : (i += 1) {
+ array[i] = try this.readArray(u8);
+ }
+ return array;
+ },
+ else => {
+ switch (comptime @typeInfo(T)) {
+ .Struct => |Struct| {
+ switch (Struct.layout) {
+ .Packed => {
+ const sizeof = @sizeOf(T);
+ var slice = try this.read(sizeof * length);
+ return std.mem.bytesAsSlice(T, slice);
+ },
+ else => {},
+ }
+ },
+ .Enum => |type_info| {
+ const enum_values = try this.read(length * @sizeOf(type_info.tag_type));
+ return @ptrCast([*]T, enum_values.ptr)[0..length];
+ },
+ else => {},
+ }
+
+ var i: u32 = 0;
+ var array = try this.allocator.alloc(T, length);
+ while (i < length) : (i += 1) {
+ array[i] = try this.readValue(T);
+ }
+
+ return array;
+ },
+ }
+ }
+
+ pub inline fn readByteArray(this: *Self) ![]u8 {
+ const length = try this.readInt(u32);
+ if (length == 0) {
+ return &([_]u8{});
+ }
+
+ return try this.read(@as(usize, length));
+ }
+
+ pub inline fn readInt(this: *Self, comptime T: type) !T {
+ var slice = try this.read(@sizeOf(T));
+
+ return std.mem.readIntSliceNative(T, slice);
+ }
+
+ pub inline fn readBool(this: *Self) !bool {
+ return (try this.readByte()) > 0;
+ }
+
+ pub inline fn readValue(this: *Self, comptime T: type) !T {
+ switch (comptime T) {
+ bool => {
+ return try this.readBool();
+ },
+ u8 => {
+ return try this.readByte();
+ },
+ [*:0]const u8, [:0]const u8, []const u8 => {
+ return try this.readArray(u8);
+ },
+
+ []const [:0]const u8, []const [*:0]const u8, []const []const u8 => {
+ return try this.readArray([]const u8);
+ },
+ []u8, [:0]u8, [*:0]u8 => {
+ return try this.readArray([]u8);
+ },
+ u16, u32, i8, i16, i32 => {
+ return std.mem.readIntSliceNative(T, try this.read(@sizeOf(T)));
+ },
+ else => {
+ switch (comptime @typeInfo(T)) {
+ .Struct => |Struct| {
+ switch (Struct.layout) {
+ .Packed => {
+ const sizeof = @sizeOf(T);
+ var slice = try this.read(sizeof);
+ return @ptrCast(*T, slice[0..sizeof]).*;
+ },
+ else => {},
+ }
+ },
+ .Enum => |type_info| {
+ return try this.readEnum(T);
+ },
+ else => {},
+ }
+
+ return try T.decode(this);
+ },
+ }
+
+ @compileError("Invalid type passed to readValue");
+ }
+};
+
+pub fn Writer(comptime WritableStream: type) type {
+ return struct {
+ const Self = @This();
+ writable: WritableStream,
+
+ pub fn init(writable: WritableStream) Self {
+ return Self{ .writable = writable };
+ }
+
+ pub inline fn write(this: *Self, bytes: anytype) !void {
+ _ = try this.writable.write(bytes);
+ }
+
+ pub inline fn writeByte(this: *Self, byte: u8) !void {
+ _ = try this.writable.write(&[1]u8{byte});
+ }
+
+ pub inline fn writeInt(this: *Self, int: anytype) !void {
+ try this.write(std.mem.asBytes(&int));
+ }
+
+ pub inline fn writeFieldID(this: *Self, comptime id: comptime_int) !void {
+ try this.writeByte(id);
+ }
+
+ pub inline fn writeEnum(this: *Self, val: anytype) !void {
+ try this.writeInt(@enumToInt(val));
+ }
+
+ pub fn writeValue(this: *Self, comptime SliceType: type, slice: SliceType) !void {
+ switch (SliceType) {
+ []u16,
+ []u32,
+ []i16,
+ []i32,
+ []i8,
+ []const u16,
+ []const u32,
+ []const i16,
+ []const i32,
+ []const i8,
+ [:0]u16,
+ [:0]u32,
+ [:0]i16,
+ [:0]i32,
+ [:0]i8,
+ [:0]const u16,
+ [:0]const u32,
+ [:0]const i16,
+ [:0]const i32,
+ [:0]const i8,
+ [*:0]u16,
+ [*:0]u32,
+ [*:0]i16,
+ [*:0]i32,
+ [*:0]i8,
+ [*:0]const u16,
+ [*:0]const u32,
+ [*:0]const i16,
+ [*:0]const i32,
+ [*:0]const i8,
+ => {
+ try this.writeArray(SliceType, slice);
+ },
+
+ []u8,
+ []const u8,
+ [:0]u8,
+ [:0]const u8,
+ [*:0]u8,
+ [*:0]const u8,
+ => {
+ try this.writeArray(u8, slice);
+ },
+
+ u8 => {
+ try this.write(slice);
+ },
+ u16, u32, i16, i32, i8 => {
+ try this.write(std.mem.asBytes(slice));
+ },
+
+ else => {
+ try slice.encode(this);
+ },
+ }
+ }
+
+ pub inline fn writeArray(this: *Self, comptime T: type, slice: anytype) !void {
+ try this.writeInt(@truncate(u32, slice.len));
+
+ switch (T) {
+ u8 => {
+ try this.write(slice);
+ },
+ u16, u32, i16, i32, i8 => {
+ try this.write(std.mem.sliceAsBytes(slice));
+ },
+ [:0]u8,
+ []u8,
+ []u16,
+ []u32,
+ []i16,
+ []i32,
+ []i8,
+ []const u8,
+ [:0]const u8,
+ []const u16,
+ []const u32,
+ []const i16,
+ []const i32,
+ []const i8,
+ [:0]u16,
+ [:0]u32,
+ [:0]i16,
+ [:0]i32,
+ [:0]i8,
+ [:0]const u16,
+ [:0]const u32,
+ [:0]const i16,
+ [:0]const i32,
+ [:0]const i8,
+ [*:0]u16,
+ [*:0]u32,
+ [*:0]i16,
+ [*:0]i32,
+ [*:0]i8,
+ [*:0]const u16,
+ [*:0]const u32,
+ [*:0]const i16,
+ [*:0]const i32,
+ [*:0]const i8,
+ => {
+ for (slice) |num_slice| {
+ try this.writeArray(std.meta.Child(@TypeOf(num_slice)), num_slice);
+ }
+ },
+ else => {
+ for (slice) |val| {
+ try val.encode(this);
+ }
+ },
+ }
+ }
+
+ pub inline fn endMessage(this: *Self) !void {
+ try this.writeByte(0);
+ }
+ };
+}
+
+pub const ByteWriter = Writer(*std.io.FixedBufferStream([]u8));
+pub const FileWriter = Writer(std.fs.File);
+
+pub const Hop = struct {
+ pub const StringPointer = packed struct {
+ /// off
+ off: u32 = 0,
+
+ /// len
+ len: u32 = 0,
+
+ pub fn decode(reader: anytype) anyerror!StringPointer {
+ var this = std.mem.zeroes(StringPointer);
+
+ this.off = try reader.readValue(u32);
+ this.len = try reader.readValue(u32);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeInt(this.off);
+ try writer.writeInt(this.len);
+ }
+ };
+
+ pub const File = packed struct {
+ /// name
+ name: StringPointer,
+
+ /// name_hash
+ name_hash: u32 = 0,
+
+ /// chmod
+ chmod: u32 = 0,
+
+ /// mtime
+ mtime: u32 = 0,
+
+ /// ctime
+ ctime: u32 = 0,
+
+ /// data
+ data: StringPointer,
+
+ pub fn decode(reader: anytype) anyerror!File {
+ var this = File{ .name = StringPointer{}, .data = .{} };
+
+ this.name = try reader.readValue(StringPointer);
+ this.name_hash = try reader.readValue(u32);
+ this.chmod = try reader.readValue(u32);
+ this.mtime = try reader.readValue(u32);
+ this.ctime = try reader.readValue(u32);
+ this.data = try reader.readValue(StringPointer);
+ return this;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ try writer.writeValue(@TypeOf(this.name), this.name);
+ try writer.writeInt(this.name_hash);
+ try writer.writeInt(this.chmod);
+ try writer.writeInt(this.mtime);
+ try writer.writeInt(this.ctime);
+ try writer.writeValue(@TypeOf(this.data), this.data);
+ }
+ };
+
+ pub const Archive = struct {
+ /// version
+ version: ?u32 = null,
+
+ /// content_offset
+ content_offset: ?u32 = null,
+
+ /// files
+ files: []align(1) const File,
+
+ /// name_hashes
+ name_hashes: []align(1) const u32,
+
+ /// metadata
+ metadata: []align(1) const u8,
+
+ pub fn decode(reader: anytype) anyerror!Archive {
+ var this = std.mem.zeroes(Archive);
+
+ while (true) {
+ switch (try reader.readByte()) {
+ 0 => {
+ return this;
+ },
+
+ 1 => {
+ this.version = try reader.readValue(u32);
+ },
+ 2 => {
+ this.content_offset = try reader.readValue(u32);
+ },
+ 3 => {
+ this.files = try reader.readArray(File);
+ },
+ 4 => {
+ this.name_hashes = try reader.readArray(u32);
+ },
+ 5 => {
+ this.metadata = try reader.readArray(u8);
+ },
+ else => {
+ return error.InvalidMessage;
+ },
+ }
+ }
+ unreachable;
+ }
+
+ pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
+ if (this.version) |version| {
+ try writer.writeFieldID(1);
+ try writer.writeInt(version);
+ }
+ if (this.content_offset) |content_offset| {
+ try writer.writeFieldID(2);
+ try writer.writeInt(content_offset);
+ }
+ if (this.files.len > 0) {
+ try writer.writeFieldID(3);
+ try writer.writeArray(File, this.files);
+ }
+ if (this.name_hashes.len > 0) {
+ try writer.writeFieldID(4);
+ try writer.writeArray(u32, this.name_hashes);
+ }
+ if (this.metadata.len > 0) {
+ try writer.writeFieldID(5);
+ try writer.writeArray(u8, this.metadata);
+ }
+ try writer.endMessage();
+ }
+ };
+};
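
[editor's note] A hedged round-trip sketch for the schema reader/writer above; it is not part of this commit. It encodes a Hop.Archive into a fixed buffer through ByteWriter and decodes it back with Reader. The import path, buffer size, and field values are illustrative, and an arena allocator is used because Reader.readArray allocates the decoded integer arrays.

const std = @import("std");
const Schema = @import("src/hop/schema.zig"); // illustrative path
const Hop = Schema.Hop;

test "Hop.Archive metadata round trip" {
    var buf: [256]u8 = undefined;
    var stream = std.io.fixedBufferStream(&buf);
    var writer = Schema.ByteWriter.init(&stream);

    const original = Hop.Archive{
        .version = 1,
        .content_offset = 64,
        .files = &[_]Hop.File{},
        .name_hashes = &[_]u32{ 0xDEAD, 0xBEEF },
        .metadata = "hello.txt",
    };
    try original.encode(&writer);

    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    var reader = Schema.Reader.init(stream.getWritten(), &arena.allocator);
    const decoded = try Hop.Archive.decode(&reader);

    try std.testing.expectEqual(@as(u32, 1), decoded.version.?);
    try std.testing.expectEqual(@as(u32, 0xBEEF), decoded.name_hashes[1]);
    try std.testing.expectEqualStrings("hello.txt", decoded.metadata);
}

Field IDs 1 through 5 correspond to the switch in Archive.decode, and the trailing endMessage() zero byte is what terminates the decode loop.
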
diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig
index 6c804dfd1..1ab109068 100644
--- a/src/libarchive/libarchive.zig
+++ b/src/libarchive/libarchive.zig
@@ -3,6 +3,7 @@
const lib = @import("./libarchive-bindings.zig");
usingnamespace @import("../global.zig");
const std = @import("std");
+const Hop = @import("../hop/hop.zig").Library;
const struct_archive = lib.struct_archive;
pub const Seek = enum(c_int) {
set = std.os.SEEK_SET,
@@ -522,6 +523,104 @@ pub const Archive = struct {
}
}
+ pub fn convertToHop(
+ hop: *Hop.Builder,
+ file_buffer: []const u8,
+ ctx: ?*Archive.Context,
+ comptime FilePathAppender: type,
+ appender: FilePathAppender,
+ comptime depth_to_skip: usize,
+ comptime close_handles: bool,
+ comptime log: bool,
+ ) !u32 {
+ var entry: *lib.archive_entry = undefined;
+ var ext: *lib.archive = undefined;
+
+ const flags = @enumToInt(Flags.Extract.time) | @enumToInt(Flags.Extract.perm) | @enumToInt(Flags.Extract.acl) | @enumToInt(Flags.Extract.fflags);
+ var stream: BufferReadStream = undefined;
+ stream.init(file_buffer);
+ defer stream.deinit();
+ _ = stream.openRead();
+ var archive = stream.archive;
+ var count: u32 = 0;
+
+ loop: while (true) {
+ const r = @intToEnum(Status, lib.archive_read_next_header(archive, &entry));
+
+ switch (r) {
+ Status.eof => break :loop,
+ Status.failed, Status.fatal, Status.retry => return error.Fail,
+ else => {
+ var pathname: [:0]const u8 = std.mem.sliceTo(lib.archive_entry_pathname(entry).?, 0);
+ var tokenizer = std.mem.tokenize(u8, std.mem.span(pathname), std.fs.path.sep_str);
+ comptime var depth_i: usize = 0;
+
+ inline while (depth_i < depth_to_skip) : (depth_i += 1) {
+ if (tokenizer.next() == null) continue :loop;
+ }
+
+ var pathname_ = tokenizer.rest();
+ pathname = @intToPtr([*]const u8, @ptrToInt(pathname_.ptr))[0..pathname_.len :0];
+
+ const mask = lib.archive_entry_filetype(entry);
+ const size = @intCast(usize, std.math.max(lib.archive_entry_size(entry), 0));
+ if (size > 0) {
+ const slice = std.mem.span(pathname);
+
+ if (comptime log) {
+ Output.prettyln(" {s}", .{pathname});
+ }
+
+ const file = dir.createFileZ(pathname, .{ .truncate = true }) catch |err| brk: {
+ switch (err) {
+ error.FileNotFound => {
+ try dir.makePath(std.fs.path.dirname(slice) orelse return err);
+ break :brk try dir.createFileZ(pathname, .{ .truncate = true });
+ },
+ else => {
+ return err;
+ },
+ }
+ };
+ count += 1;
+
+ _ = C.fchmod(file.handle, lib.archive_entry_perm(entry));
+
+ if (ctx) |ctx_| {
+ const hash: u64 = if (ctx_.pluckers.len > 0)
+ std.hash.Wyhash.hash(0, slice)
+ else
+ @as(u64, 0);
+
+ if (comptime FilePathAppender != void) {
+ var result = ctx.?.all_files.getOrPutAdapted(hash, Context.U64Context{}) catch unreachable;
+ if (!result.found_existing) {
+ result.value_ptr.* = (try appender.appendMutable(@TypeOf(slice), slice)).ptr;
+ }
+ }
+
+ for (ctx_.pluckers) |*plucker_| {
+ if (plucker_.filename_hash == hash) {
+ try plucker_.contents.inflate(size);
+ plucker_.contents.list.expandToCapacity();
+ var read = lib.archive_read_data(archive, plucker_.contents.list.items.ptr, size);
+ try plucker_.contents.inflate(@intCast(usize, read));
+ plucker_.found = read > 0;
+ plucker_.fd = file.handle;
+ continue :loop;
+ }
+ }
+ }
+
+ _ = lib.archive_read_data_into_fd(archive, file.handle);
+ }
+ },
+ }
+ }
+
+ return count;
+ }
+
pub fn extractToDisk(
file_buffer: []const u8,
root: []const u8,
diff --git a/src/runtime.version b/src/runtime.version
index 936ae3d67..ac5eaa983 100644
--- a/src/runtime.version
+++ b/src/runtime.version
@@ -1 +1 @@
-b79c80cf594c185e
\ No newline at end of file
+6634fea9dc3eb4e6
\ No newline at end of file