author | 2023-05-07 21:08:32 -0700
committer | 2023-05-07 21:08:32 -0700
commit | af1cd23b8cee839f1e5421e12170908272cb49ed (patch)
tree | e44062352c64512577a7b2a4ae13e576a3ce947e /src
parent | d3ae9ac7ce76c3a7ff42d131baa3304868ccd2b6 (diff)
Add tracy (#2817)
* Add tracy
* RTLD
* Linux needs LD_PRELOAD
* More tracing
* Inline this
---------
Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
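For context on the bullet points above: rather than linking the Tracy client, the patch resolves it at runtime, so on Linux the library has to be injected via `LD_PRELOAD` (or located by `dlopen`), and profiling is only switched on when the `BUN_TRACY` environment variable is set. Below is a minimal sketch of that gate, mirroring the check this diff adds to `BundleV2.init` in `src/bundler/bundle_v2.zig`; it assumes Bun's `bun` root module and `Output` helper are in scope and is not compilable outside the Bun tree.

```zig
const bun = @import("root").bun; // Bun's root module, as used in this patch
const Output = bun.Output;

/// Sketch of the BUN_TRACY gate added to BundleV2.init in this diff.
/// `env_value` stands in for `bundler.env.get("BUN_TRACY")`.
fn maybeStartTracy(env_value: ?[]const u8) void {
    if (env_value == null) return; // BUN_TRACY not set: tracing stays disabled

    if (!bun.tracy.init()) {
        // dlopen/dlsym could not locate a Tracy client library
        Output.prettyErrorln("Failed to load Tracy. Is it installed in your include path?", .{});
        Output.flush();
        return;
    }

    bun.tracy.start();

    if (!bun.tracy.isConnected()) {
        Output.prettyErrorln("Tracy is not connected. Is Tracy running on your computer?", .{});
        Output.flush();
    }
}
```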
Diffstat (limited to 'src')
-rw-r--r-- | src/bun.js/api/JSBundler.zig | 9
-rw-r--r-- | src/bun.zig | 3
-rw-r--r-- | src/bundler/bundle_v2.zig | 170
-rw-r--r-- | src/js_ast.zig | 2
-rw-r--r-- | src/js_parser.zig | 29
-rw-r--r-- | src/js_printer.zig | 3
-rw-r--r-- | src/tracy.zig | 548
7 files changed, 718 insertions, 46 deletions
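Most of the 718 insertions counted above follow a single idiom: open a named Tracy zone at the top of a hot function and close it with `defer`, so the zone ends on every return path. `bundle_v2.zig` additionally defines a small local helper that prefixes each zone name with `"Bundler."` at comptime. Here is a minimal sketch of that pattern, assuming the `bun.tracy` module added by this patch; the helper name `tracer` matches the diff, while `doWork` is a placeholder for functions like `findReachableFiles` or `computeChunks`.

```zig
const std = @import("std");
const bun = @import("root").bun;

// Local helper from bundle_v2.zig: every bundler zone shows up as "Bundler.<name>".
fn tracer(comptime src: std.builtin.SourceLocation, comptime name: [*:0]const u8) bun.tracy.Ctx {
    return bun.tracy.traceNamed(src, "Bundler." ++ name);
}

fn doWork() void {
    const trace = tracer(@src(), "doWork");
    defer trace.end(); // the zone closes on any return path
    // ... the function body being profiled ...
}
```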
diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index f65ecd47e..cccbca9db 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -760,6 +760,9 @@ pub const JSBundler = struct { is_onLoad: bool, ) bool { JSC.markBinding(@src()); + const tracer = bun.tracy.traceNamed(@src(), "JSBundler.hasAnyMatches"); + defer tracer.end(); + const namespace_string = if (path.isFile()) ZigString.Empty else @@ -777,6 +780,8 @@ pub const JSBundler = struct { default_loader: options.Loader, ) void { JSC.markBinding(@src()); + const tracer = bun.tracy.traceNamed(@src(), "JSBundler.matchOnLoad"); + defer tracer.end(); const namespace_string = if (namespace.len == 0) ZigString.init("file") else @@ -795,6 +800,8 @@ pub const JSBundler = struct { import_record_kind: bun.ImportKind, ) void { JSC.markBinding(@src()); + const tracer = bun.tracy.traceNamed(@src(), "JSBundler.matchOnResolve"); + defer tracer.end(); const namespace_string = if (strings.eqlComptime(namespace, "file")) ZigString.Empty else @@ -809,6 +816,8 @@ pub const JSBundler = struct { object: JSC.JSValue, ) JSValue { JSC.markBinding(@src()); + const tracer = bun.tracy.traceNamed(@src(), "JSBundler.addPlugin"); + defer tracer.end(); return JSBundlerPlugin__runSetupFunction(this, object); } diff --git a/src/bun.zig b/src/bun.zig index aacd88540..d3da6126f 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -1465,3 +1465,6 @@ pub fn HiveRef(comptime T: type, comptime capacity: u16) type { } pub const MaxHeapAllocator = @import("./max_heap_allocator.zig").MaxHeapAllocator; + +pub const tracy = @import("./tracy.zig"); +pub const trace = tracy.trace; diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 47ed7b431..d572d97a9 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -128,6 +128,10 @@ const JSC = bun.JSC; const debugTreeShake = Output.scoped(.TreeShake, true); const BitSet = bun.bit_set.DynamicBitSetUnmanaged; +fn tracer(comptime src: std.builtin.SourceLocation, comptime name: [*:0]const u8) bun.tracy.Ctx { + return bun.tracy.traceNamed(src, "Bundler." 
++ name); +} + pub const ThreadPool = struct { pool: *ThreadPoolLib = undefined, workers_assignments: std.AutoArrayHashMap(std.Thread.Id, *Worker) = std.AutoArrayHashMap(std.Thread.Id, *Worker).init(bun.default_allocator), @@ -167,6 +171,9 @@ pub const ThreadPool = struct { } pub fn getWorker(this: *ThreadPool, id: std.Thread.Id) *Worker { + const trace = tracer(@src(), "getWorker"); + defer trace.end(); + var worker: *Worker = undefined; { this.workers_assignments_lock.lock(); @@ -266,10 +273,14 @@ pub const ThreadPool = struct { } fn create(this: *Worker, ctx: *BundleV2) void { + const trace = tracer(@src(), "Worker.create"); + defer trace.end(); + this.has_created = true; Output.Source.configureThread(); this.heap = ThreadlocalArena.init() catch unreachable; this.allocator = this.heap.allocator(); + var allocator = this.allocator; this.ast_memory_allocator = .{ .allocator = this.allocator }; @@ -336,6 +347,9 @@ pub const BundleV2 = struct { } pub fn findReachableFiles(this: *BundleV2) ![]Index { + const trace = tracer(@src(), "findReachableFiles"); + defer trace.end(); + const Visitor = struct { reachable: std.ArrayList(Index), visited: bun.bit_set.DynamicBitSet = undefined, @@ -662,6 +676,23 @@ pub const BundleV2 = struct { thread_pool: ?*ThreadPoolLib, heap: ?ThreadlocalArena, ) !*BundleV2 { + tracy: { + if (bundler.env.get("BUN_TRACY") != null) { + if (!bun.tracy.init()) { + Output.prettyErrorln("Failed to load Tracy. Is it installed in your include path?", .{}); + Output.flush(); + break :tracy; + } + + bun.tracy.start(); + + if (!bun.tracy.isConnected()) { + Output.prettyErrorln("Tracy is not connected. Is Tracy running on your computer?", .{}); + Output.flush(); + break :tracy; + } + } + } var generator = try allocator.create(BundleV2); bundler.options.mark_builtins_as_external = bundler.options.target.isBun() or bundler.options.target == .node; bundler.resolver.opts.mark_builtins_as_external = bundler.options.target.isBun() or bundler.options.target == .node; @@ -778,6 +809,8 @@ pub const BundleV2 = struct { } fn cloneAST(this: *BundleV2) !void { + const trace = tracer(@src(), "cloneAST"); + defer trace.end(); this.linker.allocator = this.bundler.allocator; this.linker.graph.allocator = this.bundler.allocator; this.linker.graph.ast = try this.graph.ast.clone(this.linker.allocator); @@ -792,6 +825,8 @@ pub const BundleV2 = struct { } pub fn enqueueShadowEntryPoints(this: *BundleV2) !void { + const trace = tracer(@src(), "enqueueShadowEntryPoints"); + defer trace.end(); const allocator = this.graph.allocator; // TODO: make this not slow @@ -1707,6 +1742,8 @@ pub const BundleV2 = struct { } pub fn onParseTaskComplete(parse_result: *ParseTask.Result, this: *BundleV2) void { + const trace = tracer(@src(), "onParseTaskComplete"); + defer trace.end(); defer bun.default_allocator.destroy(parse_result); var graph = &this.graph; @@ -2045,6 +2082,8 @@ pub const ParseTask = struct { ) !js_ast.Ast { switch (loader) { .jsx, .tsx, .js, .ts => { + const trace = tracer(@src(), "ParseJS"); + defer trace.end(); return if (try resolver.caches.js.parse( bundler.allocator, opts, @@ -2057,10 +2096,14 @@ pub const ParseTask = struct { try getEmptyAST(log, bundler, opts, allocator, source); }, .json => { + const trace = tracer(@src(), "ParseJSON"); + defer trace.end(); const root = (try resolver.caches.json.parseJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty); return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?; 
}, .toml => { + const trace = tracer(@src(), "ParseTOML"); + defer trace.end(); const root = try TOML.parse(&source, log, allocator); return (try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?; }, @@ -2107,6 +2150,8 @@ pub const ParseTask = struct { var entry: CacheEntry = switch (task.contents_or_fd) { .fd => brk: { + const trace = tracer(@src(), "readFile"); + defer trace.end(); if (bundler.options.framework) |framework| { if (framework.override_modules_hashes.len > 0) { const package_relative_path_hash = wyhash(0, file_path.pretty); @@ -2257,6 +2302,8 @@ pub const ParseTask = struct { } resolution: { + const trace = tracer(@src(), "resolve"); + defer trace.end(); if (task.presolved_source_indices.len > 0) { for (ast.import_records.slice(), task.presolved_source_indices) |*record, source_index| { if (record.is_unused or record.is_internal) @@ -3463,6 +3510,8 @@ const LinkerContext = struct { use_directive_entry_points: UseDirective.List, reachable: []Index, ) !void { + const trace = tracer(@src(), "CloneLinkerGraph"); + defer trace.end(); this.parse_graph = &bundle.graph; this.graph.code_splitting = bundle.bundler.options.code_splitting; @@ -3581,6 +3630,9 @@ const LinkerContext = struct { this: *LinkerContext, unique_key: u64, ) ![]Chunk { + const trace = tracer(@src(), "computeChunks"); + defer trace.end(); + var stack_fallback = std.heap.stackFallback(4096, this.allocator); var stack_all = stack_fallback.get(); var arena = std.heap.ArenaAllocator.init(stack_all); @@ -3716,6 +3768,9 @@ const LinkerContext = struct { } pub fn findAllImportedPartsInJSOrder(this: *LinkerContext, temp_allocator: std.mem.Allocator, chunks: []Chunk) !void { + const trace = tracer(@src(), "findAllImportedPartsInJSOrder"); + defer trace.end(); + var part_ranges_shared = std.ArrayList(PartRange).init(temp_allocator); var parts_prefix_shared = std.ArrayList(PartRange).init(temp_allocator); defer part_ranges_shared.deinit(); @@ -4104,6 +4159,8 @@ const LinkerContext = struct { } pub fn scanImportsAndExports(this: *LinkerContext) !void { + const outer_trace = tracer(@src(), "scanImportsAndExports"); + defer outer_trace.end(); const reachable = this.graph.reachable_files; const output_format = this.options.output_format; { @@ -4125,6 +4182,8 @@ const LinkerContext = struct { // Step 1: Figure out what modules must be CommonJS for (reachable) |source_index_| { + const trace = tracer(@src(), "FigureOutCommonJS"); + defer trace.end(); const id = source_index_.get(); // does it have a JS AST? @@ -4238,6 +4297,8 @@ const LinkerContext = struct { // bundle time. { + const trace = tracer(@src(), "WrapDependencies"); + defer trace.end(); var dependency_wrapper = DependencyWrapper{ .linker = this, .flags = flags, @@ -4287,6 +4348,8 @@ const LinkerContext = struct { // are ignored for those modules. { var export_star_ctx: ?ExportStarContext = null; + const trace = tracer(@src(), "ResolveExportStarStatements"); + defer trace.end(); defer { if (export_star_ctx) |*export_ctx| { export_ctx.source_index_stack.deinit(); @@ -4348,6 +4411,8 @@ const LinkerContext = struct { // export stars because imports can bind to export star re-exports. 
{ this.cycle_detector.clearRetainingCapacity(); + const trace = tracer(@src(), "MatchImportsWithExports"); + defer trace.end(); var wrapper_part_indices = this.graph.meta.items(.wrapper_part_index); var imports_to_bind = this.graph.meta.items(.imports_to_bind); var to_mark_as_esm_with_dynamic_fallback = std.AutoArrayHashMap(u32, void).init(this.allocator); @@ -4428,7 +4493,8 @@ const LinkerContext = struct { // parts that declare the export to all parts that use the import. Also // generate wrapper parts for wrapped files. { - + const trace = tracer(@src(), "BindImportsToExports"); + defer trace.end(); // const needs_export_symbol_from_runtime: []const bool = this.graph.meta.items(.needs_export_symbol_from_runtime); var runtime_export_symbol_ref: Ref = Ref.None; @@ -5144,6 +5210,8 @@ const LinkerContext = struct { /// imported using an import star statement. pub fn doStep5(c: *LinkerContext, source_index_: Index, _: usize) void { const source_index = source_index_.get(); + const trace = tracer(@src(), "CreateNamespaceExports"); + defer trace.end(); const id = source_index; if (id > c.graph.meta.len) return; @@ -5338,6 +5406,9 @@ const LinkerContext = struct { } pub fn treeShakingAndCodeSplitting(c: *LinkerContext) !void { + const trace = tracer(@src(), "treeShakingAndCodeSplitting"); + defer trace.end(); + var parts = c.graph.ast.items(.parts); var import_records = c.graph.ast.items(.import_records); var side_effects = c.parse_graph.input_files.items(.side_effects); @@ -5345,42 +5416,51 @@ const LinkerContext = struct { const entry_points = c.graph.entry_points.items(.source_index); var distances = c.graph.files.items(.distance_from_entry_point); - // Tree shaking: Each entry point marks all files reachable from itself - for (entry_points) |entry_point| { - c.markFileLiveForTreeShaking( - entry_point, - side_effects, - parts, - import_records, - entry_point_kinds, - ); + { + const trace2 = tracer(@src(), "markFileLiveForTreeShaking"); + defer trace2.end(); + // Tree shaking: Each entry point marks all files reachable from itself + for (entry_points) |entry_point| { + c.markFileLiveForTreeShaking( + entry_point, + side_effects, + parts, + import_records, + entry_point_kinds, + ); + } } - var file_entry_bits: []AutoBitSet = c.graph.files.items(.entry_bits); - // AutoBitSet needs to be initialized if it is dynamic - if (AutoBitSet.needsDynamic(entry_points.len)) { - for (file_entry_bits) |*bits| { - bits.* = try AutoBitSet.initEmpty(c.allocator, entry_points.len); + { + const trace2 = tracer(@src(), "markFileReachableForCodeSplitting"); + defer trace2.end(); + + var file_entry_bits: []AutoBitSet = c.graph.files.items(.entry_bits); + // AutoBitSet needs to be initialized if it is dynamic + if (AutoBitSet.needsDynamic(entry_points.len)) { + for (file_entry_bits) |*bits| { + bits.* = try AutoBitSet.initEmpty(c.allocator, entry_points.len); + } + } else if (file_entry_bits.len > 0) { + // assert that the tag is correct + std.debug.assert(file_entry_bits[0] == .static); } - } else if (file_entry_bits.len > 0) { - // assert that the tag is correct - std.debug.assert(file_entry_bits[0] == .static); - } - // Code splitting: Determine which entry points can reach which files. This - // has to happen after tree shaking because there is an implicit dependency - // between live parts within the same file. All liveness has to be computed - // first before determining which entry points can reach which files. - for (entry_points, 0..) 
|entry_point, i| { - c.markFileReachableForCodeSplitting( - entry_point, - i, - distances, - 0, - parts, - import_records, - file_entry_bits, - ); + // Code splitting: Determine which entry points can reach which files. This + // has to happen after tree shaking because there is an implicit dependency + // between live parts within the same file. All liveness has to be computed + // first before determining which entry points can reach which files. + for (entry_points, 0..) |entry_point, i| { + c.markFileReachableForCodeSplitting( + entry_point, + i, + distances, + 0, + parts, + import_records, + file_entry_bits, + ); + } } } @@ -5840,6 +5920,8 @@ const LinkerContext = struct { chunk: *Chunk, files_in_order: []const u32, ) !renamer.Renamer { + const trace = tracer(@src(), "renameSymbolsInChunk"); + defer trace.end(); const all_module_scopes = c.graph.ast.items(.module_scope); const all_flags: []const JSMeta.Flags = c.graph.meta.items(.flags); const all_parts: []const js_ast.Part.List = c.graph.ast.items(.parts); @@ -6108,6 +6190,9 @@ const LinkerContext = struct { } fn generateCompileResultForJSChunk_(worker: *ThreadPool.Worker, c: *LinkerContext, chunk: *Chunk, part_range: PartRange) CompileResult { + const trace = tracer(@src(), "generateCodeForFileInChunkJS"); + defer trace.end(); + var arena = &worker.temporary_arena; var buffer_writer = js_printer.BufferWriter.init(worker.allocator) catch unreachable; defer _ = arena.reset(.retain_capacity); @@ -6142,6 +6227,9 @@ const LinkerContext = struct { // This runs after we've already populated the compile results fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chunk: *Chunk, chunk_index: usize) !void { + const trace = tracer(@src(), "postProcessJSChunk"); + defer trace.end(); + _ = chunk_index; const allocator = worker.allocator; const c = ctx.c; @@ -6441,6 +6529,8 @@ const LinkerContext = struct { can_have_shifts: bool, ) !sourcemap.SourceMapPieces { std.debug.assert(results.len > 0); + const trace = tracer(@src(), "generateSourceMapForChunk"); + defer trace.end(); var j = Joiner{}; const sources = c.parse_graph.input_files.items(.source); @@ -6561,6 +6651,9 @@ const LinkerContext = struct { } pub fn generateIsolatedHash(c: *LinkerContext, chunk: *const Chunk) u64 { + const trace = tracer(@src(), "generateIsolatedHash"); + defer trace.end(); + var hasher = ContentHasher{}; // Mix the file names and part ranges of all of the files in this chunk into @@ -7827,6 +7920,7 @@ const LinkerContext = struct { allocator: std.mem.Allocator, temp_allocator: std.mem.Allocator, ) js_printer.PrintResult { + // var file = &c.graph.files.items(.input_file)[part.source_index.get()]; var parts: []js_ast.Part = c.graph.ast.items(.parts)[part_range.source_index.get()].slice()[part_range.part_index_begin..part_range.part_index_end]; // const resolved_exports: []ResolvedExports = c.graph.meta.items(.resolved_exports); @@ -8365,6 +8459,9 @@ const LinkerContext = struct { }; pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk) !std.ArrayList(options.OutputFile) { + const trace = tracer(@src(), "generateChunksInParallel"); + defer trace.end(); + { debug(" START {d} renamers", .{chunks.len}); defer debug(" DONE {d} renamers", .{chunks.len}); @@ -8691,6 +8788,8 @@ const LinkerContext = struct { react_client_components_manifest: []const u8, output_files: *std.ArrayList(options.OutputFile), ) !void { + const trace = tracer(@src(), "writeOutputFilesToDisk"); + defer trace.end(); var root_dir = 
std.fs.cwd().makeOpenPathIterable(root_path, .{}) catch |err| { c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "{s} opening outdir {}", .{ @errorName(err), @@ -8755,6 +8854,8 @@ const LinkerContext = struct { var pathbuf: [bun.MAX_PATH_BYTES]u8 = undefined; for (chunks) |*chunk| { + const trace2 = tracer(@src(), "writeChunkToDisk"); + defer trace2.end(); defer max_heap_allocator.reset(); var rel_path = chunk.final_rel_path; @@ -10049,6 +10150,9 @@ const LinkerContext = struct { j: *bun.Joiner, count: u32, ) !Chunk.IntermediateOutput { + const trace = tracer(@src(), "breakOutputIntoPieces"); + defer trace.end(); + if (!j.contains(c.unique_key_prefix)) // There are like several cases that prohibit this from being checked more trivially, example: // 1. dynamic imports diff --git a/src/js_ast.zig b/src/js_ast.zig index b01bb36ca..939a70b73 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1317,6 +1317,8 @@ pub const Symbol = struct { } pub fn followAll(symbols: *Map) void { + const trace = bun.tracy.traceNamed(@src(), "Symbols.followAll"); + defer trace.end(); for (symbols.symbols_for_source.slice()) |list| { for (list.slice()) |*symbol| { if (!symbol.hasLink()) continue; diff --git a/src/js_parser.zig b/src/js_parser.zig index d061a0cdb..5993ea858 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -2859,6 +2859,7 @@ pub const Parser = struct { // Parse the file in the first pass, but do not bind symbols var opts = ParseStatementOptions{ .is_module_scope = true }; + const parse_tracer = bun.tracy.traceNamed(@src(), "JSParser.parse"); // Parsing seems to take around 2x as much time as visiting. // Which makes sense. @@ -2866,6 +2867,8 @@ pub const Parser = struct { // June 4: "Rest of this took: 8003000" const stmts = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts); + parse_tracer.end(); + // Halt parsing right here if there were any errors // This fixes various conditions that would cause crashes due to the AST being in an invalid state while visiting // In a number of situations, we continue to parsing despite errors so that we can report more errors to the user @@ -2875,6 +2878,7 @@ pub const Parser = struct { return error.SyntaxError; } + const visit_tracer = bun.tracy.traceNamed(@src(), "JSParser.visit"); try p.prepareForVisitPass(); // ESM is always strict mode. I don't think we need this. @@ -3020,11 +3024,16 @@ pub const Parser = struct { } } + visit_tracer.end(); + // If there were errors while visiting, also halt here if (self.log.errors > orig_error_count) { return error.SyntaxError; } + const postvisit_tracer = bun.tracy.traceNamed(@src(), "JSParser.postvisit"); + defer postvisit_tracer.end(); + const uses_dirname = p.symbols.items[p.dirname_ref.innerIndex()].use_count_estimate > 0; const uses_filename = p.symbols.items[p.filename_ref.innerIndex()].use_count_estimate > 0; @@ -5703,6 +5712,7 @@ fn NewParser_( } } + /// This function is very very hot. 
pub fn handleIdentifier(p: *P, loc: logger.Loc, ident: E.Identifier, _original_name: ?string, opts: IdentifierOpts) Expr { const ref = ident.ref; @@ -5761,7 +5771,12 @@ fn NewParser_( return p.newExpr(_ident, loc); } - return p.newExpr(ident, loc); + return Expr{ + .data = .{ + .e_identifier = ident, + }, + .loc = loc, + }; } pub fn generateImportStmt( @@ -13946,18 +13961,6 @@ fn NewParser_( return error.SyntaxError; } - fn jsxRefToMemberExpression(p: *P, loc: logger.Loc, ref: Ref) Expr { - p.recordUsage(ref); - return p.handleIdentifier( - loc, - E.Identifier{ - .ref = ref, - .can_be_removed_if_unused = true, - .call_can_be_unwrapped_if_unused = true, - }, - ); - } - fn jsxStringsToMemberExpression(p: *P, loc: logger.Loc, parts: []const []const u8) !Expr { const result = try p.findSymbol(loc, parts[0]); diff --git a/src/js_printer.zig b/src/js_printer.zig index ea289eeed..14d6321d2 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -5810,6 +5810,9 @@ pub fn print( renamer: bun.renamer.Renamer, comptime generate_source_maps: bool, ) PrintResult { + const trace = bun.tracy.traceNamed(@src(), "JSPrinter.print"); + defer trace.end(); + var buffer_writer = BufferWriter.init(allocator) catch |err| return .{ .err = err }; var buffer_printer = BufferPrinter.init(buffer_writer); diff --git a/src/tracy.zig b/src/tracy.zig new file mode 100644 index 000000000..3563bd0e9 --- /dev/null +++ b/src/tracy.zig @@ -0,0 +1,548 @@ +/// https://github.com/wolfpld/tracy +/// To use this module, you must have Tracy installed on your system. +/// On macOS, you can install it with `brew install tracy`. +/// +/// This file is based on the code from Zig's transpiler source. +/// Thank you to the Zig team +/// +const std = @import("std"); +const builtin = @import("builtin"); +const build_options = @import("build_options"); +const bun = @import("root").bun; + +pub const enable_allocation = false; +pub const enable_callstack = false; +pub var enable = false; + +// TODO: make this configurable +const callstack_depth = 10; + +const ___tracy_c_zone_context = extern struct { + id: u32 = 0, + active: c_int = 0, + + pub inline fn end(self: @This()) void { + if (!enable) return; + ___tracy_emit_zone_end(self); + } + + pub inline fn addText(self: @This(), text: []const u8) void { + if (!enable) return; + ___tracy_emit_zone_text(self, text.ptr, text.len); + } + + pub inline fn setName(self: @This(), name: []const u8) void { + if (!enable) return; + ___tracy_emit_zone_name(self, name.ptr, name.len); + } + + pub inline fn setColor(self: @This(), color: u32) void { + if (!enable) return; + ___tracy_emit_zone_color(self, color); + } + + pub inline fn setValue(self: @This(), value: u64) void { + if (!enable) return; + ___tracy_emit_zone_value(self, value); + } +}; + +pub const Ctx = ___tracy_c_zone_context; + +pub inline fn trace(comptime src: std.builtin.SourceLocation) Ctx { + if (!enable) return .{}; + + if (enable_callstack) { + return ___tracy_emit_zone_begin_callstack(&.{ + .name = null, + .function = src.fn_name.ptr, + .file = src.file.ptr, + .line = src.line, + .color = 0, + }, callstack_depth, 1); + } else { + const holder = struct { + pub const srcloc = ___tracy_source_location_data{ + .name = null, + .function = src.fn_name.ptr, + .file = src.file.ptr, + .line = src.line, + .color = 0, + }; + }; + + return ___tracy_emit_zone_begin(&holder.srcloc, 1); + } +} + +pub inline fn traceNamed(comptime src: std.builtin.SourceLocation, comptime name: [*:0]const u8) Ctx { + if (!enable) return .{}; + + if (enable_callstack) 
{ + return ___tracy_emit_zone_begin_callstack(&.{ + .name = name, + .function = src.fn_name.ptr, + .file = src.file.ptr, + .line = src.line, + .color = 0, + }, callstack_depth, 1); + } else { + const holder = struct { + pub var data: ___tracy_source_location_data = undefined; + }; + holder.data = ___tracy_source_location_data{ + .name = name, + .function = src.fn_name.ptr, + .file = src.file.ptr, + .line = src.line, + .color = 0, + }; + return ___tracy_emit_zone_begin(&holder.data, 1); + } +} + +pub fn tracyAllocator(allocator: std.mem.Allocator) TracyAllocator(null) { + return TracyAllocator(null).init(allocator); +} + +pub fn TracyAllocator(comptime name: ?[:0]const u8) type { + return struct { + parent_allocator: std.mem.Allocator, + + const Self = @This(); + + pub fn init(parent_allocator: std.mem.Allocator) Self { + return .{ + .parent_allocator = parent_allocator, + }; + } + + pub fn allocator(self: *Self) std.mem.Allocator { + return .{ + .ptr = self, + .vtable = &.{ + .alloc = allocFn, + .resize = resizeFn, + .free = freeFn, + }, + }; + } + + fn allocFn(ptr: *anyopaque, len: usize, ptr_align: u8, ret_addr: usize) ?[*]u8 { + const self = @ptrCast(*Self, @alignCast(@alignOf(Self), ptr)); + const result = self.parent_allocator.rawAlloc(len, ptr_align, ret_addr); + if (result) |data| { + if (len != 0) { + if (name) |n| { + allocNamed(data, len, n); + } else { + alloc(data, len); + } + } + } else { + messageColor("allocation failed", 0xFF0000); + } + return result; + } + + fn resizeFn(ptr: *anyopaque, buf: []u8, buf_align: u8, new_len: usize, ret_addr: usize) bool { + const self = @ptrCast(*Self, @alignCast(@alignOf(Self), ptr)); + if (self.parent_allocator.rawResize(buf, buf_align, new_len, ret_addr)) { + if (name) |n| { + freeNamed(buf.ptr, n); + allocNamed(buf.ptr, new_len, n); + } else { + free(buf.ptr); + alloc(buf.ptr, new_len); + } + + return true; + } + + // during normal operation the compiler hits this case thousands of times due to this + // emitting messages for it is both slow and causes clutter + return false; + } + + fn freeFn(ptr: *anyopaque, buf: []u8, buf_align: u8, ret_addr: usize) void { + const self = @ptrCast(*Self, @alignCast(@alignOf(Self), ptr)); + self.parent_allocator.rawFree(buf, buf_align, ret_addr); + // this condition is to handle free being called on an empty slice that was never even allocated + // example case: `std.process.getSelfExeSharedLibPaths` can return `&[_][:0]u8{}` + if (buf.len != 0) { + if (name) |n| { + freeNamed(buf.ptr, n); + } else { + free(buf.ptr); + } + } + } + }; +} + +// This function only accepts comptime-known strings, see `messageCopy` for runtime strings +pub inline fn message(comptime msg: [:0]const u8) void { + if (!enable) return; + ___tracy_emit_messageL(msg.ptr, if (enable_callstack) callstack_depth else 0); +} + +// This function only accepts comptime-known strings, see `messageColorCopy` for runtime strings +pub inline fn messageColor(comptime msg: [:0]const u8, color: u32) void { + if (!enable) return; + ___tracy_emit_messageLC(msg.ptr, color, if (enable_callstack) callstack_depth else 0); +} + +pub inline fn messageCopy(msg: []const u8) void { + if (!enable) return; + ___tracy_emit_message(msg.ptr, msg.len, if (enable_callstack) callstack_depth else 0); +} + +pub inline fn messageColorCopy(msg: [:0]const u8, color: u32) void { + if (!enable) return; + ___tracy_emit_messageC(msg.ptr, msg.len, color, if (enable_callstack) callstack_depth else 0); +} + +pub inline fn frameMark() void { + if (!enable) return; + 
___tracy_emit_frame_mark(null); +} + +pub inline fn frameMarkNamed(comptime name: [:0]const u8) void { + if (!enable) return; + ___tracy_emit_frame_mark(name.ptr); +} + +pub inline fn namedFrame(comptime name: [:0]const u8) Frame(name) { + frameMarkStart(name); + return .{}; +} + +pub fn Frame(comptime name: [:0]const u8) type { + return struct { + pub fn end(_: @This()) void { + frameMarkEnd(name); + } + }; +} + +inline fn frameMarkStart(comptime name: [:0]const u8) void { + if (!enable) return; + ___tracy_emit_frame_mark_start(name.ptr); +} + +inline fn frameMarkEnd(comptime name: [:0]const u8) void { + if (!enable) return; + ___tracy_emit_frame_mark_end(name.ptr); +} + +inline fn alloc(ptr: [*]u8, len: usize) void { + if (!enable) return; + + if (enable_callstack) { + ___tracy_emit_memory_alloc_callstack(ptr, len, callstack_depth, 0); + } else { + ___tracy_emit_memory_alloc(ptr, len, 0); + } +} + +inline fn allocNamed(ptr: [*]u8, len: usize, comptime name: [:0]const u8) void { + if (!enable) return; + + if (enable_callstack) { + ___tracy_emit_memory_alloc_callstack_named(ptr, len, callstack_depth, 0, name.ptr); + } else { + ___tracy_emit_memory_alloc_named(ptr, len, 0, name.ptr); + } +} + +inline fn free(ptr: [*]u8) void { + if (!enable) return; + + if (enable_callstack) { + ___tracy_emit_memory_free_callstack(ptr, callstack_depth, 0); + } else { + ___tracy_emit_memory_free(ptr, 0); + } +} + +inline fn freeNamed(ptr: [*]u8, comptime name: [:0]const u8) void { + if (!enable) return; + + if (enable_callstack) { + ___tracy_emit_memory_free_callstack_named(ptr, callstack_depth, 0, name.ptr); + } else { + ___tracy_emit_memory_free_named(ptr, 0, name.ptr); + } +} + +const Tracy = struct { + pub const emit_frame_mark_start = *const fn (name: [*:0]const u8) callconv(.C) void; + pub const emit_frame_mark_end = *const fn (name: [*:0]const u8) callconv(.C) void; + pub const emit_zone_begin = *const fn ( + srcloc: *const ___tracy_source_location_data, + active: c_int, + ) callconv(.C) ___tracy_c_zone_context; + pub const emit_zone_begin_callstack = *const fn ( + srcloc: *const ___tracy_source_location_data, + depth: c_int, + active: c_int, + ) callconv(.C) ___tracy_c_zone_context; + pub const emit_zone_text = *const fn (ctx: ___tracy_c_zone_context, txt: [*]const u8, size: usize) callconv(.C) void; + pub const emit_zone_name = *const fn (ctx: ___tracy_c_zone_context, txt: [*]const u8, size: usize) callconv(.C) void; + pub const emit_zone_color = *const fn (ctx: ___tracy_c_zone_context, color: u32) callconv(.C) void; + pub const emit_zone_value = *const fn (ctx: ___tracy_c_zone_context, value: u64) callconv(.C) void; + pub const emit_zone_end = *const fn (ctx: ___tracy_c_zone_context) callconv(.C) void; + pub const emit_memory_alloc = *const fn (ptr: *const anyopaque, size: usize, secure: c_int) callconv(.C) void; + pub const emit_memory_alloc_callstack = *const fn (ptr: *const anyopaque, size: usize, depth: c_int, secure: c_int) callconv(.C) void; + pub const emit_memory_free = *const fn (ptr: *const anyopaque, secure: c_int) callconv(.C) void; + pub const emit_memory_free_callstack = *const fn (ptr: *const anyopaque, depth: c_int, secure: c_int) callconv(.C) void; + pub const emit_memory_alloc_named = *const fn (ptr: *const anyopaque, size: usize, secure: c_int, name: [*:0]const u8) callconv(.C) void; + pub const emit_memory_alloc_callstack_named = *const fn (ptr: *const anyopaque, size: usize, depth: c_int, secure: c_int, name: [*:0]const u8) callconv(.C) void; + pub const emit_memory_free_named 
= *const fn (ptr: *const anyopaque, secure: c_int, name: [*:0]const u8) callconv(.C) void; + pub const emit_memory_free_callstack_named = *const fn (ptr: *const anyopaque, depth: c_int, secure: c_int, name: [*:0]const u8) callconv(.C) void; + pub const emit_message = *const fn (txt: [*]const u8, size: usize, callstack: c_int) callconv(.C) void; + pub const emit_messageL = *const fn (txt: [*:0]const u8, callstack: c_int) callconv(.C) void; + pub const emit_messageC = *const fn (txt: [*]const u8, size: usize, color: u32, callstack: c_int) callconv(.C) void; + pub const emit_messageLC = *const fn (txt: [*:0]const u8, color: u32, callstack: c_int) callconv(.C) void; + pub const emit_frame_mark = *const fn (name: ?[*:0]const u8) callconv(.C) void; + pub const connected = *const fn () callconv(.C) c_int; + pub const set_thread_name = *const fn (name: [*:0]const u8) callconv(.C) void; + pub const startup_profiler = *const fn () callconv(.C) void; + pub const shutdown_profiler = *const fn () callconv(.C) void; +}; + +fn ___tracy_startup_profiler() void { + // these might not exist + const Fn = dlsym(Tracy.startup_profiler, "___tracy_startup_profiler") orelse return; + Fn(); +} + +fn ___tracy_shutdown_profiler() void { + // these might not exist + const Fn = dlsym(Tracy.shutdown_profiler, "___tracy_shutdown_profiler") orelse return; + Fn(); +} + +pub var has_started = false; +pub fn start() void { + if (!enable or has_started) return; + ___tracy_startup_profiler(); +} + +pub fn stop() void { + if (!enable or !has_started) return; + ___tracy_shutdown_profiler(); +} + +fn ___tracy_connected() c_int { + const Fn = dlsym(Tracy.connected, "___tracy_connected").?; + return Fn(); +} + +fn ___tracy_set_thread_name(name: [*:0]const u8) void { + const Fn = dlsym(Tracy.set_thread_name, "___tracy_set_thread_name").?; + Fn(name); +} + +fn ___tracy_emit_frame_mark_start(name: [*:0]const u8) void { + const Fn = dlsym(Tracy.emit_frame_mark_start, "___tracy_emit_frame_mark_start").?; + Fn(name); +} +fn ___tracy_emit_frame_mark_end(name: [*:0]const u8) void { + const Fn = dlsym(Tracy.emit_frame_mark_end, "___tracy_emit_frame_mark_end").?; + Fn(name); +} +fn ___tracy_emit_zone_begin( + srcloc: *const ___tracy_source_location_data, + active: c_int, +) ___tracy_c_zone_context { + const Fn = dlsym(Tracy.emit_zone_begin, "___tracy_emit_zone_begin").?; + return Fn(srcloc, active); +} +fn ___tracy_emit_zone_begin_callstack( + srcloc: *const ___tracy_source_location_data, + depth: c_int, + active: c_int, +) ___tracy_c_zone_context { + const Fn = dlsym(Tracy.emit_zone_begin_callstack, "___tracy_emit_zone_begin_callstack").?; + return Fn(srcloc, depth, active); +} +fn ___tracy_emit_zone_text(ctx: ___tracy_c_zone_context, txt: [*]const u8, size: usize) void { + const Fn = dlsym(Tracy.emit_zone_text, "___tracy_emit_zone_text").?; + Fn(ctx, txt, size); +} +fn ___tracy_emit_zone_name(ctx: ___tracy_c_zone_context, txt: [*]const u8, size: usize) void { + const Fn = dlsym(Tracy.emit_zone_name, "___tracy_emit_zone_name").?; + Fn(ctx, txt, size); +} +fn ___tracy_emit_zone_color(ctx: ___tracy_c_zone_context, color: u32) void { + const Fn = dlsym(Tracy.emit_zone_color, "___tracy_emit_zone_color").?; + Fn(ctx, color); +} +fn ___tracy_emit_zone_value(ctx: ___tracy_c_zone_context, value: u64) void { + const Fn = dlsym(Tracy.emit_zone_value, "___tracy_emit_zone_value").?; + Fn(ctx, value); +} +fn ___tracy_emit_zone_end(ctx: ___tracy_c_zone_context) void { + const Fn = dlsym(Tracy.emit_zone_end, "___tracy_emit_zone_end").?; + Fn(ctx); +} +fn 
___tracy_emit_memory_alloc(ptr: *const anyopaque, size: usize, secure: c_int) void { + const Fn = dlsym(Tracy.emit_memory_alloc, "___tracy_emit_memory_alloc").?; + Fn(ptr, size, secure); +} +fn ___tracy_emit_memory_alloc_callstack(ptr: *const anyopaque, size: usize, depth: c_int, secure: c_int) void { + const Fn = dlsym(Tracy.emit_memory_alloc_callstack, "___tracy_emit_memory_alloc_callstack").?; + Fn(ptr, size, depth, secure); +} +fn ___tracy_emit_memory_free(ptr: *const anyopaque, secure: c_int) void { + const Fn = dlsym(Tracy.emit_memory_free, "___tracy_emit_memory_free").?; + Fn(ptr, secure); +} +fn ___tracy_emit_memory_free_callstack(ptr: *const anyopaque, depth: c_int, secure: c_int) void { + const Fn = dlsym(Tracy.emit_memory_free_callstack, "___tracy_emit_memory_free_callstack").?; + Fn(ptr, depth, secure); +} +fn ___tracy_emit_memory_alloc_named(ptr: *const anyopaque, size: usize, secure: c_int, name: [*:0]const u8) void { + const Fn = dlsym(Tracy.emit_memory_alloc_named, "___tracy_emit_memory_alloc_named").?; + Fn(ptr, size, secure, name); +} +fn ___tracy_emit_memory_alloc_callstack_named(ptr: *const anyopaque, size: usize, depth: c_int, secure: c_int, name: [*:0]const u8) void { + const Fn = dlsym(Tracy.emit_memory_alloc_callstack_named, "___tracy_emit_memory_alloc_callstack_named").?; + Fn(ptr, size, depth, secure, name); +} +fn ___tracy_emit_memory_free_named(ptr: *const anyopaque, secure: c_int, name: [*:0]const u8) void { + const Fn = dlsym(Tracy.emit_memory_free_named, "___tracy_emit_memory_free_named").?; + Fn(ptr, secure, name); +} +fn ___tracy_emit_memory_free_callstack_named(ptr: *const anyopaque, depth: c_int, secure: c_int, name: [*:0]const u8) void { + const Fn = dlsym(Tracy.emit_memory_free_callstack_named, "___tracy_emit_memory_free_callstack_named").?; + Fn(ptr, depth, secure, name); +} +fn ___tracy_emit_message(txt: [*]const u8, size: usize, callstack: c_int) void { + const Fn = dlsym(Tracy.emit_message, "___tracy_emit_message").?; + Fn(txt, size, callstack); +} +fn ___tracy_emit_messageL(txt: [*:0]const u8, callstack: c_int) void { + const Fn = dlsym(Tracy.emit_messageL, "___tracy_emit_messageL").?; + Fn(txt, callstack); +} +fn ___tracy_emit_messageC(txt: [*]const u8, size: usize, color: u32, callstack: c_int) void { + const Fn = dlsym(Tracy.emit_messageC, "___tracy_emit_messageC").?; + Fn(txt, size, color, callstack); +} +fn ___tracy_emit_messageLC(txt: [*:0]const u8, color: u32, callstack: c_int) void { + const Fn = dlsym(Tracy.emit_messageLC, "___tracy_emit_messageLC").?; + Fn(txt, color, callstack); +} +fn ___tracy_emit_frame_mark(name: ?[*:0]const u8) void { + const Fn = dlsym(Tracy.emit_frame_mark, "___tracy_emit_frame_mark").?; + Fn(name); +} + +pub fn init() bool { + if (enable) + return true; + + if (dlsym(Tracy.emit_message, "___tracy_emit_message") == null) { + return false; + } + enable = true; + return true; +} + +pub fn isConnected() bool { + if (!enable) + return false; + + const Fn = dlsym(Tracy.connected, "___tracy_connected").?; + return Fn() != 0; +} + +pub fn initThread(comptime name: [:0]const u8) void { + if (!enable) + return; + + dlsym(Tracy.set_thread_name, "___tracy_set_thread_name").?(name.ptr); +} + +const ___tracy_source_location_data = extern struct { + name: ?[*:0]const u8 = null, + function: [*:0]const u8 = "", + file: [*:0]const u8 = "", + line: u32 = 0, + color: u32 = 0, +}; + +fn dlsym(comptime Type: type, comptime symbol: [:0]const u8) ?Type { + if (comptime bun.Environment.isLinux) { + // use LD_PRELOAD on linux + if 
(bun.C.dlsym(Type, symbol)) |val| { + return val; + } + } + + const Handle = struct { + pub var handle: ?*anyopaque = null; + pub fn getter() ?*anyopaque { + return handle; + } + }; + + get: { + if (Handle.handle == null) { + const RTLD_LAZY = 1; + const RTLD_LOCAL = 4; + + const paths_to_try = if (bun.Environment.isMac) .{ + "/usr/local/opt/tracy/lib/libtracy.dylib", + "/usr/local/lib/libtracy.dylib", + "/opt/homebrew/lib/libtracy.so", + "/opt/homebrew/lib/libtracy.dylib", + "/usr/lib/libtracy.dylib", + "libtracy.dylib", + "libtracy.so", + "libTracyClient.dylib", + "libTracyClient.so", + } else if (bun.Environment.isLinux) .{ + "/usr/local/lib/libtracy.so", + "/usr/local/opt/tracy/lib/libtracy.so", + "/opt/tracy/lib/libtracy.so", + "/usr/lib/libtracy.so", + "/usr/local/lib/libTracyClient.so", + "/usr/local/opt/tracy/lib/libTracyClient.so", + "/opt/tracy/lib/libTracyClient.so", + "/usr/lib/libTracyClient.so", + "libtracy.so", + "libTracyClient.so", + } else if (bun.Environment.isWindows) .{ + "tracy.dll", + } else .{}; + + const RLTD = if (bun.Environment.isMac) + RTLD_LAZY | RTLD_LOCAL + else + 0; + + if (bun.getenvZ("BUN_TRACY_PATH")) |path| { + const handle = std.c.dlopen(&(std.os.toPosixPath(path) catch unreachable), RLTD); + if (handle != null) { + Handle.handle = handle; + break :get; + } + } + inline for (comptime paths_to_try) |path| { + const handle = std.c.dlopen(path, RLTD); + if (handle != null) { + Handle.handle = handle; + break; + } + } + + if (Handle.handle == null) + return null; + } + } + + return bun.C.dlsymWithHandle(Type, symbol, Handle.getter); +} |
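Finally, note that `tracy.zig` binds every `___tracy_*` entry point through `dlsym` at call time (searching `BUN_TRACY_PATH`, a few Homebrew/system locations, and, on Linux, whatever was `LD_PRELOAD`ed), so all of the public helpers degrade to no-ops when no client library is found. A rough usage sketch of the module's public surface follows, assuming it is imported from inside the Bun tree; the allocator wrapping and frame mark are optional extras shown purely for illustration.

```zig
const std = @import("std");
const tracy = @import("./tracy.zig"); // the module added by this patch

pub fn main() void {
    // Every call below is a no-op unless a Tracy client library was found.
    if (tracy.init()) tracy.start();

    // Wrap an allocator so allocations and frees are reported to the profiler.
    var traced = tracy.tracyAllocator(std.heap.page_allocator);
    const allocator = traced.allocator();

    const buf = allocator.alloc(u8, 64) catch return;
    defer allocator.free(buf);

    // Mark a frame boundary and profile one unit of work as a named zone.
    tracy.frameMark();
    const zone = tracy.traceNamed(@src(), "main.work");
    defer zone.end();
    zone.addText("processing 64 bytes");
}
```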