aboutsummaryrefslogtreecommitdiff
path: root/src/hive_array.zig
blob: 06ba63ae45c6dca53f8c8b8485763894da30c000 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
const std = @import("std");
const assert = std.debug.assert;
const mem = std.mem;
const testing = std.testing;

/// An array that efficiently tracks which elements are in use.
/// The pointers are intended to be stable
/// Sorta related to https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2021/p0447r15.html
/// An array that efficiently tracks which elements are in use.
/// The pointers are intended to be stable
/// Sorta related to https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2021/p0447r15.html
pub fn HiveArray(comptime T: type, comptime capacity: u16) type {
    return struct {
        const Self = @This();

        /// Backing storage. A slot's contents are only meaningful while it
        /// is claimed (its `available` bit is cleared).
        buffer: [capacity]T = undefined,
        /// A set bit at index i means `buffer[i]` is free.
        available: std.bit_set.IntegerBitSet(capacity) = std.bit_set.IntegerBitSet(capacity).initFull(),

        pub const size = capacity;

        pub fn init() Self {
            return .{};
        }

        /// Claims the lowest-index free slot and returns a stable pointer to
        /// it, or null when every slot is in use. The slot's contents are
        /// undefined until the caller writes them.
        pub fn get(self: *Self) ?*T {
            const index = self.available.findFirstSet() orelse return null;
            self.available.unset(index);
            return &self.buffer[index];
        }

        /// Returns a pointer to the slot at `index` without changing its
        /// claimed/free state. Asserts the index is in bounds.
        pub fn at(self: *Self, index: u16) *T {
            assert(index < capacity);
            return &self.buffer[index];
        }

        /// Marks the slot at `index` as in use. Asserts it was free.
        pub fn claim(self: *Self, index: u16) void {
            assert(index < capacity);
            assert(self.available.isSet(index));
            self.available.unset(index);
        }

        /// Maps a pointer back to its slot index, or null when the pointer
        /// does not point into this array's buffer.
        pub fn indexOf(self: *const Self, value: *const T) ?u32 {
            const start = &self.buffer;
            const end = @as([*]const T, @ptrCast(start)) + capacity;
            if (!(@intFromPtr(value) >= @intFromPtr(start) and @intFromPtr(value) < @intFromPtr(end)))
                return null;

            // aligned to the size of T
            const index = (@intFromPtr(value) - @intFromPtr(start)) / @sizeOf(T);
            assert(index < capacity);
            assert(&self.buffer[index] == value);
            return @as(u32, @intCast(index));
        }

        /// Whether `value` points into this array's buffer.
        pub fn in(self: *const Self, value: *const T) bool {
            const start = &self.buffer;
            const end = @as([*]const T, @ptrCast(start)) + capacity;
            return (@intFromPtr(value) >= @intFromPtr(start) and @intFromPtr(value) < @intFromPtr(end));
        }

        /// Releases the slot `value` points to back to the free set.
        /// Returns false (and does nothing) when `value` does not belong to
        /// this array — e.g. it was heap-allocated by `Fallback`. Asserts
        /// against double-free of an in-array slot.
        pub fn put(self: *Self, value: *T) bool {
            const index = self.indexOf(value) orelse return false;

            assert(!self.available.isSet(index));
            assert(&self.buffer[index] == value);

            // Poison the slot so stale reads are caught in safe builds.
            value.* = undefined;

            self.available.set(index);
            return true;
        }

        /// A HiveArray that transparently falls back to heap allocation once
        /// every fixed slot is claimed. `put` routes each pointer back to
        /// whichever side it came from.
        pub const Fallback = struct {
            hive: HiveArray(T, capacity),
            allocator: std.mem.Allocator,

            pub const This = @This();

            pub fn init(allocator: std.mem.Allocator) This {
                return .{
                    .allocator = allocator,
                    .hive = HiveArray(T, capacity).init(),
                };
            }

            /// Returns a slot from the hive, or heap-allocates one when the
            /// hive is full. NOTE(review): allocation failure aborts here
            /// (`catch unreachable`); use `tryGet` to propagate OOM instead.
            pub fn get(self: *This) *T {
                if (self.hive.get()) |value| {
                    return value;
                }

                return self.allocator.create(T) catch unreachable;
            }

            /// Same as `get`, additionally reporting through `new` whether
            /// the value was freshly heap-allocated (true) or recycled from
            /// the hive (false).
            pub fn getAndSeeIfNew(self: *This, new: *bool) *T {
                if (self.hive.get()) |value| {
                    new.* = false;
                    return value;
                }

                // Fix: previously `new` was left untouched on this path, so
                // callers that did not pre-initialize it read garbage.
                new.* = true;
                return self.allocator.create(T) catch unreachable;
            }

            /// Like `get`, but propagates allocation failure to the caller.
            pub fn tryGet(self: *This) !*T {
                if (self.hive.get()) |value| {
                    return value;
                }

                return try self.allocator.create(T);
            }

            /// Returns `value` to the hive when it belongs there; otherwise
            /// it must have come from the allocator and is destroyed.
            pub fn put(self: *This, value: *T) void {
                if (self.hive.put(value)) return;

                self.allocator.destroy(value);
            }
        };
    };
}

test "HiveArray" {
    const size = 64;

    // Choose an integer with a weird alignment
    const Int = u127;

    var a = HiveArray(Int, size).init();

    {
        // First claim comes from slot 0; a second claim must be distinct.
        const first = a.get().?;
        try testing.expect(a.get().? != first);
        try testing.expectEqual(a.indexOf(first), 0);
        // Releasing slot 0 makes it the next one handed out again.
        try testing.expect(a.put(first));
        try testing.expect(a.get().? == first);
        const reclaimed = a.get().?;
        reclaimed.* = 123;
        // A pointer outside the buffer is rejected by both put() and in().
        var stranger: Int = 12345;
        try testing.expect(a.put(&stranger) == false);
        try testing.expect(a.in(&stranger) == false);
    }

    // Reset every slot to free and walk the whole capacity.
    a.available = @TypeOf(a.available).initFull();
    {
        var i: u63 = 0;
        while (i < size) : (i += 1) {
            const slot = a.get().?;
            try testing.expectEqual(a.indexOf(slot), i);
            try testing.expect(a.put(slot));
            try testing.expect(a.get().? == slot);
        }
        // Everything is claimed now; further claims must fail.
        i = 0;
        while (i < size) : (i += 1) {
            try testing.expect(a.get() == null);
        }
    }
}
2023-01-05napi_boolean -> napi_numberGravatar Jarred Sumner 1-1/+1 2023-01-05Fixes #1733Gravatar Jarred Sumner 2-67/+79 2023-01-05[socket] fix double-free in `finalize()` (#1731)Gravatar Alex Lam S.L 3-53/+45 2023-01-05BumpGravatar Jarred Sumner 1-1/+1 2023-01-05fix `onConnectError()` error propagation (#1730)Gravatar Alex Lam S.L 1-2/+2 2023-01-05Update tcp-echo.bun.tsGravatar Jarred Sumner 1-13/+15 2023-01-05Really fix #1722Gravatar Jarred Sumner 2-3/+41 2023-01-05improve `.toThrow()` compatibility with Jest (#1728)Gravatar Alex Lam S.L 2-17/+33 2023-01-04Fix Bun.serve typings (#1714)Gravatar u9g 1-2/+2 2023-01-04implement `expect().toThrow()` (#1727)Gravatar Alex Lam S.L 5-130/+370 2023-01-04Add `SharedBuffer` from WebKit to make it easier to import more WebCore stuffGravatar Jarred Sumner 2-0/+1111 2023-01-04Fix default export for streamGravatar Jarred Sumner 1-11/+4 2023-01-04Fixes #1722Gravatar Jarred Sumner 1-1/+2 2023-01-04split server/client for tcp echo benchmark to better measure net.Socket perfGravatar Jarred Sumner 2-58/+60 2023-01-04buffer list clean-ups (#1721)Gravatar Alex Lam S.L 1-37/+68 2023-01-04Support non-classes in node:net (#1712)Gravatar Jarred Sumner 1-198/+216 2023-01-04Fixes #1716Gravatar Jarred Sumner 1-2/+2 2023-01-0410x faster `new Buffer` (#1717)Gravatar Jarred Sumner 19-520/+480 2023-01-03Update README.mdGravatar Jarred Sumner 1-2/+2 2023-01-03Add sqlite to vendorGravatar Jarred Sumner 1-4/+8 2023-01-03Fixes https://github.com/oven-sh/bun/issues/1695Gravatar Jarred Sumner 1-1/+1 2023-01-03Remove usages of std.xGravatar Jarred Sumner 7-98/+75 2023-01-03[streams] speed up `Readable` in some cases (#1708)Gravatar Alex Lam S.L 3-14/+140 2023-01-03Fix crash in BufferListGravatar Jarred Sumner 1-2/+2