GCC Code Coverage Report


Directory: ./
File: libs/capy/src/work_allocator.hpp
Date: 2025-12-30 20:31:36
Exec Total Coverage
Lines: 2 2 100.0%
Functions: 1 1 100.0%
Branches: 0 0 -%

Line Branch Exec Source
1 //
2 // Copyright (c) 2025 Vinnie Falco (vinnie.falco@gmail.com)
3 //
4 // Distributed under the Boost Software License, Version 1.0. (See accompanying
5 // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
6 //
7 // Official repository: https://github.com/boostorg/capy
8 //
9
10 #ifndef BOOST_CAPY_SRC_WORK_ALLOCATOR_HPP
11 #define BOOST_CAPY_SRC_WORK_ALLOCATOR_HPP
12
13 #include <cstddef>
14
15 namespace boost {
16 namespace capy {
17
/** A pool of arenas serving dynamic allocation patterns.

    @par Allocation Order Invariant

    Every allocation is satisfied from the newest arena,
    which sits at the tail of the list. An arena that has
    been superseded by a newer one never serves another
    allocation, so everything allocated from arena N is
    strictly older than everything allocated from arena N+1.

    Releases may happen in any order. An older arena whose
    allocations have all been released becomes "parked":
    it is retained for reuse instead of being freed at once.

    @par Arena States

    - Active: the tail arena, the sole source of new allocations.
    - Draining: a superseded arena that still has live allocations.
    - Parked: an empty arena kept around for recycling or pruning.

    @par Recycling

    When the active arena runs out of space, a parked arena
    can be promoted to become the new active arena instead of
    performing a fresh heap allocation, avoiding malloc/free
    churn under steady-state load. A recycled arena is relinked
    at the tail of the list.

    This class is not thread-safe.
*/
class work_allocator
{
public:
    class arena;

private:
    // Intrusive doubly-linked list of arenas, oldest first.
    // The tail is the active arena; layout of these members
    // must not change while definitions live in the .cpp.
    arena* head_;
    arena* tail_;
    std::size_t arena_count_;   // arenas currently linked
    std::size_t next_size_;     // presumably the capacity for the next arena — TODO confirm in .cpp
    std::size_t min_size_;      // lower bound on arena capacity
    std::size_t max_size_;      // upper bound on arena capacity
    std::size_t keep_empty_;    // parked arenas retained before pruning

public:
    ~work_allocator();

    /** Constructor.

        @param min_size Smallest arena capacity, in bytes.
        @param max_size Largest arena capacity, in bytes.
        @param keep_empty Number of empty (parked) arenas to retain.
    */
    explicit work_allocator(
        std::size_t min_size = 4096,
        std::size_t max_size = 1048576,
        std::size_t keep_empty = 1);

    // Non-copyable: the object owns its arena list exclusively.
    work_allocator(work_allocator const&) = delete;
    work_allocator& operator=(work_allocator const&) = delete;

    /** Return the number of arenas.
    */
    std::size_t arena_count() const noexcept
    {
        return arena_count_;
    }

    /** Return allocated memory.

        @throws std::bad_alloc on failure.
    */
    void* allocate(std::size_t size, std::size_t align);

    /** Release an allocation.
    */
    void deallocate(void* p, std::size_t size, std::size_t align) noexcept;

private:
    // List maintenance and lookup helpers; defined in the .cpp.
    void link_at_tail(arena* a) noexcept;
    void unlink(arena* a) noexcept;
    arena* find_arena(void* p) noexcept;
    arena* find_parked() noexcept;
    void prune() noexcept;
};
97
98 //------------------------------------------------------------------------------
99
100 /** A fixed-size arena that allocates from high to low addresses.
101
102 Memory is allocated from the top of the buffer downward.
103 Deallocation only decrements a counter; actual memory is
104 reused only when all allocations are released.
105
106 Arenas are linked in a doubly-linked list managed by
107 work_allocator.
108 */
109 class work_allocator::arena
110 {
111 friend class work_allocator;
112
113 arena* prev_;
114 arena* next_;
115 void* base_;
116 std::size_t capacity_;
117 std::size_t offset_;
118 std::size_t count_;
119
120 public:
121 ~arena();
122
123 explicit
124 arena(std::size_t capacity);
125
126 arena(arena const&) = delete;
127 arena& operator=(arena const&) = delete;
128
129 /** Return the total capacity in bytes.
130 */
131 std::size_t
132 capacity() const noexcept
133 {
134 return capacity_;
135 }
136
137 /** Return the number of active allocations.
138 */
139 std::size_t
140 count() const noexcept
141 {
142 return count_;
143 }
144
145 /** Return true if there are no active allocations.
146 */
147 bool
148 38 empty() const noexcept
149 {
150 38 return count_ == 0;
151 }
152
153 /** Return true if the pointer is within this arena.
154 */
155 bool
156 owns(void* p) const noexcept;
157
158 /** Return allocated memory, or nullptr if full.
159 */
160 void*
161 allocate(std::size_t size, std::size_t align) noexcept;
162
163 /** Release an allocation.
164 */
165 void
166 deallocate(void* p, std::size_t size, std::size_t align) noexcept;
167
168 /** Reset the arena for reuse.
169 */
170 void
171 reset() noexcept;
172 };
173
174 } // capy
175 } // boost
176
177 #endif
178