LibSWOC++ 1.5.14
Solid Wall of C++
MemArena.cc
1// SPDX-License-Identifier: Apache-2.0
2// Copyright Verizon Media 2020
8#include "swoc/MemArena.h"
9#include <algorithm>
10
11namespace swoc { inline namespace SWOC_VERSION_NS {
12
13void (*MemArena::destroyer)(MemArena *) = std::destroy_at<MemArena>;
14
15inline bool
16MemArena::Block::satisfies(size_t n, size_t align) const {
17 auto r = this->remaining();
18 return r >= (n + align_padding(this->data() + allocated, align));
19}
20
21MemArena::MemArena(MemSpan<void> static_block) {
22 static constexpr Scalar<16, size_t> MIN_BLOCK_SIZE = round_up(sizeof(Block) + Block::MIN_FREE_SPACE);
23 if (static_block.size() < MIN_BLOCK_SIZE) {
24 throw std::domain_error("MemArena static block is too small.");
25 }
26 // Construct the block metadata in place and put the block on the list. Make a note that this is
27 // the static block, which must not be deleted.
28 auto space = static_block.size() - sizeof(Block);
29 _static_block = new (static_block.data()) Block(space);
30 _active_reserved = space;
31 _active.prepend(_static_block);
32}
33
34// Need to break these out because the default implementation doesn't clear the
35// integral values in @a that.
36
37MemArena::MemArena(swoc::MemArena::self_type &&that) noexcept
38 : _active_allocated(that._active_allocated),
39 _active_reserved(that._active_reserved),
40 _frozen_allocated(that._frozen_allocated),
41 _frozen_reserved(that._frozen_reserved),
42 _reserve_hint(that._reserve_hint),
43 _frozen(std::move(that._frozen)),
44 _active(std::move(that._active)),
45 _static_block(that._static_block) {
46 // Clear data in @a that to indicate all of the memory has been moved.
47 that._active_allocated = that._active_reserved = 0;
48 that._frozen_allocated = that._frozen_reserved = 0;
49 that._reserve_hint = 0;
50 that._static_block = nullptr;
51}
52
53MemArena *
54MemArena::construct_self_contained(size_t n) {
55 MemArena tmp{n + sizeof(MemArena)};
56 return tmp.make<MemArena>(std::move(tmp));
57}
58
59MemArena &
60MemArena::operator=(swoc::MemArena::self_type &&that) noexcept {
61 this->clear();
62 std::swap(_active_allocated, that._active_allocated);
63 std::swap(_active_reserved, that._active_reserved);
64 std::swap(_frozen_allocated, that._frozen_allocated);
65 std::swap(_frozen_reserved, that._frozen_reserved);
66 std::swap(_reserve_hint, that._reserve_hint);
67 _active = std::move(that._active);
68 _frozen = std::move(that._frozen);
69 return *this;
70}
71
72MemArena::Block *
73MemArena::make_block(size_t n) {
74 // If there's no reservation hint, use the extent. This is transient because the hint is cleared.
75 if (_reserve_hint == 0) {
76 if (_active_reserved) {
77 _reserve_hint = _active_reserved;
78 } else if (_frozen_allocated) { // immediately after freezing - use that extent.
79 _reserve_hint = _frozen_allocated;
80 }
81 }
82 n = std::max<size_t>(n, _reserve_hint);
83 _reserve_hint = 0; // did this, clear for next time.
84 // Add in overhead and round up to paragraph units.
85 n = Paragraph{round_up(n + ALLOC_HEADER_SIZE + sizeof(Block))};
86 // If more than a page, or within a quarter page of a full page,
87 // round up to page unit size and clip back to account for the alloc header.
88 if (n >= (Page::SCALE - QuarterPage::SCALE)) {
89 n = Page{round_up(n)} - ALLOC_HEADER_SIZE;
90 } else if (n >= QuarterPage::SCALE) { // if at least a quarter page, round up to quarter pages.
91 n = QuarterPage{round_up(n)};
92 }
93
94 // Allocate space for the Block instance and the requested memory, and construct a Block at the front.
95 // In theory this could use ::operator new(n) but this causes a size mismatch during ::operator delete.
96 // Easier to use malloc and override @c delete.
97 auto free_space = n - sizeof(Block);
98 _active_reserved += free_space;
99 return new (::malloc(n)) Block(free_space);
100}
101
102MemSpan<void>
103MemArena::alloc(size_t n, size_t align) {
104 MemSpan<void> zret;
105 this->require(n, align);
106 auto block = _active.head();
107 zret = block->alloc(n, align);
108 _active_allocated += n;
109 // If this block is now full, move it to the back.
110 if (block->is_full() && block != _active.tail()) {
111 _active.erase(block);
112 _active.append(block);
113 }
114 return zret;
115}
116
117MemArena &
118MemArena::freeze(size_t n) {
119 this->destroy_frozen();
120 _frozen = std::move(_active);
121 // Update the metadata.
122 _frozen_allocated = _active_allocated;
123 _frozen_reserved = _active_reserved;
124 _active_allocated = 0;
125 _active_reserved = 0;
126
127 _reserve_hint = n;
128
129 return *this;
130}
131
132MemArena &
133MemArena::thaw() {
134 this->destroy_frozen();
135 _frozen_allocated = _frozen_reserved = 0;
136 if (_static_block) {
137 _static_block->discard();
138 _active.prepend(_static_block);
139 _active_reserved += _static_block->remaining();
140 }
141 return *this;
142}
143
144bool
145MemArena::contains(const void *ptr) const {
146 auto pred = [ptr](const Block &b) -> bool { return b.contains(ptr); };
147
148 return std::any_of(_active.begin(), _active.end(), pred) || std::any_of(_frozen.begin(), _frozen.end(), pred);
149}
150
151MemArena &
152MemArena::require(size_t n, size_t align) {
153 auto spot = _active.begin();
154 Block *block{nullptr};
155
156 // Search the list until a block that satisfies the request is found, or a full block is hit, which is a miss.
157 while (spot != _active.end() && !spot->satisfies(n, align)) {
158 if (spot->is_full()) {
159 spot = _active.end();
160 } else {
161 ++spot;
162 }
163 }
164 if (spot == _active.end()) { // no block has enough free space
165 block = this->make_block(n); // assuming a new block is sufficiently aligned.
166 _active.prepend(block);
167 } else if (spot != _active.begin()) {
168 // big enough space, move to the head of the list.
169 block = spot;
170 _active.erase(block);
171 _active.prepend(block);
172 }
173 // Invariant - the head active block has at least @a n bytes of free storage.
174 return *this;
175}
176
177void
178MemArena::destroy_active() {
179 auto sb = _static_block; // C++20 nonsense - capture of @a this is incompatible with C++17.
180 _active
181 .apply([=](Block *b) {
182 if (b != sb) {
183 delete b;
184 }
185 })
186 .clear();
187}
188
189void
190MemArena::destroy_frozen() {
191 auto sb = _static_block; // C++20 nonsense - capture of @a this is incompatible with C++17.
192 _frozen
193 .apply([=](Block *b) {
194 if (b != sb) {
195 delete b;
196 }
197 })
198 .clear();
199}
200
201MemArena &
202MemArena::clear(size_t hint) {
203 _reserve_hint = hint ? hint : _frozen_allocated + _active_allocated;
204 _frozen_reserved = _frozen_allocated = 0;
205 _active_reserved = _active_allocated = 0;
206 this->destroy_frozen();
207 this->destroy_active();
208
209 return *this;
210}
211
212MemArena &
213MemArena::discard(MemSpan<void const> span) {
214 // This is intended to iterate over empty blocks until @a span is found.
215 for ( auto & block : _active) {
216 if (block.contains(span.data())) { // it's in this block, final iteration.
217 if (block.allocated_data_end() == span.data_end()) {
218 block.allocated -= span.size();
219 _active_allocated -= span.size();
220 }
221 break;
222 } else if (block.allocated > 0) {
223 // If the block wasn't empty the only other place
224 // @a span could be is in the most recent filled block, which is last in the list.
225 // Invariant - the first block does not contain @a span.
226 // Therefore, if the last block contains @a span, it is not the first block.
227 auto lfb = _active.tail(); // list is not empty, must exist.
228 if (lfb->contains(span.data()) && lfb->allocated_data_end() == span.data_end()) {
229 lfb->allocated -= span.size();
230 _active_allocated -= span.size();
231 if (!lfb->is_full()) {
232 _active.erase(lfb);
233 _active.prepend(lfb);
234 }
235 }
236 break; // loop always ends after hitting a non-empty block.
237 }
238 }
239 return *this;
240}
241
242MemArena &
243MemArena::discard(size_t hint) {
244 // Because the existing blocks remain, clear the reserve hint so that when a new block is
245 // eventually allocated it uses the allocation size needed at that time, not now. The current
246 // need is handled by the existing blocks, unless the caller explicitly provides a hint.
247 _reserve_hint = hint ? hint : 0;
248
249 for (auto &block : _active) {
250 block.discard();
251 }
252 _active_allocated = 0;
253 return *this;
254}
255
256MemArena::~MemArena() {
257 // Destruct in a way that makes it safe for the instance to be in one of its own memory blocks.
258 // This means copying members that will be used during the delete.
259 Block *ba = _active.head();
260 Block *bf = _frozen.head();
261 Block *sb = _static_block;
262
263 _active.clear();
264 _frozen.clear();
265 while (bf) {
266 Block *b = bf;
267 bf = bf->_link._next;
268 if (b != sb) {
269 delete b;
270 }
271 }
272 while (ba) {
273 Block *b = ba;
274 ba = ba->_link._next;
275 if (b != sb) {
276 delete b;
277 }
278 }
279}
280
281#if __has_include(<memory_resource>)
282void *
283MemArena::do_allocate(std::size_t bytes, std::size_t align) {
284 return this->alloc(bytes, align).data();
285}
286
287void
288MemArena::do_deallocate(void *, size_t, size_t) {}
289
290bool
291MemArena::do_is_equal(std::pmr::memory_resource const &that) const noexcept {
292 return this == &that;
293}
294#endif
295
296}} // namespace swoc::SWOC_VERSION_NS
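A minimal usage sketch (illustrative only, not part of MemArena.cc) of the interface defined above: allocate from the active generation, then use freeze() and thaw() to roll generations. The function name and buffer sizes are assumptions for the example.

#include "swoc/MemArena.h"
#include <cstring>

void example() {
  swoc::MemArena arena;                        // Empty arena - the first alloc() triggers make_block().
  auto span = arena.alloc(128);                // 128 bytes from the active generation.
  std::memset(span.data(), 0, span.size());

  arena.freeze();                              // The active generation becomes the frozen generation.
  auto fresh = arena.alloc(128);               // Served from a new active block...
  std::memcpy(fresh.data(), span.data(), 128); // ...so live data can be copied out of frozen memory.
  arena.thaw();                                // Frozen blocks are released; @a span is now invalid.
}

This freeze / copy / thaw sequence is the generational pattern the frozen block list above supports.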
bool contains(const void *ptr) const
Definition MemArena.cc:145
self_type & thaw()
Definition MemArena.cc:133
MemSpan< void > alloc(size_t n, size_t align=DEFAULT_ALIGNMENT)
Definition MemArena.cc:103
self_type & operator=(self_type const &that)=delete
No copy assignment.
~MemArena()
Destructor.
Definition MemArena.cc:256
void destroy_frozen()
Clean up the frozen list.
Definition MemArena.cc:190
MemArena & discard(MemSpan< void const > span)
Definition MemArena.cc:213
Block * make_block(size_t n)
Definition MemArena.cc:73
MemArena & freeze(size_t n=0)
Definition MemArena.cc:118
static self_type * construct_self_contained(size_t n=DEFAULT_BLOCK_SIZE)
Definition MemArena.cc:54
void destroy_active()
Clean up the active list.
Definition MemArena.cc:178
self_type & require(size_t n, size_t align=DEFAULT_ALIGNMENT)
Definition MemArena.cc:152
MemArena & clear(size_t hint=0)
Definition MemArena.cc:202
static void(* destroyer)(self_type *)
Definition MemArena.h:45
MemArena(size_t n=DEFAULT_BLOCK_SIZE)
size_t _active_allocated
Total allocations in the active generation.
Definition MemArena.h:484
size_t _frozen_reserved
Total frozen reserved memory.
Definition MemArena.h:488
BlockList _active
Current generation. Allocate here.
Definition MemArena.h:495
size_t _active_reserved
Definition MemArena.h:485
size_t _reserve_hint
Definition MemArena.h:492
static constexpr size_t ALLOC_HEADER_SIZE
Definition MemArena.h:480
Block * _static_block
Static block, if any.
Definition MemArena.h:498
Scalar< 4096 > Page
Size for rounding block sizes.
Definition MemArena.h:476
BlockList _frozen
Previous generation, frozen memory.
Definition MemArena.h:494
Scalar< Page::SCALE/4 > QuarterPage
Quarter page - unit for sub page sizes.
Definition MemArena.h:477
Scalar< 16 > Paragraph
Minimum unit of memory allocation.
Definition MemArena.h:478
size_t _frozen_allocated
Total allocations in the previous generation. This is only non-zero while the arena is frozen.
Definition MemArena.h:487
T * data() const
Definition MemSpan.h:1154
constexpr T * data_end() const
Definition MemSpan.h:1160
constexpr size_t size() const
Number of elements in the span.
Definition MemSpan.h:1178
static constexpr intmax_t SCALE
Definition Scalar.h:177
For template deduction guides.
Definition ArenaWriter.cc:9
Scalar_INTERNAL constexpr detail::scalar_unit_round_up_t< C > round_up(C n)
Definition Scalar.h:559
bool satisfies(size_t n, size_t align) const
Definition MemArena.cc:16
Simple internal arena block of memory. Maintains the underlying memory.
Definition MemArena.h:60
static constexpr size_t MIN_FREE_SPACE
A block must have at least this much free space to not be "full".
Definition MemArena.h:62
size_t remaining() const
Amount of unallocated storage.
struct swoc::MemArena::Block::Linkage _link
Intrusive list support.
static size_t align_padding(void const *ptr, size_t align)
size_t allocated
Current allocated (in use) bytes.
Definition MemArena.h:163
char * data()
Get the start of the data in this block.
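Because MemArena overrides do_allocate(), do_deallocate(), and do_is_equal() (the block guarded by __has_include(<memory_resource>) above), an arena can back std::pmr containers directly. A hedged sketch, assuming the build has <memory_resource> available; the container choice and element count are illustrative.

#include "swoc/MemArena.h"
#include <memory_resource>
#include <vector>

void pmr_example() {
  swoc::MemArena arena;
  std::pmr::vector<int> v(&arena); // All vector storage is drawn from the arena's blocks.
  for (int i = 0; i < 1000; ++i) {
    v.push_back(i);                // do_deallocate() is a no-op, so reallocations never return memory;
  }                                // everything is reclaimed when the arena is cleared or destroyed.
}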