LibSWOC++ 1.5.15
Solid Wall of C++
MemArena.cc
// SPDX-License-Identifier: Apache-2.0
// Copyright Verizon Media 2020

#include "swoc/MemArena.h"
#include <algorithm>

namespace swoc { inline namespace SWOC_VERSION_NS {

void (*MemArena::destroyer)(MemArena *) = std::destroy_at<MemArena>;

inline bool
MemArena::Block::satisfies(size_t n, size_t align) const {
  auto r = this->remaining();
  return r >= (n + align_padding(this->data() + allocated, align));
}

MemArena::MemArena(MemSpan<void> static_block) {
  static constexpr Scalar<16, size_t> MIN_BLOCK_SIZE = round_up(sizeof(Block) + Block::MIN_FREE_SPACE);
  if (static_block.size() < MIN_BLOCK_SIZE) {
    throw std::domain_error("MemArena static block is too small.");
  }
  // Construct the block data in the block and put it on the list. Make a note this is the
  // static block that shouldn't be deleted.
  auto space = static_block.size() - sizeof(Block);
  _static_block = new (static_block.data()) Block(space);
  _active_reserved = space;
  _active.prepend(_static_block);
}
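
// Illustrative usage sketch (not part of this file): an arena built over caller provided storage,
// e.g. a stack buffer. The Block header is constructed in place at the front of the buffer, so no
// heap allocation is needed until that space is exhausted. Assumes MemSpan's pointer / length
// constructor.
//
//   alignas(std::max_align_t) char buffer[4096];
//   swoc::MemArena arena{swoc::MemSpan<void>{buffer, sizeof(buffer)}};
//   auto span = arena.alloc(128); // served from @a buffer.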

// Need to break these out because the default implementation doesn't clear the
// integral values in @a that.

MemArena::MemArena(swoc::MemArena::self_type &&that) noexcept
  : _active_allocated(that._active_allocated),
    _active_reserved(that._active_reserved),
    _frozen_allocated(that._frozen_allocated),
    _frozen_reserved(that._frozen_reserved),
    _reserve_hint(that._reserve_hint),
    _frozen(std::move(that._frozen)),
    _active(std::move(that._active)),
    _static_block(that._static_block) {
  // Clear data in @a that to indicate all of the memory has been moved.
  that._active_allocated = that._active_reserved = 0;
  that._frozen_allocated = that._frozen_reserved = 0;
  that._reserve_hint = 0;
  that._static_block = nullptr;
}

MemArena *
MemArena::construct_self_contained(size_t n) {
  MemArena tmp{n + sizeof(MemArena)};
  return tmp.make<MemArena>(std::move(tmp));
}
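
// Illustrative usage sketch (not part of this file): a self-contained arena lives inside its own
// first block, so there is no separate owner object to manage. The @a destroyer hook
// (std::destroy_at by default) can serve as the deleter; the destructor later in this file is
// written to be safe when the instance sits in a block it is about to free.
//
//   swoc::MemArena * arena = swoc::MemArena::construct_self_contained();
//   auto span = arena->alloc(100);
//   // ... use the arena ...
//   swoc::MemArena::destroyer(arena); // destroys the arena and releases all of its blocks.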

MemArena &
MemArena::operator=(swoc::MemArena::self_type &&that) noexcept {
  this->clear();
  std::swap(_active_allocated, that._active_allocated);
  std::swap(_active_reserved, that._active_reserved);
  std::swap(_frozen_allocated, that._frozen_allocated);
  std::swap(_frozen_reserved, that._frozen_reserved);
  std::swap(_reserve_hint, that._reserve_hint);
  _active = std::move(that._active);
  _frozen = std::move(that._frozen);
  return *this;
}

MemArena::Block *
MemArena::make_block(size_t n) {
  // If there's no reservation hint, use the extent. This is transient because the hint is cleared.
  if (_reserve_hint == 0) {
    if (_active_reserved) {
      _reserve_hint = _active_allocated;
    } else if (_frozen_allocated) { // immediately after freezing - use that extent.
      _reserve_hint = _frozen_allocated;
    }
  }
  n = std::max<size_t>(n, _reserve_hint);
  _reserve_hint = 0; // did this, clear for next time.
  // Add in overhead and round up to paragraph units.
  n = Paragraph{round_up(n + ALLOC_HEADER_SIZE + sizeof(Block))};
  // If more than a page or within a quarter page of a full page,
  // round up to page unit size and clip back to account for the alloc header.
  if (n >= (Page::SCALE - QuarterPage::SCALE)) {
    n = Page{round_up(n)} - ALLOC_HEADER_SIZE;
  } else if (n >= QuarterPage::SCALE) { // if at least a quarter page, round up to quarter pages.
    n = QuarterPage{round_up(n)};
  }

  // Allocate space for the Block instance and the requested memory and construct a Block at the front.
  auto free_space = n - sizeof(Block);
  _active_reserved += free_space;
  return new (n) Block(free_space);
}
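
// Worked example of the sizing above (illustrative, using Paragraph = 16, QuarterPage = 1024 and
// Page = 4096 from MemArena.h): a request that comes to 1100 bytes once overhead is added is
// rounded to a paragraph boundary (1104) and, being at least a quarter page, then to a quarter
// page boundary (2048). A request that comes to 3600 bytes is within a quarter page of a full
// page, so it is rounded to a full page (4096) and then clipped by ALLOC_HEADER_SIZE so the
// underlying allocation, header included, stays page sized.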

MemSpan<void>
MemArena::alloc(size_t n, size_t align) {
  MemSpan<void> zret;
  this->require(n, align);
  auto block = _active.head();
  zret = block->alloc(n, align);
  _active_allocated += n;
  // If this block is now full, move it to the back.
  if (block->is_full() && block != _active.tail()) {
    _active.erase(block);
    _active.append(block);
  }
  return zret;
}
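
// Illustrative usage sketch (not part of this file): callers use alloc() for raw storage or the
// templated make<T>() helper (as in construct_self_contained above) to allocate and construct in
// one step. @a Thing is a hypothetical client type.
//
//   struct Thing { int x = 0; double y = 0; };
//   swoc::MemArena arena;
//   auto span  = arena.alloc(sizeof(Thing), alignof(Thing));
//   Thing * t1 = new (span.data()) Thing;   // placement construct in arena memory.
//   Thing * t2 = arena.make<Thing>();       // or let make<T> allocate and construct.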

MemArena &
MemArena::freeze(size_t n) {
  this->destroy_frozen();
  _frozen = std::move(_active);
  // Update the meta data.
  _frozen_allocated = _active_allocated;
  _frozen_reserved = _active_reserved;
  _active_allocated = 0;
  _active_reserved = 0;

  _reserve_hint = n;

  return *this;
}
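
// Illustrative usage sketch (not part of this file), given a MemArena @a arena: freeze() and
// thaw() support generational reuse. Freezing moves the current blocks to the frozen list where
// they stay readable; subsequent allocations go to fresh blocks sized from the frozen extent (see
// make_block above); thaw() then releases the old generation.
//
//   arena.freeze();          // existing data stays valid and readable.
//   copy_live_data(arena);   // hypothetical step: re-allocate surviving data in the new generation.
//   arena.thaw();            // frozen blocks are freed (the static block, if any, is reused).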

MemArena &
MemArena::thaw() {
  this->destroy_frozen();
  _frozen_reserved = _frozen_allocated = 0;
  if (_static_block) {
    _static_block->discard();
    _active.prepend(_static_block);
    _active_reserved += _static_block->remaining();
  }
  return *this;
}

bool
MemArena::contains(const void *ptr) const {
  auto pred = [ptr](const Block &b) -> bool { return b.contains(ptr); };

  return std::any_of(_active.begin(), _active.end(), pred) || std::any_of(_frozen.begin(), _frozen.end(), pred);
}

MemArena &
MemArena::require(size_t n, size_t align) {
  auto spot = _active.begin();
  Block *block{nullptr};

  // Search back through the list until a full block is hit, which is a miss.
  while (spot != _active.end() && !spot->satisfies(n, align)) {
    if (spot->is_full()) {
      spot = _active.end();
    } else {
      ++spot;
    }
  }
  if (spot == _active.end()) { // no block has enough free space
    block = this->make_block(n); // assuming a new block is sufficiently aligned.
    _active.prepend(block);
  } else if (spot != _active.begin()) {
    // big enough space, move to the head of the list.
    block = spot;
    _active.erase(block);
    _active.prepend(block);
  }
  // Invariant - the head active block has at least @a n bytes of free storage.
  return *this;
}

void
MemArena::destroy_active() {
  auto sb = _static_block; // C++20 nonsense - capture of @a this is incompatible with C++17.
  _active
    .apply([=](Block *b) {
      if (b != sb) {
        delete b;
      }
    })
    .clear();
}

void
MemArena::destroy_frozen() {
  auto sb = _static_block; // C++20 nonsense - capture of @a this is incompatible with C++17.
  _frozen
    .apply([=](Block *b) {
      if (b != sb) {
        delete b;
      }
    })
    .clear();
}

MemArena &
MemArena::clear(size_t hint) {
  _reserve_hint = hint ? hint : _frozen_allocated + _active_allocated;
  _frozen_reserved = _frozen_allocated = 0;
  _active_reserved = _active_allocated = 0;
  this->destroy_frozen();
  this->destroy_active();

  return *this;
}

MemArena &
MemArena::discard(MemSpan<void const> span) {
  // This is intended to iterate over empty blocks until @a span is found.
  for (auto &block : _active) {
    if (block.contains(span.data())) { // it's in this block, final iteration.
      if (block.allocated_data_end() == span.data_end()) {
        block.allocated -= span.size();
        _active_allocated -= span.size();
      }
      break;
    } else if (block.allocated > 0) {
      // If the block wasn't empty, the only other place @a span could be is the most recently
      // filled block, which is last in the list.
      // Invariant - the first block does not contain @a span.
      // Therefore, if the last block contains @a span, it is not the first block.
      auto lfb = _active.tail(); // list is not empty, must exist.
      if (lfb->contains(span.data()) && lfb->allocated_data_end() == span.data_end()) {
        lfb->allocated -= span.size();
        _active_allocated -= span.size();
        if (!lfb->is_full()) {
          _active.erase(lfb);
          _active.prepend(lfb);
        }
      }
      break; // loop always ends after hitting a non-empty block.
    }
  }
  return *this;
}
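
// Illustrative usage sketch (not part of this file), given a MemArena @a arena: discard(span)
// returns storage to the arena when @a span is still the most recent allocation in its block,
// e.g. when an over-sized scratch buffer turns out not to be needed.
//
//   auto scratch = arena.alloc(1024);
//   // ... decide the space is not needed after all ...
//   arena.discard(scratch); // the bytes become available to the next alloc() call.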

MemArena &
MemArena::discard(size_t hint) {
  // Because the existing blocks remain, clear the reserve hint so that when a new block is
  // allocated it uses the allocation size at that time, not the current one. The current size is
  // handled by the existing blocks, unless the caller explicitly provides a hint.
  _reserve_hint = hint ? hint : 0;

  for (auto &block : _active) {
    block.discard();
  }
  _active_allocated = 0;
  return *this;
}
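
// Illustrative usage sketch (not part of this file), given a MemArena @a arena: discard()
// supports per-transaction reuse - the allocation counts are reset but the blocks, and therefore
// the reserved memory, are kept, so the next round of allocations does not go back to the heap.
//
//   for (auto const &txn : transactions) { // hypothetical work loop.
//     process(txn, arena);                 // allocations land in the existing blocks.
//     arena.discard(0);                    // reset for the next transaction, keep the blocks.
//   }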

MemArena::~MemArena() {
  // Destruct in a way that makes it safe for the instance to be in one of its own memory blocks.
  // This means copying members that will be used during the delete.
  Block *ba = _active.head();
  Block *bf = _frozen.head();
  Block *sb = _static_block;

  _active.clear();
  _frozen.clear();
  while (bf) {
    Block *b = bf;
    bf = bf->_link._next;
    if (b != sb) {
      delete b;
    }
  }
  while (ba) {
    Block *b = ba;
    ba = ba->_link._next;
    if (b != sb) {
      delete b;
    }
  }
}

#if __has_include(<memory_resource>)
void *
MemArena::do_allocate(std::size_t bytes, std::size_t align) {
  return this->alloc(bytes, align).data();
}

void
MemArena::do_deallocate(void *, size_t, size_t) {}

bool
MemArena::do_is_equal(std::pmr::memory_resource const &that) const noexcept {
  return this == &that;
}
#endif

}} // namespace swoc::SWOC_VERSION_NS
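
// Illustrative usage sketch (not part of this file): when <memory_resource> is available,
// MemArena is a std::pmr::memory_resource, so standard pmr containers can allocate from it.
// do_deallocate is a no-op - memory is reclaimed when the arena is cleared or destroyed.
//
//   #include <memory_resource>
//   #include <vector>
//
//   swoc::MemArena arena;
//   std::pmr::vector<int> v(&arena); // polymorphic_allocator wraps the arena.
//   v.push_back(1);                  // element storage comes from @a arena.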