namespace swoc { inline namespace SWOC_VERSION_NS {

// Constructor over a caller-provided static block: reject blocks that are too small,
// then compute the space left after the embedded Block header.
  if (static_block.size() < MIN_BLOCK_SIZE) {
    throw std::domain_error("MemArena static block is too small.");
  }

  auto space = static_block.size() - sizeof(Block);
// Move construction: the block lists are transferred and the source arena is reset to empty.
    _frozen(std::move(that._frozen)),
    _active(std::move(that._active)),

  that._active_allocated = that._active_reserved = 0;
  that._frozen_allocated = that._frozen_reserved = 0;
  that._reserve_hint     = 0;
  that._static_block     = nullptr;
// construct_self_contained: a temporary arena builds a MemArena instance inside its own
// memory and then moves itself into that instance.
  return tmp.make<MemArena>(std::move(tmp));
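As a usage sketch of the self-contained idiom (the header path and the cleanup step are assumptions, not taken from this listing): the returned arena lives inside its own first block, so it is torn down by invoking the destructor directly rather than with operator delete.

  #include "swoc/MemArena.h"  // assumed header path

  swoc::MemArena *arena = swoc::MemArena::construct_self_contained();
  auto span = arena->alloc(128);  // allocate from the arena as usual
  // ... use span ...
  arena->~MemArena();  // assumed cleanup: the instance lives in its own blocks,
                       // so plain `delete` would be incorrect here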
// Move assignment: take over the block lists from the source arena.
  _active = std::move(that._active);
  _frozen = std::move(that._frozen);
// make_block: the Block header sits at the front of the raw allocation, the rest is free space.
  auto free_space = n - sizeof(Block);
  return new (::malloc(n)) Block(free_space);
// alloc: carve the span out of the chosen block; a block that has become full
// (and is not the tail of the active list) gets special handling.
  zret = block->alloc(n, align);

  if (block->is_full() && block != _active.tail()) {
// Predicate to locate the block that owns @a ptr.
  auto pred = [ptr](const Block &b) -> bool { return b.contains(ptr); };
// require: walk the active list looking for a block that can satisfy @a n bytes at @a align.
  Block *block{nullptr};

  while (spot != _active.end() && !spot->satisfies(n, align)) {
    if (spot->is_full()) {
      // ...
    } else if (spot != _active.begin()) {
// Block cleanup: each block list is walked and every block released.
    .apply([=](Block *b) { /* ... */ });
    // ...
    .apply([=](Block *b) { /* ... */ });
// discard: space is reclaimed only when the span ends exactly at the allocated end of its
// block, i.e. it was the most recent allocation from that block.
  for (auto &block : _active) {
    if (block.contains(span.data())) {
      if (block.allocated_data_end() == span.data_end()) {
        block.allocated -= span.size();
        // ...
      } else if (block.allocated > 0) {
        // ...

  if (lfb->contains(span.data()) && lfb->allocated_data_end() == span.data_end()) {
    lfb->allocated -= span.size();
    // ...
    if (!lfb->is_full()) {
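A minimal usage sketch of the behavior implied by these checks (variable names are illustrative; any semantics of discard beyond what the checks above show are an assumption):

  swoc::MemArena arena;
  auto keep = arena.alloc(64);
  auto tail = arena.alloc(256);
  arena.discard(tail);  // ends at the block's allocated end, so that space becomes reusable
  arena.discard(keep);  // does not end at the allocated end, so per the checks above
                        // the space cannot be reclaimed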
  bf = bf->_link._next;

  ba = ba->_link._next;
#if __has_include(<memory_resource>)

// std::pmr::memory_resource interface: allocation is forwarded to the arena, per-object
// deallocation is a no-op, and two resources compare equal only if they are the same arena.
void *
MemArena::do_allocate(std::size_t bytes, std::size_t align) {
  return this->alloc(bytes, align).data();
}

void
MemArena::do_deallocate(void *, size_t, size_t) {}

bool
MemArena::do_is_equal(std::pmr::memory_resource const &that) const noexcept {
  return this == &that;
}
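Because of these overrides a MemArena can be used as a std::pmr::memory_resource. A short sketch (header path assumed), keeping in mind that do_deallocate is a no-op so storage is only reclaimed through the arena itself:

  #include <memory_resource>
  #include <vector>
  #include "swoc/MemArena.h"  // assumed header path

  swoc::MemArena arena;             // default block size
  std::pmr::vector<int> v{&arena};  // polymorphic_allocator over the arena
  for (int i = 0; i < 1000; ++i) {
    v.push_back(i);                 // element storage comes from arena blocks
  }
  // Element deallocations hit do_deallocate, which does nothing; the storage is
  // reclaimed when the arena is cleared or destroyed.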
Entities referenced in the listing above:

bool contains(const void *ptr) const
MemSpan< void > alloc(size_t n, size_t align=DEFAULT_ALIGNMENT)
self_type & operator=(self_type const &that)=delete
No copy assignment.
void destroy_frozen()
Clean up the frozen list.
MemArena & discard(MemSpan< void const > span)
Block * make_block(size_t n)
MemArena & freeze(size_t n=0)
(a sketch of the freeze / clear generation cycle follows this list)
static self_type * construct_self_contained(size_t n=DEFAULT_BLOCK_SIZE)
void destroy_active()
Clean up the active list.
self_type & require(size_t n, size_t align=DEFAULT_ALIGNMENT)
MemArena & clear(size_t hint=0)
static void(* destroyer)(self_type *)
MemArena(size_t n=DEFAULT_BLOCK_SIZE)
size_t _active_allocated
Total allocations in the active generation.
size_t _frozen_reserved
Total frozen reserved memory.
BlockList _active
Current generation. Allocate here.
static constexpr size_t ALLOC_HEADER_SIZE
Block * _static_block
Static block, if any.
Scalar< 4096 > Page
Size for rounding block sizes.
BlockList _frozen
Previous generation, frozen memory.
Scalar< Page::SCALE/4 > QuarterPage
Quarter page - unit for sub page sizes.
Scalar< 16 > Paragraph
Minimum unit of memory allocation.
size_t _frozen_allocated
Total allocations in the previous generation. This is only non-zero while the arena is frozen.
constexpr T * data_end() const
constexpr size_t size() const
Number of elements in the span.
static constexpr intmax_t SCALE
For template deduction guides.
Scalar_INTERNAL constexpr detail::scalar_unit_round_up_t< C > round_up(C n)
bool satisfies(size_t n, size_t align) const
struct swoc::MemArena::Block
Simple internal arena block of memory. Maintains the underlying memory.
static constexpr size_t MIN_FREE_SPACE
A block must have at least this much free space to not be "full".
size_t remaining() const
Amount of unallocated storage.
struct swoc::MemArena::Block::Linkage _link
Intrusive list support.
static size_t align_padding(void const *ptr, size_t align)
size_t allocated
Current allocated (in use) bytes.
char * data()
Get the start of the data in this block.
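Based on the members above (_active as the current generation, _frozen as the previous one, freeze() and clear()), here is a hedged sketch of the generation cycle. The thaw() call is assumed from the wider MemArena API and does not appear in this listing.

  swoc::MemArena arena;

  // Fill the current generation.
  swoc::MemSpan<void> config = arena.alloc(1024);

  // Start a new generation: existing memory is frozen (still readable) while fresh
  // allocations go to new active blocks.
  arena.freeze();
  swoc::MemSpan<void> updated = arena.alloc(1024);
  // ... copy whatever is still needed from the frozen generation into `updated` ...

  arena.thaw();    // assumed API: release the frozen generation
  // arena.clear(); // alternatively, clear() releases everything and starts over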