#if __has_include(<memory_resource>)
#include <memory_resource>
#endif
namespace swoc {
inline namespace SWOC_VERSION_NS {
class MemArena
#if __has_include(<memory_resource>)
  : public std::pmr::memory_resource
#endif
{
  using unique_ptr = std::unique_ptr<self_type, void (*)(self_type *)>;
  bool satisfies(size_t n, size_t align) const;
  static void operator delete(void *ptr) noexcept;
  static void operator delete([[maybe_unused]] void *ptr, void *place) noexcept;
  Block *_next{nullptr};
  Block *_prev{nullptr};
  MemArena(self_type &&that) noexcept;
  self_type &operator=(self_type &&that) noexcept;
  template <typename T, typename... Args> T *make(Args &&...args);
  bool contains(const void *ptr) const;
#if __has_include(<memory_resource>)
  void *do_allocate(std::size_t bytes, std::size_t align) override;
  void do_deallocate(void *, size_t, size_t) override;
  bool do_is_equal(std::pmr::memory_resource const &that) const noexcept override;
#endif
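// Usage sketch (illustrative, not part of MemArena.h): when <memory_resource> is
// available, MemArena satisfies the std::pmr::memory_resource interface through the
// do_allocate / do_deallocate / do_is_equal overrides above, so it can back pmr
// containers directly. The header path and block size below are assumptions.
//
//   #include "swoc/MemArena.h"
//   #include <memory_resource>
//   #include <vector>
//
//   swoc::MemArena arena{4096};       // arena with an initial ~4 KB block
//   std::pmr::vector<int> v{&arena};  // polymorphic_allocator from a memory_resource*
//   v.assign({1, 2, 3});              // element storage is drawn from the arena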
  // FixedArena<T>::make - construct a T, reusing free-listed storage when possible (definition below).
  template <typename... Args> T *make(Args... args);
inline auto
MemArena::Block::Linkage::next_ptr(Block *b) -> Block *& {
  return b->_link._next;
}
inline auto
MemArena::Block::Linkage::prev_ptr(Block *b) -> Block *& {
  return b->_link._prev;
}
  return reinterpret_cast<char *>(this + 1);        // Block::data()
  return reinterpret_cast<const char *>(this + 1);  // Block::data() const
  return this->data() + allocated;                  // Block::allocated_data_end()
  return this->data() + allocated;                  // Block::allocated_data_end() const

  // Block::contains(ptr)
  const char *base = this->data();
  return base <= ptr && ptr < base + size;

  return size - allocated;                          // Block::remaining()
  return this->remaining() < MIN_FREE_SPACE;        // "full" when less than MIN_FREE_SPACE remains
  // MemArena::Block::alloc(n, align)
  auto base = this->data() + allocated;
  auto pad  = align_padding(base, align);
  if ((n + pad) > this->remaining()) {
    throw std::invalid_argument{"MemArena::Block::alloc size is more than remaining."};
  }
  MemSpan<void> zret = this->remnant().prefix(n + pad);
  zret.remove_prefix(pad);
  allocated += n + pad;
  return zret;

  return {this->data() + allocated, this->remaining()};  // Block::remnant()
inline MemArena::Block &

inline void
MemArena::Block::operator delete(void *ptr) noexcept { /* ... */ }

inline void
MemArena::Block::operator delete([[maybe_unused]] void *ptr, void *place) noexcept { /* ... */ }
  // Block::align_padding(ptr, align)
  if (auto delta = uintptr_t(ptr) & (align - 1); delta > 0) {
    return align - delta;
  }
  return 0;
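// Worked example for align_padding (illustrative): for a remnant starting at address
// 0x1003 with align == 8, delta = 0x1003 & 0x7 = 3, so the padding is 8 - 3 = 5 and
// Block::alloc above skips 5 bytes, placing the allocation at 0x1008. If the address
// is already aligned, delta is 0 and no padding is added.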
  return this->alloc(sizeof(T) * n, alignof(T)).rebind<T>();  // MemArena::alloc_span<T>(n)
template <typename T, typename... Args>
T *
MemArena::make(Args &&...args) {
  return new (this->alloc(sizeof(T), alignof(T)).data()) T(std::forward<Args>(args)...);
}
  auto span = this->alloc(s.size()).rebind<char>();  // MemArena::localize(s)
  memcpy(span.data(), s.data(), span.size());
  return {span.data(), span.size()};
  // MemArena::localize_c(s) - copy with a terminating NUL.
  auto span = this->alloc(s.size() + 1).rebind<char>();
  memcpy(span.data(), s.data(), span.size());
  span[s.size()] = '\0';
  return {span.data(), span.size()};
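// Usage sketch (illustrative, not part of MemArena.h): localize copies transient
// string data into arena-owned storage; localize_c also appends a terminating NUL
// for use with C APIs. The literal strings are placeholders.
//
//   swoc::MemArena arena;
//   swoc::MemSpan<char> name  = arena.localize("transient text");   // plain copy
//   swoc::MemSpan<char> c_str = arena.localize_c("for a C API");    // copy + NUL
//   // Both spans remain valid until the arena is cleared or destroyed.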
  // Free-listed instances are overlaid with the Item link, so T must be at least pointer sized.
  static_assert(sizeof(T) >= sizeof(T *));
template <typename T>
template <typename... Args>
T *
FixedArena<T>::make(Args... args) {
  if (_list._next) { // reuse storage from the free list when available
    void *t     = _list._next;
    _list._next = _list._next->_next;
    return new (t) T(std::forward<Args>(args)...);
  }
  return _arena.template make<T>(std::forward<Args>(args)...);
}
  // Return a destroyed instance's storage to the free list.
  auto item   = reinterpret_cast<Item *>(t);
  item->_next = _list._next;
  _list._next = item;

  _list._next = nullptr;  // FixedArena::clear() - drop the free list.
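// Usage sketch (illustrative, not part of MemArena.h): FixedArena recycles
// fixed-size instances of T, popping from its free list in make() when possible and
// otherwise allocating from the wrapped MemArena. Per the static_assert above, T
// must be at least pointer sized so a dead instance can hold the free-list link.
// The type Thing is a placeholder.
//
//   struct Thing { int a, b; Thing(int x, int y) : a(x), b(y) {} };
//
//   swoc::MemArena arena;
//   swoc::FixedArena<Thing> fa{arena};
//   Thing *t = fa.make(1, 2);   // constructed in arena memory or in reused storage
//   fa.clear();                 // drop the free list; the arena still owns the bytes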
Member summary:

FixedArena members:
  FixedArena(MemArena &arena)
  MemArena &_arena - Memory source.
  MemArena &arena() - Access the wrapped arena directly.
  Item _list - List of dead instances.
  void clear() - Drop all items in the free list.
MemArena members:

  Types:
    IntrusiveDList<Block::Linkage> BlockList - Intrusive list of blocks.
    BlockList::const_iterator const_iterator - Constant element iteration.
    const_iterator iterator - Element iteration.
    std::unique_ptr<self_type, void (*)(self_type *)> unique_ptr
    Scalar<4096> Page - Size for rounding block sizes.
    Scalar<Page::SCALE / 4> QuarterPage - Quarter page; unit for sub-page sizes.
    Scalar<16> Paragraph - Minimum unit of memory allocation.

  Constants:
    static constexpr size_t DEFAULT_ALIGNMENT - Default memory alignment.
    static constexpr size_t DEFAULT_BLOCK_SIZE - Initial block size to allocate if not specified via API.
    static constexpr size_t ALLOC_HEADER_SIZE

  Construction:
    MemArena(size_t n = DEFAULT_BLOCK_SIZE)
    MemArena(self_type const &that) = delete - No copying.
    self_type &operator=(self_type const &that) = delete - No copy assignment.
    static self_type *construct_self_contained(size_t n = DEFAULT_BLOCK_SIZE)
    static void (*destroyer)(self_type *)

  Allocation:
    MemSpan<void> alloc(size_t n, size_t align = DEFAULT_ALIGNMENT)
    MemSpan<T> alloc_span(size_t n)
    MemSpan<char> localize(char const *s)
    MemSpan<char> localize(MemSpan<char const> s)
    MemSpan<char> localize_c(char const *s)
    MemSpan<char> localize_c(MemSpan<char const> s)
    MemSpan<void> remnant()
    MemSpan<void> remnant(size_t n, size_t align = DEFAULT_ALIGNMENT)
    MemSpan<T> remnant_span(size_t n)
    self_type &require(size_t n, size_t align = DEFAULT_ALIGNMENT)
    MemArena &discard(MemSpan<void const> span)

  Generations:
    MemArena &freeze(size_t n = 0)
    MemArena &clear(size_t hint = 0)
    void destroy_active() - Clean up the active list.
    void destroy_frozen() - Clean up the frozen list.

  Introspection and iteration:
    bool contains(const void *ptr) const
    size_t allocated_size() const
    size_t reserved_size() const
    const_iterator begin() const - First active block.
    const_iterator end() const - After last active block.
    const_iterator frozen_begin() const - First frozen block.
    const_iterator frozen_end() const - After last frozen block.

  Internals:
    Block *make_block(size_t n)
    BlockList _active - Current generation; allocate here.
    BlockList _frozen - Previous generation, frozen memory.
    Block *_static_block - Static block, if any.
    size_t _active_allocated - Total allocations in the active generation.
    size_t _frozen_allocated - Total allocations in the previous generation; only non-zero while the arena is frozen.
    size_t _frozen_reserved - Total frozen reserved memory.
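A sketch of the lifecycle implied by the members above (illustrative; variable names are placeholders): allocations are served from the active generation, freeze() retires the current generation to the frozen list, and clear() releases the blocks, invalidating previously returned pointers.

  swoc::MemArena arena;
  int *n = arena.make<int>(42);                    // construct in the active generation
  swoc::MemSpan<void> raw = arena.alloc(128, 16);  // raw storage, 16-byte aligned
  arena.freeze();                                  // current generation becomes the frozen generation
  // ... copy any still-live data forward; new allocations go to fresh blocks ...
  arena.clear();                                   // release blocks; n and raw now dangle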
Cross-referenced members (MemSpan / Scalar):
  constexpr value_type *data() const - Pointer to memory in the span.
  MemSpan<U> rebind() const
  self_type prefix(size_t n) const
  self_type &remove_prefix(size_t count)
  static constexpr intmax_t SCALE - For template deduction guides.
  MemSpan(std::array<T, N> &) -> MemSpan<T> - Deduction guides.
  Scalar_INTERNAL constexpr detail::scalar_unit_round_up_t<C> round_up(C n)
FixedArena::Item members:
  Item - Rebinding type for instances on the free list.
  Item *_next - Next item in the free list.
MemArena::Block members:
  Block - Simple internal arena block of memory. Maintains the underlying memory.
  friend MemArena - Container.
  static constexpr size_t MIN_FREE_SPACE - A block must have at least this much free space to not be "full".
  size_t size - Actual block size.
  size_t allocated - Current allocated (in use) bytes.
  char *data() - Get the start of the data in this block.
  const char *data() const - Get the start of the data in this block.
  char *allocated_data_end()
  const char *allocated_data_end() const
  size_t remaining() const - Amount of unallocated storage.
  MemSpan<void> remnant() - Span of unallocated storage.
  MemSpan<void> alloc(size_t n, size_t = DEFAULT_ALIGNMENT)
  bool satisfies(size_t n, size_t align) const
  bool contains(const void *ptr) const
  static size_t align_padding(void const *ptr, size_t align)
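For completeness, a sketch of the arena introspection members listed above (illustrative; the comments restate only what the member names suggest):

  swoc::MemArena arena;
  void *p = arena.alloc(64).data();
  bool owned      = arena.contains(p);        // p lies inside one of the arena's blocks
  size_t in_use   = arena.allocated_size();   // bytes allocated from the arena so far
  size_t reserved = arena.reserved_size();    // bytes of block storage the arena holds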