#if __has_include(<memory_resource>)
#include <memory_resource>
#endif
namespace swoc { inline namespace SWOC_VERSION_NS {
class MemArena
#if __has_include(<memory_resource>)
  : public std::pmr::memory_resource ///< Expose the arena through the std::pmr interface when available.
#endif
{
using unique_ptr = std::unique_ptr<self_type, void (*)(self_type *)>;
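The unique_ptr alias pairs with construct_self_contained and the destroyer member (both listed in the member summary further down) for arenas that live inside memory they themselves manage. A minimal sketch of that pairing, assuming the usual swoc/MemArena.h include path; check the reference documentation for the exact cleanup convention:

#include "swoc/MemArena.h"

// Sketch: build an arena whose own instance lives in memory it manages, owned through
// the unique_ptr alias so that @c destroyer (not plain delete) tears it down.
swoc::MemArena::unique_ptr make_owned_arena() {
  return swoc::MemArena::unique_ptr{swoc::MemArena::construct_self_contained(4096),
                                    swoc::MemArena::destroyer};
}

void self_contained_demo() {
  auto arena = make_owned_arena();
  auto span  = arena->alloc(128); // Use like any other arena.
  (void)span;
} // The arena and all of its blocks are released when the unique_ptr is destroyed.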
bool satisfies(size_t n, size_t align) const; ///< Can the block provide @a n bytes of storage at @a align alignment?

static void *operator new(size_t block_size, size_t n);                              ///< Allocate a block backed by @a n total bytes.
static void *operator new([[maybe_unused]] size_t block_size, void *place) noexcept; ///< Placement (in place) construction.
static void operator delete(void *ptr) noexcept;                                     ///< Release a block allocated with ::malloc.
static void operator delete([[maybe_unused]] void *ptr, void *place) noexcept;       ///< Matching delete for placement construction.
Block *_next{nullptr}; ///< Next block in the intrusive list.
Block *_prev{nullptr}; ///< Previous block in the intrusive list.
MemArena(self_type &&that) noexcept;             ///< Move construction.
self_type &operator=(self_type &&that) noexcept; ///< Move assignment.
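Move construction and assignment transfer ownership of the underlying blocks, so an arena can be built locally and returned by value. A brief usage sketch, assuming the swoc/MemArena.h include path:

#include <utility>
#include "swoc/MemArena.h"

swoc::MemArena build_scratch() {
  swoc::MemArena arena{8192};
  arena.alloc(100);   // Memory allocated here travels with the returned arena.
  return arena;
}

void move_demo() {
  swoc::MemArena a = build_scratch();
  swoc::MemArena b;
  b = std::move(a);   // @a b now owns the blocks previously held by @a a.
}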
template <typename T, typename... Args> T *make(Args &&...args); ///< Construct a @a T in the arena, forwarding @a args.
bool contains(const void *ptr) const; ///< Check whether @a ptr points into memory owned by this arena.
#if __has_include(<memory_resource>)
  // std::pmr::memory_resource overrides.
  void *do_allocate(std::size_t bytes, std::size_t align) override;
  void do_deallocate(void *, size_t, size_t) override;
  bool do_is_equal(std::pmr::memory_resource const &that) const noexcept override;
#endif
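Because MemArena derives from std::pmr::memory_resource when <memory_resource> is available, it can back the standard polymorphic-allocator containers directly. A hedged sketch, assuming the swoc/MemArena.h include path and a toolchain that ships the header:

#include <memory_resource>
#include <vector>
#include "swoc/MemArena.h"

void pmr_demo() {
  swoc::MemArena arena{4096};       // Arena with a 4 KB initial block.
  std::pmr::vector<int> v{&arena};  // The vector allocates through the arena.
  for (int i = 0; i < 1000; ++i) {
    v.push_back(i);
  }
  // Storage is reclaimed in bulk when the arena is cleared or destroyed,
  // not piecemeal through do_deallocate.
}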
template <typename... Args> T *make(Args... args); ///< Construct a @a T, reusing storage from the free list when possible.
inline auto MemArena::Block::Linkage::next_ptr(Block *b) -> Block *& {
  return b->_link._next;
}

inline auto MemArena::Block::Linkage::prev_ptr(Block *b) -> Block *& {
  return b->_link._prev;
}
inline char *MemArena::Block::data() {
  return reinterpret_cast<char *>(this + 1); // Storage starts immediately after the block header.
}

inline const char *MemArena::Block::data() const {
  return reinterpret_cast<const char *>(this + 1);
}

inline char *MemArena::Block::allocated_data_end() {
  return this->data() + allocated;
}

inline const char *MemArena::Block::allocated_data_end() const {
  return this->data() + allocated;
}
inline bool MemArena::Block::contains(const void *ptr) const {
  const char *base = this->data();
  return base <= ptr && ptr < base + size;
}

inline size_t MemArena::Block::remaining() const { return size - allocated; }

inline bool MemArena::Block::is_full() const { return this->remaining() < MIN_FREE_SPACE; }
inline MemSpan<void> MemArena::Block::alloc(size_t n, size_t align) {
  auto base = this->data() + allocated;
  auto pad  = align_padding(base, align); // Bytes needed to reach @a align alignment.
  if ((n + pad) > this->remaining()) {
    throw std::invalid_argument{"MemArena::Block::alloc size is more than remaining."};
  }
  MemSpan<void> zret = this->remnant().prefix(n + pad);
  zret.remove_prefix(pad); // Hand back only the aligned storage, padding excluded.
  allocated += n + pad;
  return zret;
}

inline MemSpan<void> MemArena::Block::remnant() {
  return {this->data() + allocated, this->remaining()};
}
inline MemArena::Block & /* ... */
inline void *MemArena::Block::operator new(size_t block_size, size_t n) {
  // @a n is the total footprint - it must be at least large enough for the block header itself.
  if (n < block_size) {
    throw std::invalid_argument("MemArena::Block::operator new size is less than object size.");
  }
  auto b = static_cast<Block *>(::malloc(n));
  if (b == nullptr) {
    throw std::bad_alloc();
  }
  return b;
}
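This size parameter lets one underlying allocation hold both the Block header and its data, which is why data() is simply this + 1. A minimal standalone sketch of the pattern; DemoBlock is illustrative and not part of the library:

#include <cstddef>
#include <cstdlib>
#include <new>
#include <stdexcept>

struct DemoBlock {
  size_t size; // Usable bytes following the header.

  char *data() { return reinterpret_cast<char *>(this + 1); } // Payload starts right after the header.

  // Sized placement-argument new: @a total is the full footprint, header included.
  static void *operator new(size_t hdr_size, size_t total) {
    if (total < hdr_size) {
      throw std::invalid_argument("total must cover the header");
    }
    if (void *p = ::malloc(total)) {
      return p;
    }
    throw std::bad_alloc();
  }
  static void operator delete(void *ptr) noexcept { ::free(ptr); }
};

// One 4096 byte malloc: the DemoBlock header sits at the front, the rest is reachable via data().
inline DemoBlock *make_demo_block() {
  return new (size_t{4096}) DemoBlock{4096 - sizeof(DemoBlock)};
}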
inline void *MemArena::Block::operator new([[maybe_unused]] size_t block_size, void *place) noexcept {
  return place; // Standard placement semantics - construct in the provided memory.
}
inline void MemArena::Block::operator delete(void *ptr) noexcept {
  ::free(ptr); // Blocks are allocated with ::malloc.
}
inline void MemArena::Block::operator delete([[maybe_unused]] void *ptr, void *place) noexcept {
  ::free(place); // Invoked if construction fails - release the raw storage.
}
inline size_t MemArena::Block::align_padding(void const *ptr, size_t align) {
  if (auto delta = uintptr_t(ptr) & (align - 1); delta > 0) {
    return align - delta;
  }
  return 0;
}
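The mask trick assumes align is a power of two: ptr & (align - 1) is the offset past the previous aligned boundary, and its complement is the padding to the next one. A standalone restatement (pad_for is illustrative, not a library function):

#include <cstddef>
#include <cstdint>

// Padding needed to advance @a ptr to the next @a align boundary; @a align must be a power of two.
inline std::size_t pad_for(void const *ptr, std::size_t align) {
  auto delta = reinterpret_cast<std::uintptr_t>(ptr) & (align - 1);
  return delta ? align - delta : 0;
}

// Example: an address ending in 0x3 needs 5 bytes to reach 8 byte alignment,
// so pad_for(reinterpret_cast<void const *>(0x1003), 8) == 5.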
template <typename T>
MemSpan<T> MemArena::alloc_span(size_t n) {
  return this->alloc(sizeof(T) * n, alignof(T)).rebind<T>();
}

template <typename T, typename... Args>
T *MemArena::make(Args &&...args) {
  // Allocate suitably aligned raw storage, then construct in place with the forwarded arguments.
  return new (this->alloc(sizeof(T), alignof(T)).data()) T(std::forward<Args>(args)...);
}
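A usage sketch of alloc_span and make; Endpoint is an illustrative user type, and the object's lifetime ends with the arena (there is no individual delete), so non-trivially destructible types need extra care:

#include <string_view>
#include "swoc/MemArena.h"

struct Endpoint { // Illustrative type, not part of the library.
  std::string_view name;
  int port;
  Endpoint(std::string_view n, int p) : name(n), port(p) {}
};

void make_demo() {
  swoc::MemArena arena;
  // Construct directly in arena memory - storage is reclaimed with the arena, not per object.
  Endpoint *ep = arena.make<Endpoint>("example.com", 443);
  auto ints    = arena.alloc_span<int>(16); // Uninitialized storage for 16 ints, aligned for int.
  ints[0]      = ep->port;
}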
inline MemSpan<char> MemArena::localize(MemSpan<char const> s) {
  auto span = this->alloc(s.size()).rebind<char>();
  memcpy(span.data(), s.data(), span.size());
  return {span.data(), span.size()};
}

inline MemSpan<char> MemArena::localize_c(MemSpan<char const> s) {
  auto span = this->alloc(s.size() + 1).rebind<char>();
  memcpy(span.data(), s.data(), s.size()); // Copy only the source bytes - the extra byte is for the terminator.
  span[s.size()] = '\0';
  return {span.data(), span.size()};
}
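A usage sketch of the C-string overloads listed in the member summary below: localize copies transient character data into arena storage so it outlives its source, and localize_c additionally appends a terminating nul for use with C APIs:

#include <cstdio>
#include <string>
#include "swoc/MemArena.h"

void localize_demo(swoc::MemArena &arena, std::string const &transient) {
  auto name   = arena.localize(transient.c_str());   // Arena-resident copy of the characters.
  auto c_name = arena.localize_c(transient.c_str()); // Arena-resident copy with a trailing '\0'.
  // Both spans stay valid after @a transient is destroyed, for as long as the arena holds its memory.
  std::printf("%.*s / %s\n", int(name.size()), name.data(), c_name.data());
}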
// T must be at least pointer sized so that a released instance can hold the free list link.
template <typename T> FixedArena<T>::FixedArena(MemArena &arena) : _arena(arena) {
  static_assert(sizeof(T) >= sizeof(T *));
}
template <typename T>
template <typename... Args>
T *FixedArena<T>::make(Args... args) {
  if (_list._next) { // Reuse a dead instance from the free list if one is available.
    void *t     = _list._next;
    _list._next = _list._next->_next;
    return new (t) T(std::forward<Args>(args)...);
  }
  return _arena.template make<T>(std::forward<Args>(args)...); // Otherwise construct in fresh arena memory.
}

template <typename T>
void FixedArena<T>::destroy(T *t) {
  if (t) {
    t->~T();
    auto item   = reinterpret_cast<Item *>(t); // Reuse the storage as a free list link.
    item->_next = _list._next;
    _list._next = item;
  }
}

template <typename T>
void FixedArena<T>::clear() {
  _list._next = nullptr; // Drop all items in the free list.
}
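A usage sketch of FixedArena as a fixed-size object pool layered on a MemArena, using the members shown above (Widget is illustrative):

#include "swoc/MemArena.h"

struct Widget { // Illustrative type - must be at least pointer sized for the free list trick.
  int id;
  void *payload;
  explicit Widget(int i) : id(i), payload(nullptr) {}
};

void pool_demo() {
  swoc::MemArena arena;
  swoc::FixedArena<Widget> pool{arena};

  Widget *w1 = pool.make(1); // Constructed in fresh arena memory.
  pool.destroy(w1);          // Destructed; its storage is pushed on the free list.
  Widget *w2 = pool.make(2); // Likely reuses the storage released by w1.
  pool.destroy(w2);
  pool.clear();              // Forget the free list; the arena still owns the raw memory.
}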
MemArena member summary:

  MemArena(size_t n = DEFAULT_BLOCK_SIZE)
  MemArena(self_type const &that) = delete - No copying.
  self_type &operator=(self_type const &that) = delete - No copy assignment.
  static self_type *construct_self_contained(size_t n = DEFAULT_BLOCK_SIZE)
  static void (*destroyer)(self_type *)
  std::unique_ptr<self_type, void (*)(self_type *)> unique_ptr
  MemSpan<void> alloc(size_t n, size_t align = DEFAULT_ALIGNMENT)
  MemSpan<T> alloc_span(size_t n)
  MemSpan<char> localize(char const *s)
  MemSpan<char> localize(MemSpan<char const> s)
  MemSpan<char> localize_c(char const *s)
  MemSpan<char> localize_c(MemSpan<char const> s)
  MemSpan<void> remnant()
  MemSpan<void> remnant(size_t n, size_t align = DEFAULT_ALIGNMENT)
  MemSpan<T> remnant_span(size_t n)
  self_type &require(size_t n, size_t align = DEFAULT_ALIGNMENT)
  MemArena &discard(MemSpan<void const> span)
  MemArena &freeze(size_t n = 0)
  MemArena &clear(size_t hint = 0)
  bool contains(const void *ptr) const
  size_t allocated_size() const
  size_t reserved_size() const
  const_iterator begin() const - First active block.
  const_iterator end() const - After last active block.
  const_iterator frozen_begin() const - First frozen block.
  const_iterator frozen_end() const - After last frozen block.
  BlockList::const_iterator const_iterator - Constant element iteration.
  const_iterator iterator - Element iteration.
  IntrusiveDList<Block::Linkage> BlockList - Intrusive list of blocks.
  static constexpr size_t DEFAULT_ALIGNMENT - Default memory alignment.
  static constexpr size_t DEFAULT_BLOCK_SIZE - Initial block size to allocate if not specified via API.
  static constexpr size_t ALLOC_HEADER_SIZE
  Scalar<4096> Page - Size for rounding block sizes.
  Scalar<Page::SCALE / 4> QuarterPage - Quarter page, the unit for sub-page sizes.
  Scalar<16> Paragraph - Minimum unit of memory allocation.
  Block *make_block(size_t n)
  Block *_static_block - Static block, if any.
  BlockList _active - Current generation; allocation happens here.
  BlockList _frozen - Previous generation, frozen memory.
  void destroy_active() - Clean up the active list.
  void destroy_frozen() - Clean up the frozen list.
  size_t _active_allocated - Total allocations in the active generation.
  size_t _frozen_allocated - Total allocations in the previous generation; only non-zero while the arena is frozen.
  size_t _frozen_reserved - Total frozen reserved memory.

FixedArena<T> member summary:

  FixedArena(MemArena &arena)
  MemArena &arena() - Access the wrapped arena directly.
  void clear() - Drop all items in the free list.
  MemArena &_arena - Memory source.
  Item _list - List of dead instances.
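A usage sketch of the lifecycle members above: a MemArena works well as per-transaction scratch space, where everything is released at once with clear() instead of freeing objects individually, and freeze() keeps the previous generation readable while any still-needed data is copied forward:

#include "swoc/MemArena.h"

// Per-transaction scratch memory: allocate freely, then release everything in one call.
void handle_transaction(swoc::MemArena &arena) {
  auto header = arena.alloc(256);       // Raw storage for this transaction.
  auto body   = arena.alloc(4096, 64);  // 64 byte aligned storage.
  // ... fill and use header / body ...
  (void)header;
  (void)body;

  size_t used = arena.allocated_size(); // Bytes handed out during this transaction.
  arena.clear(used);  // Drop all blocks; @a used serves as a size hint for the next round.
}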
MemArena::Block member summary (simple internal arena block of memory; maintains the underlying memory):

  MemSpan<void> alloc(size_t n, size_t = DEFAULT_ALIGNMENT)
  bool satisfies(size_t n, size_t align) const
  size_t remaining() const - Amount of unallocated storage.
  MemSpan<void> remnant() - Span of unallocated storage.
  char *data() / const char *data() const - Get the start of the data in this block.
  char *allocated_data_end() / const char *allocated_data_end() const
  bool contains(const void *ptr) const
  static size_t align_padding(void const *ptr, size_t align)
  static constexpr size_t MIN_FREE_SPACE - A block must have at least this much free space to not be "full".
  size_t allocated - Current allocated (in use) bytes.
  size_t size - Actual block size.
  friend MemArena - Container.

FixedArena<T>::Item (rebinding type for instances on the free list):

  Item *_next - Next item in the free list.