MemArena.h
// SPDX-License-Identifier: Apache-2.0
// Copyright Verizon Media 2020

#pragma once

#include <cstdint> // uintptr_t
#include <cstdlib> // malloc, free
#include <cstring> // memcpy, strlen
#include <mutex>
#include <memory>
#include <stdexcept>
#include <type_traits>
#include <utility>
#include <new>
#if __has_include(<memory_resource>)
#include <memory_resource>
#endif

#include "swoc/MemSpan.h"
#include "swoc/Scalar.h"
#include "swoc/IntrusiveDList.h"
#include "swoc/TextView.h"

namespace swoc { inline namespace SWOC_VERSION_NS {

/** A memory arena.
 *
 * Memory is allocated from blocks owned by the arena and released as a unit;
 * individual allocations are never freed. A generation of memory can be
 * retired with @c freeze and later released with @c thaw.
 */
class MemArena
#if __has_include(<memory_resource>)
  : public std::pmr::memory_resource
#endif
{
  using self_type = MemArena;

public:
  static constexpr size_t DEFAULT_ALIGNMENT{1}; ///< Default memory alignment.

  /// Function to destroy an arena instance, suitable for use in @c unique_ptr.
  static void (*destroyer)(self_type *);

  /// Smart pointer for arenas, using @c destroyer for cleanup (as needed for self contained instances).
  using unique_ptr = std::unique_ptr<self_type, void (*)(self_type *)>;

  /// Simple internal arena block of memory. Maintains the underlying memory.
  struct Block {
    /// A block must have at least this much free space to not be "full".
    static constexpr size_t MIN_FREE_SPACE = 16;

    /// Get the start of the data in this block.
    char *data();

    /// Get the start of the data in this block.
    const char *data() const;

    /// End of the allocated data (first unallocated byte).
    char *allocated_data_end();

    /// End of the allocated data (first unallocated byte).
    const char *allocated_data_end() const;

    /// Amount of unallocated storage.
    size_t remaining() const;

    /// Padding needed to align @a ptr to @a align.
    static size_t align_padding(void const *ptr, size_t align);

    /// Check if the block can satisfy an allocation of @a n bytes at alignment @a align.
    bool satisfies(size_t n, size_t align) const;

    /// Span of unallocated storage.
    MemSpan<void> remnant();

    /// Allocate @a n bytes at alignment @a align from the block.
    MemSpan<void> alloc(size_t n, size_t align = DEFAULT_ALIGNMENT);

    /// Discard all allocated memory, making the block empty.
    Block &discard();

    /// Check if @a ptr is in this block.
    bool contains(const void *ptr) const;

    /// Check if the block is full (less than @c MIN_FREE_SPACE remaining).
    bool is_full() const;

  protected:
    friend MemArena; ///< Container.

    /** Allocate space for a block.
     *
     * @param block_size Size of the @c Block instance itself.
     * @param n Total number of bytes to allocate, which must be at least @a block_size.
     */
    static void *operator new(size_t block_size, size_t n);

    /// Placement @c new - construct in existing memory at @a place.
    static void *operator new([[maybe_unused]] size_t block_size, void *place) noexcept;

    /// Standard delete - release the underlying memory.
    static void operator delete(void *ptr) noexcept;

    /// Matching delete for the placement @c new.
    static void operator delete([[maybe_unused]] void *ptr, void *place) noexcept;

    /// Construct with @a n bytes of storage.
    explicit Block(size_t n) noexcept;

    size_t size;         ///< Actual block size.
    size_t allocated{0}; ///< Current allocated (in use) bytes.

    /// Intrusive list support.
    struct Linkage {
      Block *_next{nullptr}; ///< Next block in the list.
      Block *_prev{nullptr}; ///< Previous block in the list.

      static Block *&next_ptr(Block *); ///< Access the next pointer.

      static Block *&prev_ptr(Block *); ///< Access the previous pointer.
    } _link;
  };

  /// Intrusive list of blocks.
  using BlockList = IntrusiveDList<Block::Linkage>;

  /** Construct an arena.
   *
   * @param n Size hint for the first block.
   */
  explicit MemArena(size_t n = DEFAULT_BLOCK_SIZE);

  /** Construct an arena on a static block of memory.
   *
   * @param static_block Memory used as the initial block; it is never released by the arena.
   */
  explicit MemArena(MemSpan<void> static_block);

  /// No copying.
  MemArena(self_type const &that) = delete;

  /// Move construction.
  MemArena(self_type &&that) noexcept;

  ~MemArena();

  /// No copy assignment.
  self_type &operator=(self_type const &that) = delete;

  /// Move assignment.
  self_type &operator=(self_type &&that) noexcept;

  /// Create an arena instance that is stored in its own first block.
  static self_type *construct_self_contained(size_t n = DEFAULT_BLOCK_SIZE);

  /// Allocate @a n bytes of storage at alignment @a align.
  MemSpan<void> alloc(size_t n, size_t align = DEFAULT_ALIGNMENT);

  /// Allocate a span of @a n instances of @a T.
  template <typename T> MemSpan<T> alloc_span(size_t n);

  /// Construct an instance of @a T in the arena, forwarding @a args to the constructor.
  template <typename T, typename... Args> T *make(Args &&...args);

  /// Copy @a s into the arena.
  MemSpan<char> localize(MemSpan<char const> s);

  /// Copy the C string @a s into the arena.
  MemSpan<char> localize(char const *s);

  /// Copy @a s into the arena, adding a terminating nul.
  MemSpan<char> localize_c(MemSpan<char const> s);

  /// Copy the C string @a s into the arena, adding a terminating nul.
  MemSpan<char> localize_c(char const *s);

  /** Freeze the current generation of memory.
   *
   * @param n Size hint for the next block allocated.
   */
  MemArena &freeze(size_t n = 0);

  /// Release the frozen generation of memory.
  self_type &thaw();

  /** Release all memory in the arena.
   *
   * @param hint Size hint for the next block allocated.
   */
  MemArena &clear(size_t hint = 0);

  /// Discard @a span, making that memory available for re-use if possible.
  MemArena &discard(MemSpan<void const> span);

  /** Make all allocated memory available for re-use without releasing it.
   *
   * @param hint Size hint for the next block allocated.
   */
  MemArena &discard(size_t hint = 0);

  /// Amount of memory allocated in the active generation.
  size_t size() const;

  /// Amount of free space in the current block.
  size_t remaining() const;

  /// Remnant as a span of @a n instances of @a T, without allocating.
  template <typename T> MemSpan<T> remnant_span(size_t n);

  /// Span of unallocated storage in the current block, without allocating.
  MemSpan<void> remnant();

  /// Remnant of at least @a n bytes at alignment @a align, without allocating.
  MemSpan<void> remnant(size_t n, size_t align = DEFAULT_ALIGNMENT);

  /// Ensure at least @a n bytes at alignment @a align are available in the current block.
  self_type &require(size_t n, size_t align = DEFAULT_ALIGNMENT);

  /// Total allocated memory, active and frozen.
  size_t allocated_size() const;

  /// Check if @a ptr is in memory owned by this arena.
  bool contains(const void *ptr) const;

  /// Total reserved memory, active and frozen.
  size_t reserved_size() const;

  using const_iterator = BlockList::const_iterator; ///< Constant element iteration.
  using iterator = const_iterator;                  ///< Element iteration.

  const_iterator begin() const; ///< First active block.

  const_iterator end() const; ///< After last active block.

  const_iterator frozen_begin() const; ///< First frozen block.

  const_iterator frozen_end() const; ///< After last frozen block.

protected:
  /// Allocate a new block with at least @a n bytes of storage.
  Block *make_block(size_t n);

  /// Clean up the frozen list.
  void destroy_frozen();

  /// Clean up the active list.
  void destroy_active();

  using Page = Scalar<4096>;                   ///< Size for rounding block sizes.
  using QuarterPage = Scalar<Page::SCALE / 4>; ///< Quarter page - unit for sub page sizes.
  using Paragraph = Scalar<16>;                ///< Minimum unit of memory allocation.

  static constexpr size_t ALLOC_HEADER_SIZE = 16; ///< Assumed allocation overhead per block.
  /// Initial block size to allocate if not specified via API.
  static constexpr size_t DEFAULT_BLOCK_SIZE = Page::SCALE - ALLOC_HEADER_SIZE;

  size_t _active_allocated = 0; ///< Total allocations in the active generation.
  size_t _active_reserved = 0;  ///< Total reserved memory in the active generation.
  /// Total allocations in the previous generation. This is only non-zero while the arena is frozen.
  size_t _frozen_allocated = 0;
  size_t _frozen_reserved = 0; ///< Total frozen reserved memory.

  /// Size hint for the next block allocation.
  size_t _reserve_hint = 0;

  BlockList _frozen; ///< Previous generation, frozen memory.
  BlockList _active; ///< Current generation. Allocate here.

  Block *_static_block = nullptr; ///< Static block, if any.

  // Note on the _active block list - blocks that become full are moved to the end of the list.
  // This means that when searching for a block with space, the first full block encountered
  // marks the last block to check. This keeps the set of blocks to check short.

private:
#if __has_include(<memory_resource>)
  // PMR support methods.

  /// Allocate @a bytes of memory at alignment @a align.
  void *do_allocate(std::size_t bytes, std::size_t align) override;

  /// De-allocate - memory in an arena is not released per allocation.
  void do_deallocate(void *, size_t, size_t) override;

  /// Check if @a that is the same memory resource as @a this.
  bool do_is_equal(std::pmr::memory_resource const &that) const noexcept override;
#endif
};

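// Usage sketch (illustrative, not part of the original header): the typical
// generational lifecycle. @c freeze retires the current blocks, subsequent
// allocations go to a new generation, and @c thaw releases the old one.
//
// @code
//   swoc::MemArena arena{4096};
//   swoc::MemSpan<char> buf = arena.alloc_span<char>(128); // raw storage
//   auto *p = arena.make<std::pair<int, int>>(1, 2);       // placement construction
//   arena.freeze(); // current blocks become the frozen generation
//   // ... copy still-live data out of the frozen generation here ...
//   arena.thaw();   // release the frozen generation
// @endcode
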
/** An arena for fixed size objects.
 *
 * Instances of @a T are allocated from a backing @c MemArena and recycled
 * through an internal free list when destroyed.
 */
template <typename T> class FixedArena {
  using self_type = FixedArena;

protected:
  /// Rebinding type for instances on the free list.
  struct Item {
    Item *_next; ///< Next item in the free list.
  };

  Item _list{nullptr}; ///< List of dead instances.
  MemArena &_arena;    ///< Memory source.

public:
  /// Construct a fixed arena backed by @a arena.
  FixedArena(MemArena &arena);

  /// Create an instance of @a T, re-using storage from the free list if available.
  template <typename... Args> T *make(Args... args);

  /// Destroy @a t and place its storage on the free list.
  void destroy(T *t);

  /// Drop all items in the free list.
  void clear();

  /// Access the wrapped arena directly.
  MemArena &arena();
};

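// Usage sketch (illustrative, not part of the original header): recycling
// fixed size objects. @c destroy runs the destructor and pushes the storage
// on the free list; the next @c make re-uses it instead of new arena space.
//
// @code
//   struct Thing {
//     Thing(int a, int b) : x(a), y(b) {}
//     int x, y;
//   };
//   swoc::MemArena arena;
//   swoc::FixedArena<Thing> things{arena};
//   Thing *t1 = things.make(1, 2); // allocated from the arena
//   things.destroy(t1);            // destructed, storage kept on the free list
//   Thing *t2 = things.make(3, 4); // re-uses the storage from t1
// @endcode
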
// --- Implementation ---

inline auto
MemArena::Block::Linkage::next_ptr(Block *b) -> Block *& {
  return b->_link._next;
}

inline auto
MemArena::Block::Linkage::prev_ptr(Block *b) -> Block *& {
  return b->_link._prev;
}

inline MemArena::Block::Block(size_t n) noexcept : size(n) {}

inline char *
MemArena::Block::data() {
  return reinterpret_cast<char *>(this + 1);
}

inline const char *
MemArena::Block::data() const {
  return reinterpret_cast<const char *>(this + 1);
}

inline char *
MemArena::Block::allocated_data_end() {
  return this->data() + allocated;
}

inline const char *
MemArena::Block::allocated_data_end() const {
  return this->data() + allocated;
}

inline bool
MemArena::Block::contains(const void *ptr) const {
  const char *base = this->data();
  return base <= ptr && ptr < base + size;
}

inline size_t
MemArena::Block::remaining() const {
  return size - allocated;
}

inline bool
MemArena::Block::is_full() const {
  return this->remaining() < MIN_FREE_SPACE;
}

inline MemSpan<void>
MemArena::Block::alloc(size_t n, size_t align) {
  auto base = this->data() + allocated;
  auto pad = align_padding(base, align);
  if ((n + pad) > this->remaining()) {
    throw(std::invalid_argument{"MemArena::Block::alloc size is more than remaining."});
  }
  MemSpan<void> zret = this->remnant().prefix(n + pad);
  zret.remove_prefix(pad);
  allocated += n + pad;
  return zret;
}

inline MemSpan<void>
MemArena::Block::remnant() {
  return {this->data() + allocated, this->remaining()};
}

inline MemArena::Block &
MemArena::Block::discard() {
  allocated = 0;
  return *this;
}

inline void *
MemArena::Block::operator new(size_t block_size, size_t n) {
  if (n < block_size) {
    throw std::invalid_argument("MemArena::Block::operator new size is less than object size.");
  }
  // In theory we could use ::operator new(n) but this causes a size mismatch during ::operator delete.
  // Easier to use malloc here and also override @c delete.
  auto b = static_cast<Block *>(::malloc(n));
  if (b == nullptr) {
    throw std::bad_alloc();
  }
  return b;
}

inline void *
MemArena::Block::operator new([[maybe_unused]] size_t block_size, void *place) noexcept {
  return place;
}

inline void
MemArena::Block::operator delete(void *ptr) noexcept {
  ::free(ptr);
}

inline void
MemArena::Block::operator delete([[maybe_unused]] void *ptr, void *place) noexcept {
  ::free(place);
}

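// Note on the sized @c new above: a block and its data are carved from one
// allocation, with the data immediately following the @c Block header. A
// sketch of how this is presumably used internally (illustrative only):
//
// @code
//   size_t payload = 4096;
//   Block *b = new (sizeof(Block) + payload) Block(payload);
//   // b->data() == reinterpret_cast<char *>(b + 1), in the same allocation.
// @endcode
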
inline size_t
MemArena::Block::align_padding(void const *ptr, size_t align) {
  if (auto delta = uintptr_t(ptr) & (align - 1); delta > 0) {
    return align - delta;
  }
  return 0;
}

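// Worked example of align_padding: for ptr == 0x1003 and align == 8 (a power
// of two), delta == 0x1003 & 0x7 == 3, so the padding is 8 - 3 == 5 bytes and
// the next aligned address is 0x1008. The mask computation assumes @a align
// is a power of two.
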
inline MemArena::MemArena(size_t n) : _reserve_hint(n) {}

template <typename T>
MemSpan<T>
MemArena::alloc_span(size_t n) {
  return this->alloc(sizeof(T) * n, alignof(T)).rebind<T>();
}

template <typename T, typename... Args>
T *
MemArena::make(Args &&...args) {
  return new (this->alloc(sizeof(T), alignof(T)).data()) T(std::forward<Args>(args)...);
}

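// Usage sketch (illustrative): @c make combines @c alloc with placement
// @c new, so the instance is correctly aligned and its storage lives until
// the arena is cleared. Note the arena never invokes destructors.
//
// @code
//   struct Point { Point(double x, double y) : x(x), y(y) {} double x, y; };
//   swoc::MemArena arena;
//   Point *p = arena.make<Point>(1.0, 2.0); // constructed in arena memory
// @endcode
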
inline MemSpan<char>
MemArena::localize(MemSpan<char const> s) {
  auto span = this->alloc(s.size()).rebind<char>();
  memcpy(span.data(), s.data(), span.size());
  return {span.data(), span.size()};
}

inline MemSpan<char>
MemArena::localize(char const *s) {
  return this->localize(MemSpan<char const>(s, strlen(s)));
}

inline MemSpan<char>
MemArena::localize_c(MemSpan<char const> s) {
  auto span = this->alloc(s.size() + 1).rebind<char>();
  memcpy(span.data(), s.data(), s.size()); // copy only the source bytes - the nul is written below
  span[s.size()] = '\0';
  return {span.data(), span.size()};
}

inline MemSpan<char>
MemArena::localize_c(char const *s) {
  return this->localize_c(MemSpan<char const>(s, strlen(s)));
}

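// Usage sketch (illustrative): localizing transient strings so the copies
// share the arena's lifetime. @c localize_c adds a terminating nul for C APIs.
//
// @code
//   swoc::MemArena arena;
//   swoc::MemSpan<char> name = arena.localize("transient");    // arena-lifetime copy
//   swoc::MemSpan<char> cstr = arena.localize_c("for C APIs"); // nul terminated
//   puts(cstr.data()); // requires <cstdio>
// @endcode
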
template <typename T>
MemSpan<T>
MemArena::remnant_span(size_t n) {
  auto span = this->require(sizeof(T) * n, alignof(T)).remnant();
  return span.remove_prefix(Block::align_padding(span.data(), alignof(T))).rebind<T>();
}

template <>
inline MemSpan<void>
MemArena::remnant_span<void>(size_t n) {
  return this->require(n).remnant().prefix(n);
}

inline MemSpan<void>
MemArena::remnant(size_t n, size_t align) {
  return this->require(n, align).remnant().prefix(n);
}

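// Usage sketch (illustrative, the commit pattern is an assumption of this
// example): @c remnant reserves contiguous scratch space without marking it
// allocated, so data of unknown final size can be built in place and then
// committed with @c alloc.
//
// @code
//   swoc::MemArena arena;
//   swoc::MemSpan<void> scratch = arena.remnant(256); // reserved, not allocated
//   int n = snprintf(static_cast<char *>(scratch.data()), scratch.size(), "%d", 42);
//   arena.alloc(n); // commit exactly the bytes written (requires <cstdio>)
// @endcode
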
inline size_t
MemArena::size() const {
  return _active_allocated;
}

inline size_t
MemArena::allocated_size() const {
  return _frozen_allocated + _active_allocated;
}

inline size_t
MemArena::remaining() const {
  return _active.empty() ? 0 : _active.head()->remaining();
}

inline MemSpan<void>
MemArena::remnant() {
  return _active.empty() ? MemSpan<void>() : _active.head()->remnant();
}

inline size_t
MemArena::reserved_size() const {
  return _active_reserved + _frozen_reserved;
}

inline auto
MemArena::begin() const -> const_iterator {
  return _active.begin();
}

inline auto
MemArena::end() const -> const_iterator {
  return _active.end();
}

inline auto
MemArena::frozen_begin() const -> const_iterator {
  return _frozen.begin();
}

inline auto
MemArena::frozen_end() const -> const_iterator {
  return _frozen.end();
}

template <typename T> FixedArena<T>::FixedArena(MemArena &arena) : _arena(arena) {
  static_assert(sizeof(T) >= sizeof(T *));
}

template <typename T>
template <typename... Args>
T *
FixedArena<T>::make(Args... args) {
  if (_list._next) {
    void *t = _list._next;
    _list._next = _list._next->_next;
    return new (t) T(std::forward<Args>(args)...);
  }
  return _arena.template make<T>(std::forward<Args>(args)...);
}

template <typename T>
void
FixedArena<T>::destroy(T *t) {
  if (t) {
    t->~T(); // Run the destructor explicitly; the storage is recycled below.
    auto item = reinterpret_cast<Item *>(t);
    item->_next = _list._next;
    _list._next = item;
  }
}

template <typename T>
void
FixedArena<T>::clear() {
  _list._next = nullptr;
}

template <typename T>
MemArena &
FixedArena<T>::arena() {
  return _arena;
}

}} // namespace swoc::SWOC_VERSION_NS