Broom 1.0.0
A thread-local C++ Garbage Collector
Loading...
Searching...
No Matches
allocator.h
Go to the documentation of this file.
1#pragma once
2
3#include <absl/container/btree_set.h>
4
5#include <cstdint>
6#include <limits>
7#include <type_traits>
8#include <vector>
9
10#include "src/atomics.h"
11#include "src/globals.h"
12#include "src/macros.h"
13
14namespace broom {
15// MemoryRegion is used to encapsulate a pair of {start, size} with helper
16// methods. MemoryRegions should never own the underlying pointer and change its
17// state.
19 private:
// Comparator for the free-list B-trees: orders regions by size only, so
// regions of equal size compare equivalent (multiset semantics). Used via
// the SetCompare alias below.
20 static bool SetCompareFn(const MemoryRegion& s1, const MemoryRegion& s2) {
21 return s1.Size() < s2.Size();
22 }
23
24 public:
// Constructors. NOTE(review): the doc extraction dropped original lines
// 26-27 and 32 (the initializer lists/bodies and, per the documentation
// index, a MemoryRegion(UintPtr start, UintPtr end) overload at original
// line 27) -- consult the real header before editing.
25 MemoryRegion(const void* start, size_t size)
// Sanity-check that the region is well formed (end does not precede start).
28 BASSERT_GE(end, start, "end must be greater than start: {:#x} >= {:#x}",
29 start, end);
30 }
31 MemoryRegion(const void* start, const void* end)
33 ~MemoryRegion() = default;
34
// Accessors. Only {start_, size_} are stored; End() is derived, keeping the
// object at exactly kPointerSize + sizeof(size_t) (see static_assert below).
35 size_t Size() const { return size_; }
36 UintPtr Start() const { return start_; }
37 UintPtr End() const { return start_ + size_; }
38 void* StartPtr() const { return SafeCast<void*>(start_); }
39 void* EndPtr() const { return SafeCast<void*>(start_ + size_); }
// Attempts to extend the region by `howmuch` bytes, returning success.
// NOTE(review): the body (original line 41) was dropped by the extraction;
// presumably it grows size_ with an overflow check -- confirm against the
// real header.
40 bool AttemptGrow(size_t howmuch) {
42 }
// Shrinks the region from the front by `how_much` bytes. NOTE(review):
// original line 45 (presumably `start_ += how_much;`, to match the
// comment that regions never own/move the underlying pointer's state) was
// dropped by the extraction -- confirm against the real header.
43 void IncrementStart(size_t how_much) {
44 size_ -= how_much;
46 BENSURE_GT(size_, 0, "Size must not be 0");
47 }
48
// Equality compares both endpoints, unlike SetCompareFn which compares
// size only; FindExactRegion relies on this distinction.
49 bool operator==(const MemoryRegion& other) const {
50 return start_ == other.start_ && size_ == other.size_;
51 }
52 bool operator!=(const MemoryRegion& other) const { return !(*this == other); }
53
// Wraps SetCompareFn in a stateless type usable as a B-tree comparator
// template argument.
54 using SetCompare =
55 std::integral_constant<decltype(&SetCompareFn), &SetCompareFn>;
56
57 protected:
// NOTE(review): original line 58 (`UintPtr start_;`, per the accessors
// above) was dropped by the extraction.
59 size_t size_;
60};
61
62static_assert(sizeof(MemoryRegion) == kPointerSize + sizeof(size_t));
63static_assert(std::is_trivially_destructible_v<MemoryRegion>);
64
65// MemorySpace is meant as a light-weight abstraction for an allocated space and
66// a top pointer to keep track how much of the space was allocated. More complex
67// allocation strategies should be implemented in a class derived from
68// Allocator, not MemorySpace.
69//
70// MemorySpace is also meant to be disjoint between threads, and no two threads
71// should have overlapping MemorySpaces.
72class MemorySpace : public MemoryRegion {
73 public:
// NOTE(review): the extraction dropped original line 75 (the constructor
// initializer list/body) -- confirm how top_ is initialized (presumably to
// Start(), given the assertions in Allocate) against the real header.
74 MemorySpace(const void* start, size_t size)
76 ~MemorySpace() = default;
77
// Current bump pointer; [Start(), top_) has already been handed out.
78 UintPtr Top() const { return top_; }
79 // Simple bump allocation in the memory space. Memory spaces don't support
80 // freeing and the top_ pointer only ever grows rather than shrinks.
// Returns the previous top on success, or kNullUintPtr when `size` bytes do
// not fit. NOTE(review): the signature line (original line 81,
// `UintPtr Allocate(size_t size) {` per the documentation index) was
// dropped by the extraction.
82 // Sanity check.
83 BASSERT(IsAligned<kUintPtrSize>(size), "size ({}) must be pointer aligned",
84 size);
85 BASSERT_GE(top_, Start(),
86 "top address must be greater than or equal to the start "
87 "address: {:#x} < {:#x}",
88 top_, start_);
89 if (top_ >= Start() && (top_ + size) <= End()) {
90 UintPtr old_top = top_;
91 top_ += size;
92 return old_top;
93 }
94 return kNullUintPtr;
95 }
96
97 // Sets the top_ to the end of the space.
98 void Fill() { top_ = End(); }
// Bytes still available for bump allocation between top_ and End().
99 size_t RemainingBytes() const { return End() - top_; }
100
// A space whose top_ is kNullUintPtr is treated as unusable.
101 bool IsValid() const { return top_ != kNullUintPtr; }
102
103 private:
104 UintPtr top_;
105};
106
107static_assert(sizeof(MemorySpace) == sizeof(MemoryRegion) + kPointerSize);
108static_assert(std::is_trivially_destructible_v<MemorySpace>);
109
110// TODO(gc): Currently not exposed to consumers in any meaningful way.
// Memory protection flags for mapped spaces. NOTE(review): the extraction
// dropped the enum declaration (original line 111, `MemoryPermissions` per
// the documentation index) and the enumerators at original lines 113-114
// (presumably writable/executable variants; kDefaultPermissions at original
// lines 247-248 references one of them) -- consult the real header.
112 kReadOnly,
115 kNone,
116};
117
119 public:
120 // None of these are used in a concurrent setting, and therefore are safe to
121 // access with non-atomic operations.
122 bool IsCurrentlyAllocated() const { return chunk_bits_ & kAllocatedBit; }
123 bool AlreadyScanned() const { return chunk_bits_ & kMarkBit; }
124 size_t Size() const { return chunk_bits_ >> kSizeShift; }
125 bool IsValid() const { return chunk_bits_ != 0; }
126 void ZeroBytes() { chunk_bits_ = 0; }
127
128 void SetSize(size_t size) {
129 BENSURE_LE(size, kMaxSize, "{} is too large of an allocation size.", size);
130 chunk_bits_ = size << kSizeShift;
131 }
132 void MarkAllocated(size_t size) {
133 BENSURE_LE(size, kMaxSize, "{} is too large of an allocation size.", size);
134 chunk_bits_ = kAllocatedBit | size << kSizeShift;
135 }
136 void MarkDisposed() { chunk_bits_ &= ~kAllocatedBit; }
137 void MarkScanned() { chunk_bits_ |= kMarkBit; }
138 void UnmarkScanned() { chunk_bits_ &= ~kMarkBit; }
139 bool AttemptGrow(uint64_t howmuch) {
140 // Make sure that the top bits are clear.
141 BENSURE_EQ((howmuch << kSizeShift) >> kSizeShift, howmuch,
142 "Expected top bits of {} to be clear", howmuch);
143 return !AddOverflowChecked(chunk_bits_, howmuch << kSizeShift, chunk_bits_);
144 }
145
146 static constexpr size_t MaxSize() { return kMaxSize; }
147
148 private:
149 size_t SizeWithoutAllocatedBit() const {
150 return chunk_bits_ & ~kAllocatedBit;
151 }
152
153 static constexpr const size_t kAllocatedBit = 1;
154 static constexpr const size_t kMarkBit = 2;
155 static constexpr const size_t kSizeShift = 5;
156 // 5 bits free, largest allocation is 0.5 EiB.
157 static constexpr const size_t kMaxSize = 0x7FF'FFFF'FFFF'FFFFull;
158 // The format of the metadata is as follows, 0-indexed:
159 // - Bit 0: Marks if the chunk is allocated or not.
160 // - Bit 1: Used during GC marking.
161 // - Bits 63 - 5: Store the size of the allocation.
162 uint64_t chunk_bits_;
163};
164
165// We don't need to keep track of pin information at the back.
167
168// XXX(gc): Consider inheritance instead.
170 public:
171 // Forward to common metadata.
172 bool IsCurrentlyAllocated() const { return common_.IsCurrentlyAllocated(); }
173 bool AlreadyScanned() const { return common_.AlreadyScanned(); }
174 size_t Size() const { return common_.Size(); }
175 bool IsValid() const { return common_.IsValid(); }
176 void ZeroBytes() {
177 common_.ZeroBytes();
178 pin_count_ = 0;
179 }
180
181 void SetSize(size_t size) { common_.SetSize(size); }
182 void MarkAllocated(size_t size) {
183 common_.MarkAllocated(size);
184 pin_count_ = 0;
185 }
187 common_.MarkDisposed();
188 BASSERT_EQ(pin_count_, 0, "Trying to mark pinned allocation as disposed");
189 }
190 void MarkScanned() { common_.MarkScanned(); }
191 void UnmarkScanned() { common_.UnmarkScanned(); }
192 bool AttemptGrow(size_t howmuch) { return common_.AttemptGrow(howmuch); }
193
194 // Front metadata specific methods. These are used to implement sharing across
195 // threads, and therefore need to be operated on using atomics. Relaxed memory
196 // semantics are sufficient here, since we don't care about the ordering.
197 void Pin(uint32_t how_many) {
198 BROOM_UNUSED uint32_t previous =
199 Atomic::RelaxedFetchAdd(&pin_count_, uint32_t{how_many});
200 BASSERT_LT(previous, std::numeric_limits<uint32_t>::max(),
201 "Overflow of pin_count_");
202 }
203 void Unpin() {
204 BROOM_UNUSED uint32_t previous =
205 Atomic::RelaxedFetchSub(&pin_count_, uint32_t{1});
206 BASSERT_GT(previous, 0, "Expected pin_count_ > 0, {}", previous);
207 }
208 bool IsPinned() { return Atomic::RelaxedLoad(&pin_count_) > 0; }
209
210 private:
211 // The front metadata must mirror the metadata at the back of an allocation.
213 // Atomically counts how many times an allocation was pinned. This is used
214 // during sharing in a multi-threaded environment.
215 uint32_t pin_count_;
216 // Reserved bits, with the dual purpose of alignment.
217 BROOM_UNUSED uint32_t reserved_;
218};
219
220static_assert(alignof(AllocatorFrontMetadata) == kPointerSize);
221static_assert(alignof(AllocatorBackMetadata) == kPointerSize);
222static_assert(std::is_trivially_destructible_v<AllocatorBackMetadata>);
223static_assert(std::is_trivially_destructible_v<AllocatorFrontMetadata>);
224
// Coarse memory-pressure levels derived from the allocated-to-mapped byte
// ratio (see Allocator::CalculateMemoryPressure below); presumably used by
// the garbage collector to decide how aggressively to reclaim -- confirm
// with callers.
225enum class MemoryPressure {
226 kLight,
227 kMedium,
228 kCritical,
229};
230
231// The Broom allocator interface. The allocator state is assumed to be
232// thread_local. This allocator interface is private to Broom and shouldn't be
233// exposed outside of this library. However, in order for the garbage collector
234// to work, all of this functionality must be provided.
// NOTE(review): the doc extraction dropped many original lines of this class
// (235 class declaration, 239, 242, 248, 258, 261-262, 267, 271-272,
// 281-295, 302) -- the static Pin/Unpin helpers and parts of
// CalculateMemoryPressure are not visible here; consult the real header.
236 public:
237 // XXX(gc): Decouple the metadata format from the allocator interface?
// Sizes of the per-allocation metadata records; the right-hand side of
// kFrontMetadataSize (original line 239) and kTotalMetadataSize (original
// line 242) were dropped by the extraction.
238 static constexpr const int kFrontMetadataSize =
240 static constexpr const int kBackMetadataSize = sizeof(AllocatorBackMetadata);
241 static constexpr const int kTotalMetadataSize =
243 // XXX(gc): Decouple space size from the allocator interface?
244 static constexpr const size_t kDefaultSpaceSize = 2 * MB;
245 static constexpr const size_t kDefaultLargeSpaceSize = 64 * MB;
246 static constexpr const size_t kDefaultLargeSpaceThreshold = 1 * MB;
// The chosen MemoryPermissions value (original line 248) was dropped by the
// extraction.
247 static constexpr const MemoryPermissions kDefaultPermissions =
249 static constexpr const size_t kGiantMappingSize = kDefaultLargeSpaceSize;
250
251 virtual ~Allocator() = default;
252
// Grows the allocator's backing memory by at least requested_size and
// points *space_to_update at the space to use; semantics are
// implementation-defined (see FreeListAllocator).
253 virtual bool Grow(size_t requested_size, MemorySpace** space_to_update) = 0;
254 virtual size_t LargeSpaceThreshold() const = 0;
255
256 virtual void* Allocate(size_t requested_size) = 0;
257 virtual void Dispose(void* definitely_uintptr) = 0;
// Resolves a possibly-interior pointer to its allocation's base pointer.
// The signature line (original line 258) was dropped by the extraction; per
// the documentation index it is
// `virtual const void* GetBasePointerOfMaybeInnerPointer(`.
259 const void* maybe_inner) const = 0;
260
// MemoryPressure CalculateMemoryPressure() const -- signature (original
// lines 261-262) and the non-critical return paths (original line 267,
// presumably returning kMedium/kLight) were dropped by the extraction.
263 double alloc_to_mapped_ratio =
264 static_cast<double>(stats_.allocated_bytes) /
265 static_cast<double>(stats_.total_mapped_bytes);
// >= 75% of mapped bytes in live allocations counts as critical pressure.
266 if (100 * alloc_to_mapped_ratio >= 75.) return MemoryPressure::kCritical;
268 }
269
270 protected:
// AllocatorStatistics struct: the declaration and the total_mapped_bytes
// member (original lines 271-272) were dropped by the extraction;
// total_mapped_bytes is referenced by CalculateMemoryPressure above.
273 size_t allocated_bytes = 0;
274 size_t free_bytes = 0;
275 };
276 Allocator() = default;
277
278 // XXX(gc): Provide different implementations for different allocators? This
279 // is especially relevant if the metadata is decoupled from the allocator
280 // interface.
// Returns a mask that rounds an address down to its space's base.
// NOTE(review): IsAligned<2> only asserts that space_size is even, but the
// message (and the ~(space_size - 1) mask) require a power of two; a
// has_single_bit-style check would match the stated intent -- confirm.
296 static size_t SpaceMask(size_t space_size) {
297 BASSERT(IsAligned<2>(space_size), "space_size ({}) must be a power of 2",
298 space_size);
299 return ~(space_size - 1);
300 }
301
// stats_ member declaration (original line 302) was dropped by the
// extraction.
303 friend class GarbageCollector;
304};
305
306// The FreeListAllocator class implements an allocator which holds free lists of
307// free'd allocations in the form of a B-tree ordered by the size of the free'd
308// allocation.
// NOTE(review): the doc extraction dropped several original lines of this
// class (309 class declaration, 311-321 constructor, 329, 356-375 partial
// helper signatures, 386, 390-395, 400-406 member declarations) -- consult
// the real header; the documentation index names the missing members.
310 public:
322 virtual ~FreeListAllocator();
323
324 virtual bool Grow(size_t requested_size,
325 MemorySpace** space_to_update) override;
326 virtual void* Allocate(size_t requested_size) override;
327 virtual void Dispose(void* definitely_uintptr) override;
// Body (original line 329) was dropped by the extraction; presumably
// returns the configured large-space threshold -- confirm.
328 virtual size_t LargeSpaceThreshold() const override {
330 }
331 virtual const void* GetBasePointerOfMaybeInnerPointer(
332 const void* maybe_inner) const override;
333
334 protected:
335 // Ensure our B-trees are compared by size.
336 using FreeList = absl::btree_multiset<MemoryRegion, MemoryRegion::SetCompare>;
337#ifdef BROOM_DEBUG_SLOW
338 // Debugging helpers. Both of these traverse the trees and should be used
339 // sparingly in strategic locations.
340 bool VerifyLists() const;
341 void PrintList(std::string prefix, const FreeList& free_list) const;
342#endif
343 // Find the exact MemoryRegion we are looking for. Simply calling find is not
344 // sufficient as it will find the first matching size. Instead, we need to
345 // keep iterating through the matching sizes until we find the correct region.
// NOTE(review): for a multiset, find() is not guaranteed to return the
// FIRST equivalent element; if it returns a later one, equal-size regions
// before it are never visited. Iterating over equal_range(region) would be
// unambiguous -- confirm absl::btree_multiset::find semantics.
346 FreeList::iterator FindExactRegion(FreeList& free_list,
347 const MemoryRegion region) {
348 FreeList::iterator it = free_list.find(region);
349 while (it != free_list.end() && region != *it) {
350 ++it;
351 }
352 return it;
353 }
354 // Allocation/disposal helpers.
355 void* SplitFreeListEntry(FreeList& free_list, FreeList::iterator& pos,
356 size_t requested_size);
360 void* TryAllocateExactOrFindNext(FreeList& free_list, FreeList::iterator& pos,
361 size_t requested_size);
// The leading signature fragments of the AddOrCoalesce* helpers (original
// lines 362-363, 366-367, 370, 372) were dropped by the extraction; per
// the documentation index these are AddOrCoalesceFreeList,
// AddOrCoalesceLargeFreeList and AddOrCoalesceFreeListImpl, each taking
// (current, previous, next, coalesced) MemoryRegions.
364 const MemoryRegion next,
365 const MemoryRegion coalesced);
368 const MemoryRegion next,
369 const MemoryRegion coalesced);
371 const MemoryRegion current,
373 const MemoryRegion next,
374 const MemoryRegion coalesced);
376
377#ifdef BROOM_FOR_TESTING
378 // These shouldn't be used in Broom and only exist for testing.
// Note these return by value, copying the whole B-tree; acceptable since
// they are test-only.
379 const FreeList GetFreeList() { return free_list_; }
380 const FreeList GetLargeFreeList() { return large_free_list_; }
381#endif
382
383 // XXX(gc): Experiment with this.
384 static constexpr const size_t kLargeSpaceThreshold = 1 * MB;
// Body (original line 386) was dropped by the extraction.
385 static constexpr size_t MaximumAllocationSize() {
387 }
388 // Returns the requested allocation size aligned to pointer size, along with
389 // the added size needed to store the allocator metadata.
396
397 // XXX(gc): Split into multiple spaces?
398 std::vector<MemorySpace> spaces_;
399 std::vector<MemorySpace> large_spaces_;
// Remaining member declarations (original lines 400-406: free_list_,
// large_free_list_, current_space_, current_large_space_, per the
// documentation index) were dropped by the extraction.
407
408#ifdef BROOM_FOR_TESTING
409 // Needed to expose internal information of the allocator for testing
410 // purposes.
411 friend class TestFreeListAllocator;
412#endif
413};
414} // namespace broom
void MarkAllocated(size_t size)
Definition allocator.h:132
void SetSize(size_t size)
Definition allocator.h:128
static constexpr size_t MaxSize()
Definition allocator.h:146
bool AttemptGrow(uint64_t howmuch)
Definition allocator.h:139
void Pin(uint32_t how_many)
Definition allocator.h:197
bool AttemptGrow(size_t howmuch)
Definition allocator.h:192
bool IsCurrentlyAllocated() const
Definition allocator.h:172
void SetSize(size_t size)
Definition allocator.h:181
void MarkAllocated(size_t size)
Definition allocator.h:182
static void UnpinBasePointer(const void *pointer)
Definition allocator.h:286
virtual const void * GetBasePointerOfMaybeInnerPointer(const void *maybe_inner) const =0
static constexpr const size_t kGiantMappingSize
Definition allocator.h:249
static constexpr const int kFrontMetadataSize
Definition allocator.h:238
static constexpr const size_t kDefaultLargeSpaceSize
Definition allocator.h:245
MemoryPressure CalculateMemoryPressure() const
Definition allocator.h:261
static void PinBasePointer(const void *pointer, uint32_t how_many)
Definition allocator.h:281
static size_t SpaceMask(size_t space_size)
Definition allocator.h:296
static constexpr const int kTotalMetadataSize
Definition allocator.h:241
Allocator()=default
static constexpr const int kBackMetadataSize
Definition allocator.h:240
virtual void Dispose(void *definitely_uintptr)=0
virtual void * Allocate(size_t requested_size)=0
virtual size_t LargeSpaceThreshold() const =0
virtual bool Grow(size_t requested_size, MemorySpace **space_to_update)=0
static constexpr const size_t kDefaultSpaceSize
Definition allocator.h:244
virtual ~Allocator()=default
static constexpr const MemoryPermissions kDefaultPermissions
Definition allocator.h:247
AllocatorStatistics stats_
Definition allocator.h:302
static constexpr const size_t kDefaultLargeSpaceThreshold
Definition allocator.h:246
static bool IsPinnedBasePointer(const void *pointer)
Definition allocator.h:291
virtual void * Allocate(size_t requested_size) override
virtual size_t LargeSpaceThreshold() const override
Definition allocator.h:328
static constexpr size_t GetRealAllocationSize(size_t requested_size)
Definition allocator.h:390
void * TryAllocateExactOrFindNext(FreeList &free_list, FreeList::iterator &pos, size_t requested_size)
virtual bool Grow(size_t requested_size, MemorySpace **space_to_update) override
MemorySpace * current_large_space_
Definition allocator.h:404
FreeListAllocator(size_t space_size=kDefaultSpaceSize, size_t large_space_size=kDefaultLargeSpaceSize, size_t large_space_threshold=kDefaultLargeSpaceThreshold)
Definition allocator.h:311
static constexpr const size_t kLargeSpaceThreshold
Definition allocator.h:384
MemorySpace * current_space_
Definition allocator.h:403
void AddOrCoalesceFreeListImpl(FreeList &free_list, const MemoryRegion current, const MemoryRegion previous, const MemoryRegion next, const MemoryRegion coalesced)
std::vector< MemorySpace > spaces_
Definition allocator.h:398
void FillSpaceAndAddToFreeList(FreeList *list, MemorySpace *space)
void * TryAllocateFromLargeFreeList(size_t requested_size)
void AddOrCoalesceFreeList(const MemoryRegion current, const MemoryRegion previous, const MemoryRegion next, const MemoryRegion coalesced)
void * AllocateRawPointer(size_t requested_size)
void DisposeRawPointer(void *definitely_pointer)
FreeList::iterator FindExactRegion(FreeList &free_list, const MemoryRegion region)
Definition allocator.h:346
void * TryAllocateFromFreeList(size_t requested_size)
absl::btree_multiset< MemoryRegion, MemoryRegion::SetCompare > FreeList
Definition allocator.h:336
void AddOrCoalesceLargeFreeList(const MemoryRegion current, const MemoryRegion previous, const MemoryRegion next, const MemoryRegion coalesced)
virtual void Dispose(void *definitely_uintptr) override
static constexpr size_t MaximumAllocationSize()
Definition allocator.h:385
void * SplitFreeListEntry(FreeList &free_list, FreeList::iterator &pos, size_t requested_size)
virtual const void * GetBasePointerOfMaybeInnerPointer(const void *maybe_inner) const override
void * TryAllocateFromFreeListImpl(FreeList &free_list, size_t requested_size)
std::vector< MemorySpace > large_spaces_
Definition allocator.h:399
size_t Size() const
Definition allocator.h:35
UintPtr Start() const
Definition allocator.h:36
UintPtr End() const
Definition allocator.h:37
MemoryRegion(const void *start, const void *end)
Definition allocator.h:31
void IncrementStart(size_t how_much)
Definition allocator.h:43
bool operator==(const MemoryRegion &other) const
Definition allocator.h:49
MemoryRegion(const void *start, size_t size)
Definition allocator.h:25
MemoryRegion(UintPtr start, UintPtr end)
Definition allocator.h:27
bool AttemptGrow(size_t howmuch)
Definition allocator.h:40
bool operator!=(const MemoryRegion &other) const
Definition allocator.h:52
~MemoryRegion()=default
std::integral_constant< decltype(&SetCompareFn), &SetCompareFn > SetCompare
Definition allocator.h:55
void * StartPtr() const
Definition allocator.h:38
void * EndPtr() const
Definition allocator.h:39
MemorySpace(const void *start, size_t size)
Definition allocator.h:74
UintPtr Top() const
Definition allocator.h:78
size_t RemainingBytes() const
Definition allocator.h:99
bool IsValid() const
Definition allocator.h:101
UintPtr Allocate(size_t size)
Definition allocator.h:81
~MemorySpace()=default
#define BASSERT_EQ(lhs, rhs, m,...)
Definition macros.h:58
#define BASSERT(x, m,...)
Definition macros.h:57
#define BASSERT_GE(lhs, rhs, m,...)
Definition macros.h:59
#define BENSURE_GT(lhs, rhs, m,...)
Definition macros.h:30
#define BENSURE_LE(lhs, rhs, m,...)
Definition macros.h:32
#define BASSERT_LT(lhs, rhs, m,...)
Definition macros.h:62
#define BROOM_UNUSED
Definition macros.h:109
#define BASSERT_GT(lhs, rhs, m,...)
Definition macros.h:60
#define BENSURE_LT(lhs, rhs, m,...)
Definition macros.h:34
#define BENSURE_EQ(lhs, rhs, m,...)
Definition macros.h:26
AllocatorCommonMetadata AllocatorBackMetadata
Definition allocator.h:166
constexpr const size_t MB
Definition globals.h:22
MemoryPermissions
Definition allocator.h:111
constexpr bool AddOverflowChecked(T lhs, T rhs, T &destination)
Definition globals.h:92
constexpr const int kPointerSize
Definition globals.h:17
constexpr const uintptr_t kNullUintPtr
Definition globals.h:11
uintptr_t UintPtr
Definition globals.h:14
constexpr T SafeCast(U v)
Definition globals.h:48
MemoryPressure
Definition allocator.h:225
std::queue< T, broom::deque< T > > queue
Definition broom-queue.h:12
static T RelaxedFetchAdd(T *where, T howmuch)
Definition atomics.h:15
static T RelaxedFetchSub(T *where, T howmuch)
Definition atomics.h:22
static T RelaxedLoad(T *source)
Definition atomics.h:34