1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201
|
#include "utils/HeapAllocator.h"
namespace {
// Heap growth tuning: the heap grows in 1 MiB steps, and a single resize never
// adds more than 20 MiB at once.
const size_t HEAP_SIZE_INCREASE = 1 * 1024 * 1024; // Always increment in 1MB steps
const size_t HEAP_MAX_INCREASE = 20 * 1024 * 1024; // never increase heap size by more than 20MB

/**
 * @brief Finds the element preceding the upper bound of @c value in a sorted range.
 *
 * In other words: returns an iterator to the last element that is not greater
 * than @c value, or @c last when no such element exists (empty range, or every
 * element is greater than @c value).
 */
template <typename TIter, typename T>
TIter get_element_before(TIter first, TIter last, const T& value) {
	// upper_bound on an empty range yields first, which the guard below handles.
	auto pos = std::upper_bound(first, last, value);
	if (pos == first) {
		// Nothing precedes the found position => no matching element
		return last;
	}
	// Step back once to land on the element just before the upper bound
	return std::prev(pos);
}
} // namespace
namespace util {
HeapAllocator::HeapAllocator(const HeapAllocator::HeapResizer& creator) : _heapResizer(creator) {
	// Start with a single growth step worth of heap.
	// (NOTE: "_lastSizeIncraese" is a typo inherited from the class declaration in the header.)
	_heapSize = HEAP_SIZE_INCREASE;
	_lastSizeIncraese = HEAP_SIZE_INCREASE;
	// Ask the owner to actually create the backing storage
	_heapResizer(_heapSize);
	// The entire heap starts out as one big free range
	MemoryRange initialRange;
	initialRange.offset = 0;
	initialRange.size = _heapSize;
	addFreeRange(initialRange);
}
/**
 * Allocates @c size units from the heap and returns the offset of the block.
 *
 * Uses a simple first-fit scan over the sorted free list. Not very efficient
 * but it should do the job well for our purposes. If no free range is large
 * enough the heap is grown (via _heapResizer) and the allocation is retried.
 */
size_t HeapAllocator::allocate(size_t size) {
	// Do a simple first-fit algorithm over the free ranges
	for (auto iter = _freeRanges.begin(); iter != _freeRanges.end(); ++iter) {
		if (iter->size >= size) {
			// Found a match! Carve the allocation off the front of this free range
			auto offset = iter->offset;
			iter->offset += size;
			iter->size -= size;
			if (iter->size == 0) {
				// This range is now empty and must be removed. Since we won't be using this iterator anymore it can be
				// safely erased
				_freeRanges.erase(iter);
			}
			MemoryRange allocated;
			allocated.size = size;
			allocated.offset = offset;
			addAllocatedRange(allocated);
			return offset;
		}
	}
	// No free range found => increase size of heap
	// We double the increase every time we run out of memory in order to reduce reallocation times
	_lastSizeIncraese = std::min(HEAP_MAX_INCREASE, 2 * _lastSizeIncraese);
	// Make sure that our allocation can actually fit into the newly added block even when the request exceeds a
	// single growth step. BUGFIX: this previously used HEAP_SIZE_INCREASE instead of size; since _lastSizeIncraese is
	// always >= HEAP_SIZE_INCREASE that max() was a no-op and oversized requests had to grow the heap repeatedly
	// through recursive retries before the (merged) trailing free range became large enough.
	auto increase = std::max(size, _lastSizeIncraese);
	auto lastOffset = _heapSize;
	_heapSize += increase;
	_heapResizer(_heapSize);
	// Publish the newly added region as free; addFreeRange merges it with a trailing free range if one exists
	MemoryRange newFree;
	newFree.offset = lastOffset;
	newFree.size = increase;
	addFreeRange(newFree);
	// Call us again; the first-fit scan is now guaranteed to succeed
	return allocate(size);
}
/**
 * Inserts @c range into the sorted free list, coalescing it with an adjacent
 * free range on the left and/or right so the list never contains touching
 * ranges (verified by checkRangesMerged in debug builds).
 */
void HeapAllocator::addFreeRange(const HeapAllocator::MemoryRange& range) {
	if (range.size == 0) {
		// Ignore empty ranges
		return;
	}
	Assertion(std::is_sorted(_freeRanges.begin(), _freeRanges.end()),
		"Free ranges were not sorted before adding a free range.");
	// left: the free range that starts before "range" (or end() if none);
	// right: the first free range that starts after "range" (or end() if none).
	// Ranges compare by offset only, so these bracket the insertion position.
	auto left = get_element_before(_freeRanges.begin(), _freeRanges.end(), range);
	auto right = std::upper_bound(_freeRanges.begin(), _freeRanges.end(), range);
	if (left != _freeRanges.end()) {
		// This new range may be just after another
		if (left->offset + left->size == range.offset) {
			// Absorb the new range into its left neighbor
			left->size += range.size;
			// After this operation the two ranges may be connected. If that happens then they need to be merged as well
			// (erasing "right", which is after "left" in the vector, leaves "left" valid)
			if (right != _freeRanges.end() && check_connected_range(*left, *right)) {
				left->size += right->size;
				_freeRanges.erase(right);
			}
			checkRangesMerged();
			// The range has been added by merging it with another
			return;
		}
	}
	if (right != _freeRanges.end()) {
		// This new range may be just before another
		if (range.offset + range.size == right->offset) {
			// Grow the right neighbor downwards to cover the new range
			right->offset = range.offset;
			right->size += range.size;
			// After this operation the two ranges may be connected. If that happens then they need to be merged as well
			if (left != _freeRanges.end() && check_connected_range(*left, *right)) {
				left->size += right->size;
				_freeRanges.erase(right);
			}
			checkRangesMerged();
			// The range has been added by merging it with another
			return;
		}
	}
	// If we are here then we found no range to merge the new range into, so
	// insert it as a standalone range at its sorted position
	_freeRanges.insert(right, range);
	checkRangesMerged();
}
/**
 * Records @c range as allocated, keeping the allocated-ranges vector sorted by
 * offset for fast binary search in free().
 */
void HeapAllocator::addAllocatedRange(const HeapAllocator::MemoryRange& range) {
	Assertion(std::is_sorted(_allocatedRanges.begin(), _allocatedRanges.end()), "Allocated ranges are not sorted!");
	Assertion(!std::binary_search(_allocatedRanges.begin(), _allocatedRanges.end(), range),
		"Allocated ranges already contain the specified range!");
	// Insert at the sorted position so the vector stays ordered. An end()
	// position is perfectly valid here and simply appends to the vector.
	const auto insertPos = std::upper_bound(_allocatedRanges.begin(), _allocatedRanges.end(), range);
	_allocatedRanges.insert(insertPos, range);
	Assertion(std::is_sorted(_allocatedRanges.begin(), _allocatedRanges.end()), "Allocated ranges are not sorted!");
}
void HeapAllocator::free(size_t offset) {
// We use a dummy range for finding the entry in our allocated ranges
MemoryRange range;
range.offset = offset;
auto it = std::lower_bound(_allocatedRanges.begin(), _allocatedRanges.end(), range);
// Make sure that the range is valid
Assertion(it != _allocatedRanges.end(), "Specified offset was not found in the allocated ranges!");
Assertion(it->offset == offset, "Specified offset does not point to the start of an allocated range!");
// Copy the range out of the vector before erasing it so we can add it to the free ranges
auto freedRange = *it;
_allocatedRanges.erase(it);
addFreeRange(freedRange);
}
/// @brief Number of currently outstanding allocations (handed out and not yet freed).
size_t HeapAllocator::numAllocations() const {
	return _allocatedRanges.size();
}
/**
 * @brief Determines whether two ranges touch, i.e. @c right begins exactly
 * where @c left ends, with no gap in between.
 */
bool HeapAllocator::check_connected_range(const MemoryRange& left, const MemoryRange& right) {
	return right.offset == left.offset + left.size;
}
/**
 * Debug-only sanity check: asserts that no two consecutive free ranges touch,
 * i.e. every adjacent pair was properly merged by addFreeRange.
 */
void HeapAllocator::checkRangesMerged() {
	// This is a debug only function because its values are only used in debug and linters will get tripped up otherwise.
#ifndef NDEBUG
	size_t previousEnd = 0;
	bool seenAny = false;
	for (const auto& range : _freeRanges) {
		if (seenAny) {
			// A range starting exactly at the previous range's end should have been merged
			Assertion(previousEnd != range.offset, "Found unmerged ranges at offset " SIZE_T_ARG "!", previousEnd);
		}
		previousEnd = range.offset + range.size;
		seenAny = true;
	}
#endif
}
// Orders ranges by their starting offset only; size is deliberately ignored so
// the sorted vectors can be binary-searched with an offset-only key (see free()).
bool HeapAllocator::MemoryRange::operator<(const HeapAllocator::MemoryRange& other) const {
	return offset < other.offset;
}
// Two ranges compare equal when they start at the same offset; size is
// intentionally ignored, consistent with the offset-only ordering of operator<.
bool HeapAllocator::MemoryRange::operator==(const HeapAllocator::MemoryRange& other) const {
	return offset == other.offset;
}
}
|