85#ifndef LASS_GUARDIAN_OF_INCLUSION_UTIL_ALLOCATOR_H
86#define LASS_GUARDIAN_OF_INCLUSION_UTIL_ALLOCATOR_H
// Trait: detects whether &Allocator::allocate converts to the function(-pointer)
// type AllocatorFun, using the classic sizeof-based overload-resolution probe.
// NOTE(review): extraction garbling -- the struct's opening/closing braces and
// any access specifiers are missing from this view; comments only added here.
105 template <
typename Allocator,
typename AllocatorFun>
106 struct IsCompatibleAllocator
// overload resolution picks the meta::True overload iff &Allocator::allocate
// is convertible to AllocatorFun; the ellipsis overload is the fallback.
108 static meta::True test(AllocatorFun);
109 static meta::False test(...);
// value == 1 exactly when the True overload was selected (compared by sizeof,
// so neither test() ever needs a definition).
110 enum { value = (
sizeof(test(&Allocator::allocate)) ==
sizeof(meta::True)) };
111 typedef typename meta::Bool<value>::Type Type;
// Trait: true if Allocator is a *fixed-size* allocator, i.e. its allocate
// member matches void*(*)() -- an allocate() taking no size argument.
// NOTE(review): matching against a free-function-pointer type suggests
// allocate is expected to be static (or the trait relies on impl details of
// IsCompatibleAllocator) -- confirm against the original file.
117 template <
typename Allocator>
118 struct IsFixedAllocator:
public impl::IsCompatibleAllocator<Allocator, void*(*)()> {};
// Trait: true if Allocator is a *variable-size* allocator, i.e. its allocate
// member matches void*(*)(size_t) -- an allocate(size) taking the byte count.
123 template <
typename Allocator>
124 struct IsVariableAllocator:
public impl::IsCompatibleAllocator<Allocator, void*(*)(size_t)> {};
// Adaptor that implements a class' operator new/delete (scalar, array,
// nothrow, and placement forms) on top of a singleton VariableAllocator
// ("Use an Allocator to implement a class' new and delete").
// NOTE(review): extraction garbling -- the class header, braces, and several
// statements are missing from this view (e.g. the null-check guarding each
// `throw std::bad_alloc();`, the `return result;` lines, and the bodies of
// the placement forms, which presumably return `mem` / do nothing).
// Comments only; code untouched.
140 typename VariableAllocator,
141 int destructionPriority = destructionPriorityInternalAllocators
// throwing scalar new: allocate via the singleton, throw bad_alloc on failure
146 static void*
operator new(std::size_t size)
148 void* result = allocator()->allocate(size);
151 throw std::bad_alloc();
// nothrow scalar new: pass the allocator's result (possibly null) through
156 static void*
operator new(std::size_t size, std::nothrow_t)
noexcept
160 return allocator()->allocate(size);
// placement scalar new: memory is caller-supplied, nothing to allocate
168 static void*
operator new(std::size_t,
void* mem)
// throwing array new
173 static void*
operator new[](std::size_t size)
175 void* result = allocator()->allocate(size);
178 throw std::bad_alloc();
// nothrow array new
183 static void*
operator new[](std::size_t size, std::nothrow_t)
noexcept
187 return allocator()->allocate(size);
// placement array new
195 static void*
operator new[](std::size_t,
void* mem)
// sized scalar delete: size is forwarded so the allocator can bin by size
200 static void operator delete(
void* mem, std::size_t size)
202 allocator()->deallocate(mem, size);
205 static void operator delete(
void* mem, std::size_t size, std::nothrow_t)
207 allocator()->deallocate(mem, size);
// placement delete counterpart: no-op (memory was never ours)
210 static void operator delete(
void*, std::size_t, std::nothrow_t,
void*)
214 static void operator delete[](
void* mem, std::size_t size)
216 allocator()->deallocate(mem, size);
219 static void operator delete[](
void* mem, std::size_t size, std::nothrow_t)
221 allocator()->deallocate(mem, size);
224 static void operator delete[](
void*, std::size_t, std::nothrow_t,
void*)
// access to the process-wide VariableAllocator singleton (body not visible;
// presumably a util::Singleton with destructionPriority -- confirm)
230 static VariableAllocator* allocator()
// AllocatorMalloc: the bottom-most variable-size allocator, a thin wrapper
// over ::malloc. Guarantees the default max_align_t alignment.
// NOTE(review): extraction garbling -- class header, braces, and the bodies
// of both deallocate overloads are missing (presumably `::free(mem);` in
// each -- confirm against the original file). Comments only.
243 static constexpr size_t alignment =
alignof(std::max_align_t);
// allocate `size` bytes; returns nullptr on failure (plain malloc semantics,
// no throwing here -- see AllocatorThrow for the throwing adaptor)
245 void* allocate(
size_t size)
247 return ::malloc(size);
// deallocate without a size hint
249 void deallocate(
void *mem)
// deallocate with a size hint; the size is ignored by malloc/free
253 void deallocate(
void* mem,
size_t)
// AllocatorLocked: thread-safety adaptor. Forwards every allocate/deallocate
// call to the base Allocator while holding a Locker (RAII guard) on lock_.
// NOTE(review): extraction garbling -- the template header (Allocator, Lock
// params), braces, default ctor, and the lock_ member declaration are missing
// from this view. Comments only.
269 typename Locker = util::Locker<Lock>
271class AllocatorLocked:
public Allocator
// fixed-size construction: forwards the element size to the wrapped Allocator
278 AllocatorLocked(
size_t size):
// fixed-size allocate, serialized on lock_
284 Locker locker(lock_);
285 return Allocator::allocate();
// variable-size allocate, serialized on lock_
287 void* allocate(
size_t size)
289 Locker locker(lock_);
290 return Allocator::allocate(size);
292 void deallocate(
void* mem)
294 Locker locker(lock_);
295 Allocator::deallocate(mem);
297 void deallocate(
void* mem,
size_t size)
299 Locker locker(lock_);
300 Allocator::deallocate(mem, size);
// AllocatorPerThread: thread-safety adaptor that gives each thread its own
// Allocator instance, reached through allocator_ (presumably a thread-local
// storage wrapper, per the TLS primitive referenced elsewhere in this file --
// confirm). No locking is needed because no state is shared across threads.
// NOTE(review): extraction garbling -- template header, braces, member
// declaration of allocator_, and ctor initializer lists are missing.
319class AllocatorPerThread
322 AllocatorPerThread():
// fixed-size construction: element size forwarded to each per-thread instance
326 AllocatorPerThread(
size_t size):
// fixed-size allocate on this thread's instance
332 return allocator_->allocate();
334 void* allocate(
size_t size)
336 return allocator_->allocate(size);
338 void deallocate(
void* mem)
340 return allocator_->deallocate(mem);
342 void deallocate(
void* mem,
size_t size)
344 return allocator_->deallocate(mem, size);
// A variable-size allocator built on top of fixed-size allocators: one
// FixedAllocator per distinct request size, created lazily and kept in a
// std::map keyed on size. deallocate must be called with the same size that
// was passed to allocate.
// NOTE(review): not thread-safe as shown -- fixedAllocator() mutates the map
// without synchronization; wrap in AllocatorLocked/AllocatorPerThread if
// shared. Extraction garbling: class header and braces missing.
359 typename FixedAllocator
364 void* allocate(
size_t size)
366 return fixedAllocator(size).allocate();
368 void deallocate(
void* mem,
size_t size)
370 fixedAllocator(size).deallocate(mem);
373 typedef std::map<size_t, FixedAllocator> TFixedAllocators;
// find-or-create the fixed allocator serving exactly `size`-byte blocks
375 FixedAllocator& fixedAllocator(
size_t size)
377 typename TFixedAllocators::iterator allocator = fixedAllocators_.find(size);
378 if (allocator == fixedAllocators_.end())
380 allocator = fixedAllocators_.insert(
381 std::make_pair(size, FixedAllocator(size))).first;
383 return allocator->second;
386 TFixedAllocators fixedAllocators_;
// AllocatorFixed: fixes a variable-size allocator to one size -- adapts a
// VariableAllocator into the fixed-size allocator concept by remembering the
// block size (size_) chosen at construction.
// NOTE(review): extraction garbling -- braces, the allocate() signature line,
// the ctor initializer list, and the size_ member declaration are missing.
397class AllocatorFixed:
private VariableAllocator
// alignment guarantee is inherited from the underlying variable allocator
400 static constexpr size_t alignment = VariableAllocator::alignment;
402 AllocatorFixed(
size_t size):
// allocate: always requests the fixed size_ from the variable allocator
408 return VariableAllocator::allocate(size_);
410 void deallocate(
void* mem)
412 VariableAllocator::deallocate(mem, size_);
// BinnerOne: binning policy with one bin per byte size -- bin(n) == n - 1,
// so size(bin) == bin + 1 (exact fit, maximal number of bins).
// NOTE(review): struct header/braces and the size() body are missing from
// this view (presumably `return bin + 1;` -- confirm).
424 static size_t bin(
size_t size)
// clamp size 0 into bin 0 so a zero-byte request is still servable
426 return size > 0 ? size - 1 : 0;
428 static size_t size(
size_t bin)
// BinnerPower2: binning policy with power-of-two bin sizes; bin(n) is the
// smallest b with 2^b >= n (found by linear search), size(b) == 2^b.
// NOTE(review): struct header/braces, the `size_t bin = 0;` init and the
// `++bin;`/`return bin;` loop body lines are missing from this view.
438 static size_t bin(
size_t size)
441 while (size > BinnerPower2::size(bin))
447 static size_t size(
size_t bin)
449 return size_t(1) << bin;
// BinnerPadded<multiple>: binning policy that rounds sizes up to multiples of
// `multiple`; bin(n) = ceil(n/multiple) - 1, size(b) = (b+1)*multiple.
// NOTE(review): struct header/braces are missing from this view.
455template <
size_t multiple>
458 static size_t bin(
size_t size)
// integer ceil-divide, with size 0 mapped to bin 0
460 return size > 0 ? (size - 1) / multiple : 0;
462 static size_t size(
size_t bin)
464 return (bin + 1) * multiple;
// Degenerate paddings: multiple == 0 or 1 means no padding at all, so both
// specializations simply reuse BinnerOne (also avoids a division by zero for
// the <0> case).
471struct BinnerPadded<0> :
public BinnerOne
478struct BinnerPadded<1> :
public BinnerOne
// AllocatorBinned: variable-size allocator that serves small requests
// (<= maxBinSize) from an array of FixedAllocators, one per bin as defined by
// the Binner policy, and forwards large requests to the VariableAllocator
// base. deallocate must receive the same size passed to allocate, so the
// right bin (or the base) is chosen again.
// NOTE(review): extraction garbling -- template header start, braces, default
// ctor, dtor signature, the initFixed() declaration/loop bodies, and the
// exception-unwind cleanup inside the init loops are missing from this view.
486 typename FixedAllocator,
487 size_t maxBinSize = 128,
488 typename Binner = BinnerOne,
491class AllocatorBinned:
public VariableAllocator
// copy ctor: deep-copies the per-bin fixed allocators from `other`
498 AllocatorBinned(
const AllocatorBinned& other)
500 copyInitFixed(other);
// dtor: destroy all bins and release the bin array
504 destroyFixed(numBins());
506 void* allocate(
size_t size)
508 if (size > maxBinSize)
// large request: bypass the bins entirely
510 return VariableAllocator::allocate(size);
512 return fixedAllocators_[Binner::bin(size)].allocate();
514 void deallocate(
void* mem,
size_t size)
516 if (size > maxBinSize)
518 VariableAllocator::deallocate(mem, size);
522 fixedAllocators_[Binner::bin(size)].deallocate(mem);
// non-copyable by assignment
526 AllocatorBinned& operator=(
const AllocatorBinned&);
528 size_t numBins()
const
530 return Binner::bin(maxBinSize) + 1;
// allocFixed: raw storage for the bin array comes from the base allocator
534 fixedAllocators_ =
static_cast<FixedAllocator*
>(
535 VariableAllocator::allocate(numBins() *
sizeof(FixedAllocator)));
536 if (!fixedAllocators_)
538 throw std::bad_alloc();
// initFixed: placement-construct one FixedAllocator per bin, each sized by
// the Binner policy
546 const size_t n = numBins();
547 for (
size_t i = 0; i < n; ++i)
551 new(&fixedAllocators_[i]) FixedAllocator(Binner::size(i));
// copyInitFixed: placement-copy-construct each bin from `other`'s bins
560 void copyInitFixed(
const AllocatorBinned& other)
565 const size_t n = numBins();
566 for (
size_t i = 0; i < n; ++i)
570 new(&fixedAllocators_[i]) FixedAllocator(other.fixedAllocators_[i]);
// destroyFixed: destroy bins [0, i) in reverse order, then free the array
579 void destroyFixed(
size_t i)
585 fixedAllocators_[--i].~FixedAllocator();
587 VariableAllocator::deallocate(fixedAllocators_, numBins() *
sizeof(FixedAllocator));
590 FixedAllocator* fixedAllocators_;
// AllocatorStaticFixed: adapts a FixedAllocator to a compile-time-fixed size
// (presumably a `size_t size` non-type template parameter on the line that is
// missing from this view -- confirm). Only the header and default-ctor line
// survived extraction; comments only.
600 typename FixedAllocator,
603class AllocatorStaticFixed:
public FixedAllocator
606 AllocatorStaticFixed():
// AllocatorThrow: adaptor that converts the wrapped Allocator's
// null-on-failure convention into throwing std::bad_alloc.
// NOTE(review): extraction garbling -- template header, braces, default ctor,
// ctor initializer list, and the `return p;` inside each if-body are missing.
621class AllocatorThrow:
public Allocator
628 AllocatorThrow(
size_t size):
// fixed-size allocate: return on success, otherwise throw
634 if (
void* p = Allocator::allocate())
638 throw std::bad_alloc();
// variable-size allocate: same contract
640 void* allocate(
size_t size)
642 if (
void* p = Allocator::allocate(size))
646 throw std::bad_alloc();
// AllocatorNoThrow: the inverse adaptor of AllocatorThrow -- converts a
// throwing Allocator into a null-on-failure one (the allocate calls are
// presumably wrapped in try/catch blocks returning nullptr; those lines are
// missing from this view -- confirm against the original file).
659class AllocatorNoThrow:
public Allocator
666 AllocatorNoThrow(
size_t size):
674 return Allocator::allocate();
681 void* allocate(
size_t size)
685 return Allocator::allocate(size);
// Singleton adaptor: presents allocator-concept member functions that all
// forward to a shared VariableAllocator instance obtained via allocator()
// (body not visible; presumably a util::Singleton honoring
// destructionPriority -- confirm).
// NOTE(review): extraction garbling -- class header and braces are missing.
701 typename VariableAllocator,
702 int destructionPriority = destructionPriorityInternalAllocators
// fixed-size allocate on the shared instance
709 return allocator()->allocate();
711 void* allocate(
size_t size)
713 return allocator()->allocate(size);
715 void deallocate(
void* mem)
717 return allocator()->deallocate(mem);
719 void deallocate(
void* mem,
size_t size)
721 return allocator()->deallocate(mem, size);
723 VariableAllocator* allocator()
// AllocatorStats: diagnostic adaptor that counts allocations/deallocations
// per size and, at destruction, logs the tallies and warns about any
// unbalance (leak or double-free symptom).
// NOTE(review): extraction garbling -- template header, braces, default ctor,
// the fixed-size allocate() signature, dtor signature, the Stat struct header
// and stats_ member declaration are missing from this view. Not thread-safe:
// the counters are plain unsigned and the map is unsynchronized.
738class AllocatorStats:
public Allocator
// fixed-size construction: pre-register the single size so the fixed-size
// allocate/deallocate overloads can use stats_.begin()
745 AllocatorStats(
size_t size):
748 stats_.insert(std::make_pair(size, Stat()));
// dtor: dump the per-size tallies to the log stream
752 LASS_CLOG <<
"~" <<
typeid(AllocatorStats).name() <<
"()" << std::endl;
753 LASS_CLOG <<
"size: allocations/deallocations" << std::endl;
754 for (
typename TStats::const_iterator i = stats_.begin(); i != stats_.end(); ++i)
756 Stat stat = i->second;
757 LASS_CLOG << i->first <<
": " << stat.allocations <<
"/" << stat.deallocations << std::endl;
758 if (stat.allocations != stat.deallocations)
760 LASS_CERR <<
"[LASS RUN MSG] Allocation unbalance detected in AllocatorStats" << std::endl;
// variable-size allocate: bump the counter for this size, then forward
764 void* allocate(
size_t size)
766 ++stats_[size].allocations;
767 return Allocator::allocate(size);
// fixed-size allocate: the single entry registered by the ctor is
// stats_.begin()
771 ++stats_.begin()->second.allocations;
772 return Allocator::allocate();
774 void deallocate(
void* mem,
size_t size)
776 ++stats_[size].deallocations;
777 Allocator::deallocate(mem, size);
779 void deallocate(
void* mem)
781 ++stats_.begin()->second.deallocations;
782 Allocator::deallocate(mem);
787 unsigned allocations;
788 unsigned deallocations;
790 typedef std::map<size_t, Stat> TStats;
// AllocatorFreeList: fixed-size allocator that recycles freed blocks through
// an intrusive singly-linked free list (top_). Blocks are at least
// sizeof(AllocationNode) so the link can be stored in the freed block itself.
// Not thread-safe; see AllocatorConcurrentFreeList for the lock-free variant.
// NOTE(review): extraction garbling -- template header, braces, the dtor
// loop condition/advance, allocate()'s signature and empty-list branch, and
// deallocate()'s list-push lines are missing from this view.
808class AllocatorFreeList:
public FixedAllocator
// clamp the block size up so a free block can hold the list link
811 AllocatorFreeList(
size_t iSize):
812 FixedAllocator(std::max<size_t>(
sizeof(AllocationNode),iSize)),
// dtor: walk the free list and return each node to the base allocator
820 AllocationNode* node = top_;
822 FixedAllocator::deallocate(node);
// copy ctor: copies the base allocator only; the free list starts empty
825 AllocatorFreeList(
const AllocatorFreeList& iOther):
826 FixedAllocator(
static_cast<const FixedAllocator&
>(iOther)),
// allocate: fall back to the base allocator when the free list is empty...
834 return FixedAllocator::allocate();
// ...otherwise pop the top node and hand its storage back out
836 AllocationNode* topNode = top_;
837 top_ = topNode->next;
// deallocate: push the block onto the free list (reusing its bytes as the
// AllocationNode link)
840 void deallocate(
void* iPointer)
844 AllocationNode* temp =
static_cast<AllocationNode*
>(iPointer);
// non-assignable
849 AllocatorFreeList& operator=(AllocatorFreeList&);
851 struct AllocationNode
853 AllocationNode* next;
855 AllocationNode* top_;
// AllocatorConcurrentFreeList: lock-free fixed-size free-list allocator.
// The list head is an atomic tagged pointer (TTaggedPtr) whose tag is bumped
// on every successful CAS to defeat the ABA problem. Each raw block is
// over-allocated by `alignment` bytes: the AllocationNode header lives at the
// front, and shift()/unshift() translate between the node address and the
// user pointer after it.
// NOTE(review): extraction garbling -- template header, braces, ctor
// initializers, the TTaggedPtr typedef, loop conditions in the dtor, and the
// empty-list/early-exit branches inside allocate/deallocate are missing from
// this view. Comments only.
871class AllocatorConcurrentFreeList:
public FixedAllocator
874 static constexpr size_t alignment = FixedAllocator::alignment;
// reserve `alignment` extra bytes per block for the node header
876 AllocatorConcurrentFreeList(
size_t size):
877 FixedAllocator(size + alignment),
// the algorithm is only meaningfully lock-free if the tagged head is;
// checked at compile time when the library trait exists, else at runtime
880#if defined(__cpp_lib_atomic_is_always_lock_free)
881 static_assert(std::atomic<TTaggedPtr>::is_always_lock_free);
883 LASS_ENFORCE(top_.is_lock_free());
// dtor: single-threaded teardown -- walk the list, destroy each node and
// return its block to the base allocator
886 ~AllocatorConcurrentFreeList()
888 AllocationNode* node = top_.load(std::memory_order_acquire).get();
891 AllocationNode* next = node->next.load(std::memory_order_relaxed);
892 node->~AllocationNode();
893 FixedAllocator::deallocate(node);
// copy ctor: copies only the base allocator; the free list starts empty
897 AllocatorConcurrentFreeList(
const AllocatorConcurrentFreeList& iOther):
898 FixedAllocator(
static_cast<const FixedAllocator&
>(iOther)),
// allocate: CAS-pop from the tagged head; on empty list fall through to a
// fresh block from the base allocator with a placement-new'd node header
904 TTaggedPtr topNode = top_.load(std::memory_order_acquire);
910 void* p = FixedAllocator::allocate();
911 AllocationNode* node =
new(p) AllocationNode();
// next head = popped node's successor, with an incremented tag (ABA guard)
914 next = TTaggedPtr(topNode->next.load(std::memory_order_relaxed), topNode.nextTag());
916 while (!top_.compare_exchange_weak(topNode, next));
917 return shift(topNode.get());
// deallocate: CAS-push the node back onto the list
919 void deallocate(
void* pointer)
923 AllocationNode* node = unshift(pointer);
924 TTaggedPtr topNode = top_.load(std::memory_order_acquire);
928 node->next.store(topNode.get(), std::memory_order_relaxed);
929 newTop = TTaggedPtr(node, topNode.nextTag());
931 while (!top_.compare_exchange_weak(topNode, newTop));
934 struct AllocationNode
936 std::atomic<AllocationNode*> next;
// layout invariants that make the fixed +alignment offset of shift/unshift
// valid for any block
940 static_assert(
alignof(AllocationNode) ==
sizeof(AllocationNode),
941 "alignof(AllocationNode) == sizeof(AllocationNode)");
942 static_assert(
sizeof(AllocationNode) <= alignment,
943 "FixedAllocator for AllocatorConcurrentFreeList must have minimum alignment of sizeof(AllocationNode)");
945 AllocatorConcurrentFreeList& operator=(
const AllocatorConcurrentFreeList&) =
delete;
// node address -> user pointer (skip the header region)
947 static void* shift(AllocationNode* node)
949 char* p =
reinterpret_cast<char*
>(node);
950 return p + alignment;
// user pointer -> node address (inverse of shift)
952 static AllocationNode* unshift(
void* pointer)
954 char* p =
static_cast<char*
>(pointer) - alignment;
955 return reinterpret_cast<AllocationNode*
>(p);
958 std::atomic<TTaggedPtr> top_;
// AllocatorSimpleBlock: fixed-size allocator that carves many allocations
// out of large blocks (~requestedBlockSize bytes) obtained from the base
// FixedAllocator. Individual deallocations go onto a pool free list (pool_);
// whole blocks are only returned to the base allocator at destruction, via
// the blocks_ list whose Node header sits at the start of every block.
// NOTE(review): extraction garbling -- template header start, class braces,
// ctor initializers, allocate()'s signature and pop-from-pool lines, the
// deallocate push lines, most of grow(), and the Node struct / member
// declarations are missing from this view. Comments only.
975 size_t requestedBlockSize = 8192,
978class AllocatorSimpleBlock:
public FixedAllocator
// size the base allocator for one full block; then recompute and cache the
// per-allocation size and allocations-per-block
981 AllocatorSimpleBlock(
size_t size):
982 FixedAllocator(blockSize(size, 0, 0)),
986 blockSize(size, &size_, &allocationsPerBlock_);
// dtor: return every block (not individual allocations) to the base
988 ~AllocatorSimpleBlock()
992 Node*
const node = blocks_;
993 blocks_ = node->next;
994 FixedAllocator::deallocate(node);
// allocate: refill the pool from a fresh block when empty
999 if (!pool_ && !grow())
// deallocate: push the freed chunk back onto the pool free list
1008 void deallocate(
void* pointer)
1010 Node* p =
static_cast<Node*
>(pointer);
1014 void swap(AllocatorSimpleBlock& other)
1016 std::swap(blocks_, other.blocks_);
1017 std::swap(pool_, other.pool_);
1018 std::swap(size_, other.size_);
1019 std::swap(allocationsPerBlock_, other.allocationsPerBlock_);
// grow: fetch one raw block, link it into blocks_, then slice the remainder
// into allocationsPerBlock_ pool entries
1039 char* mem =
static_cast<char*
>(FixedAllocator::allocate());
1044 Node*
const block =
reinterpret_cast<Node*
>(mem);
1045 block->next = blocks_;
// skip the block header before slicing out user chunks
1047 mem +=
sizeof(Node);
1048 for (
size_t i = 0; i < allocationsPerBlock_; ++i)
// blockSize: shared sizing computation -- returns total block bytes and
// optionally reports the rounded per-allocation size and the count per block
1055 static size_t blockSize(
size_t size,
size_t* pSize,
size_t* pAllocationsPerBlock)
1057 size = std::max(
sizeof(Node), size);
1058 const size_t maxBlockSize = std::max(
sizeof(Node), requestedBlockSize);
1059 const size_t allocationsPerBlock = std::max(
size_t(1), (maxBlockSize -
sizeof(Node)) / size);
1060 const size_t blockSize =
sizeof(Node) + allocationsPerBlock * size;
1061 if (pSize) *pSize = size;
1062 if (pAllocationsPerBlock) *pAllocationsPerBlock = allocationsPerBlock;
// non-copyable
1066 AllocatorSimpleBlock(
const AllocatorSimpleBlock&);
1067 AllocatorSimpleBlock& operator=(
const AllocatorSimpleBlock&);
1072 size_t allocationsPerBlock_;
// AllocatorAligned: alignment adaptor -- over-allocates by `alignment` bytes,
// returns an aligned pointer inside the buffer, and stashes the shift offset
// in the byte just before it so unalign() can recover the raw pointer.
// Requires Alignment <= 256 since the offset is one unsigned char (implicit
// in the code; not asserted in the lines visible here).
// NOTE(review): extraction garbling -- template header start, class braces,
// and align()'s `return aligned;` line are missing from this view.
1085 unsigned char Alignment,
1091 static constexpr size_t alignment = Alignment;
1093 void* allocate(
size_t size)
// ask the base for `alignment` extra bytes, then shift into alignment
1095 return align(VariableAllocator::allocate(size + alignment));
1097 void deallocate(
void* mem,
size_t size)
1099 VariableAllocator::deallocate(unalign(mem), size + alignment);
// align: compute the 1..alignment byte shift needed and record it at
// aligned[-1] (an already-aligned input is still shifted a full `alignment`
// so there is always room for the offset byte)
1102 void* align(
void* mem)
1104 const num::TuintPtr address =
reinterpret_cast<num::TuintPtr
>(mem);
1105 const unsigned char offset = alignment - (address % alignment);
1106 LASS_ASSERT(offset > 0);
1107 unsigned char*
const aligned =
static_cast<unsigned char*
>(mem) + offset;
1108 *(aligned - 1) = offset;
// unalign: read the recorded offset back and undo the shift
1111 void* unalign(
void* mem)
1113 unsigned char*
const aligned =
static_cast<unsigned char*
>(mem);
1114 const unsigned char offset = *(aligned - 1);
1115 return aligned - offset;
// AllocatorAlignedAlloc: aligned variable-size allocator delegating to the
// platform facility -- C11 aligned_alloc, MSVC _aligned_malloc, or POSIX
// posix_memalign as a fallback.
// NOTE(review): extraction garbling -- template/class header, braces, the
// posix_memalign `void* ptr;` declaration and both return paths, and the
// matching free()/_aligned_free() calls in deallocate are missing from this
// view. Comments only.
1134 static constexpr size_t alignment = Alignment;
1136 void* allocate(
size_t size)
1138#if LASS_HAVE_ALIGNED_ALLOC
// NOTE(review): C11 aligned_alloc formally requires size to be a multiple of
// alignment -- the visible code does not round up; confirm callers do.
1139 return ::aligned_alloc(alignment, size);
1140#elif LASS_PLATFORM_TYPE == LASS_PLATFORM_TYPE_WIN32
1141 return _aligned_malloc(size, alignment);
1144 if (posix_memalign(&ptr, alignment, size) != 0)
// deallocate must use the matching platform free (free vs _aligned_free)
1149 void deallocate(
void *mem)
1151#if LASS_HAVE_ALIGNED_ALLOC
1153#elif LASS_PLATFORM_TYPE == LASS_PLATFORM_TYPE_WIN32
1159 void deallocate(
void* mem,
size_t)
// Alignment == 1 needs no special alignment: plain malloc suffices
1175class AllocatorAlignedAlloc<1> :
public AllocatorMalloc
// Size-prefixing adaptor: stores the allocation size in a leading size_t
// header so deallocate(void*) works without the caller remembering the size.
// allocate reserves sizeof(size_t) extra bytes; the user pointer is the word
// after the header (the `*p = size; return p + 1;` lines are missing from
// this view -- confirm against the original file).
// NOTE(review): extraction garbling -- class header and braces missing. The
// header word also caps effective alignment at alignof(size_t).
1205 typename VariableAllocator = AllocatorMalloc
1210 void* allocate(
size_t size)
1212 size_t* p =
static_cast<size_t*
>(VariableAllocator::allocate(size +
sizeof(
size_t)));
// sized deallocate: verify the caller's size against the stored header
1216 void deallocate(
void* mem,
size_t size)
1218 size_t* p =
static_cast<size_t*
>(mem) - 1;
1219 LASS_ASSERT(*p == size);
1220 VariableAllocator::deallocate(p, size +
sizeof(
size_t));
// unsized deallocate: read the size back out of the header
1222 void deallocate(
void* mem)
1224 size_t* p =
static_cast<size_t*
>(mem) - 1;
1225 VariableAllocator::deallocate(p, *p +
sizeof(
size_t));
// AllocatorObject<T>: typed object allocator -- uses a FixedAllocator sized
// for sizeof(T), presumably placement-constructing T in allocate and calling
// ~T() before releasing storage in deallocate (the new/destructor-call lines
// are missing from this view -- confirm against the original file).
// NOTE(review): extraction garbling -- template header, braces, and the
// allocate() signature/construction lines are missing. Comments only.
1235class AllocatorObject:
public FixedAllocator
1239 FixedAllocator(sizeof(T))
1244 void* p = FixedAllocator::allocate();
// typed deallocate: destroy then release the fixed-size slot
1251 void deallocate(T* p)
1258 FixedAllocator::deallocate(p);
Use an Allocator to implement a class' new and delete.
Fixes a variable-size allocator to one size.
A variable-size allocator built on top of a fixed-size allocator.
static TInstance * instance()
Return pointer to singleton instance.
Pointer with a tag for ABA salvation. Some lock-free algorithms suffer from the ABA problem when acting...
A primitive to provide Thread Local Storage functionality for a first-citizen class.
#define LASS_CERR
return reference to 'cerr' proxy stream of the lass::io::ProxyMan singleton.
#define LASS_CLOG
return reference to 'clog' proxy stream of the lass::io::ProxyMan singleton.
#define LASS_SIMD_ALIGN
if LASS_SIMD_ALIGNMENT is set, use LASS_SIMD_ALIGN to align some structures on SIMD alignment boundaries.
general utility, debug facilities, ...
Library for Assembled Shared Sources.