Library of Assembled Shared Sources
allocator.h
/** @file
 * @author Bram de Greve (bram@cocamware.com)
 * @author Tom De Muer (tom@cocamware.com)
 *
 * *** BEGIN LICENSE INFORMATION ***
 *
 * The contents of this file are subject to the Common Public Attribution License
 * Version 1.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://lass.sourceforge.net/cpal-license. The License is based on the
 * Mozilla Public License Version 1.1 but Sections 14 and 15 have been added to cover
 * use of software over a computer network and provide for limited attribution for
 * the Original Developer. In addition, Exhibit A has been modified to be consistent
 * with Exhibit B.
 *
 * Software distributed under the License is distributed on an "AS IS" basis, WITHOUT
 * WARRANTY OF ANY KIND, either express or implied. See the License for the specific
 * language governing rights and limitations under the License.
 *
 * The Original Code is LASS - Library of Assembled Shared Sources.
 *
 * The Initial Developer of the Original Code is Bram de Greve and Tom De Muer.
 * The Original Developer is the Initial Developer.
 *
 * All portions of the code written by the Initial Developer are:
 * Copyright (C) 2004-2024 the Initial Developer.
 * All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of the
 * GNU General Public License Version 2 or later (the GPL), in which case the
 * provisions of GPL are applicable instead of those above. If you wish to allow use
 * of your version of this file only under the terms of the GPL and not to allow
 * others to use your version of this file under the CPAL, indicate your decision by
 * deleting the provisions above and replace them with the notice and other
 * provisions required by the GPL License. If you do not delete the provisions above,
 * a recipient may use your version of this file under either the CPAL or the GPL.
 *
 * *** END LICENSE INFORMATION ***
 */

/** @defgroup Allocator
 * @brief Custom allocator library
 *
 * @section Introduction
 *
 * This library provides a flexible set of custom allocators that you can combine
 * in any way you like, as long as it makes sense.
 *
 * In general, there are two kinds of allocator concepts: fixed-size and variable-size
 * allocators, with the obvious meanings. The former is constructed with the requested
 * block size and (de)allocates blocks of at least that size. The latter has a default
 * constructor, and the requested block size is passed at _both_ allocation and
 * deallocation. Some variable-size allocators (like AllocatorMalloc) also provide
 * deallocate(void* mem) without the block size, but that's not a requirement.
 *
 * @code
 * class FixedSizeAllocator
 * {
 * public:
 *     FixedSizeAllocator(size_t size);
 *     void* allocate();
 *     void deallocate(void* mem);
 * };
 *
 * class VariableSizeAllocator
 * {
 * public:
 *     VariableSizeAllocator();
 *     void* allocate(size_t size);
 *     void deallocate(void* mem, size_t size);
 * };
 * @endcode
 *
 * There are bottom-level allocators (like AllocatorMalloc) that do the real allocation
 * work and exist on their own. And there are adaptor allocators (like AllocatorLocked)
 * that extend/adapt another allocator to provide some extra functionality. Some of them
 * (like AllocatorLocked) implement both the fixed-size and variable-size concepts.
 * Others (like AllocatorVariableSize) convert a fixed-size interface to a variable-size
 * one.
 *
 * On top of all that, there's AllocatorClassAdaptor, which you can use to overload
 * class-specific new and delete using a variable-size allocator.
 */
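
/* A composition sketch (illustrative, not part of the original header;
 * CriticalSection is assumed to be the mutex class from thread.h): the
 * adaptors below can be stacked to build, say, a thread-safe binned allocator
 * with per-size free lists.
 *
 * @code
 * using namespace lass::util;
 *
 * typedef AllocatorLocked
 * <
 *     AllocatorBinned<AllocatorFreeList<>, 256>,
 *     CriticalSection
 * >
 * THeap;
 *
 * THeap heap;                    // models the VariableAllocator concept
 * void* p = heap.allocate(100);  // served by the 100-byte bin
 * heap.deallocate(p, 100);       // variable-size: pass the size back
 * @endcode
 */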

#ifndef LASS_GUARDIAN_OF_INCLUSION_UTIL_ALLOCATOR_H
#define LASS_GUARDIAN_OF_INCLUSION_UTIL_ALLOCATOR_H

#include "util_common.h"
#include "atomic.h"
#include "singleton.h"
#include "thread.h"
#include "../meta/bool.h"

#include <cstddef>
#include <cstdlib>   // ::malloc, ::free, posix_memalign
#include <algorithm> // std::max
#include <atomic>    // std::atomic
#include <map>       // std::map
#include <new>       // placement new, std::bad_alloc, std::nothrow_t
#include <typeinfo>  // typeid
#include <utility>   // std::make_pair

namespace lass
{
namespace util
{
namespace impl
{
    /** @ingroup Allocator
     * @internal
     *
     * Compile-time test: does Allocator::allocate have the signature AllocatorFun?
     * Classic sizeof-based overload selection: test(AllocatorFun) is chosen if
     * &Allocator::allocate converts to AllocatorFun, the ellipsis otherwise.
     */
    template <typename Allocator, typename AllocatorFun>
    struct IsCompatibleAllocator
    {
        static meta::True test(AllocatorFun);
        static meta::False test(...);
        enum { value = (sizeof(test(&Allocator::allocate)) == sizeof(meta::True)) };
        typedef typename meta::Bool<value>::Type Type;
    };

    /** @ingroup Allocator
     * @internal
     */
    template <typename Allocator>
    struct IsFixedAllocator: public impl::IsCompatibleAllocator<Allocator, void*(*)()> {};

    /** @ingroup Allocator
     * @internal
     */
    template <typename Allocator>
    struct IsVariableAllocator: public impl::IsCompatibleAllocator<Allocator, void*(*)(size_t)> {};
}


/** Use an Allocator to implement a class' new and delete
 * @ingroup Allocator
 *
 * Derive from this class to implement new and delete based on Allocator.
 * It uses a singleton of Allocator that is shared per
 * <Allocator type, destruction priority> pair. So if you have another
 * class using the same allocator and destruction priority, the allocator
 * singleton will be shared.
 */
template
<
    typename VariableAllocator,
    int destructionPriority = destructionPriorityInternalAllocators
>
class AllocatorClassAdaptor
{
public:
    static void* operator new(std::size_t size)
    {
        void* result = allocator()->allocate(size);
        if (!result)
        {
            throw std::bad_alloc();
        }
        return result;
    }

    static void* operator new(std::size_t size, std::nothrow_t) noexcept
    {
        try
        {
            return allocator()->allocate(size);
        }
        catch (...)
        {
            return 0;
        }
    }

    static void* operator new(std::size_t, void* mem)
    {
        return mem;
    }

    static void* operator new[](std::size_t size)
    {
        void* result = allocator()->allocate(size);
        if (!result)
        {
            throw std::bad_alloc();
        }
        return result;
    }

    static void* operator new[](std::size_t size, std::nothrow_t) noexcept
    {
        try
        {
            return allocator()->allocate(size);
        }
        catch (...)
        {
            return 0;
        }
    }

    static void* operator new[](std::size_t, void* mem)
    {
        return mem;
    }

    static void operator delete(void* mem, std::size_t size)
    {
        allocator()->deallocate(mem, size);
    }

    static void operator delete(void* mem, std::size_t size, std::nothrow_t)
    {
        allocator()->deallocate(mem, size);
    }

    static void operator delete(void*, std::size_t, std::nothrow_t, void*)
    {
    }

    static void operator delete[](void* mem, std::size_t size)
    {
        allocator()->deallocate(mem, size);
    }

    static void operator delete[](void* mem, std::size_t size, std::nothrow_t)
    {
        allocator()->deallocate(mem, size);
    }

    static void operator delete[](void*, std::size_t, std::nothrow_t, void*)
    {
    }

private:

    static VariableAllocator* allocator()
    {
        // one shared allocator per <VariableAllocator, destructionPriority> pair
        return Singleton<VariableAllocator, destructionPriority>::instance();
    }
};
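
/* Usage sketch (illustrative, not part of the original header): give a class
 * its own new/delete by deriving from AllocatorClassAdaptor. All instances of
 * Foo, and of any other class using the same <allocator, priority> pair, share
 * one AllocatorMalloc singleton.
 *
 * @code
 * class Foo: public AllocatorClassAdaptor<AllocatorMalloc>
 * {
 * public:
 *     int value;
 * };
 *
 * Foo* foo = new Foo;  // routed through AllocatorMalloc::allocate
 * delete foo;          // routed through AllocatorMalloc::deallocate
 * @endcode
 */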


/** @ingroup Allocator
 * @arg concept: VariableAllocator
 */
class AllocatorMalloc
{
public:
    static constexpr size_t alignment = alignof(std::max_align_t);

    void* allocate(size_t size)
    {
        return ::malloc(size);
    }
    void deallocate(void* mem)
    {
        ::free(mem);
    }
    void deallocate(void* mem, size_t)
    {
        ::free(mem);
    }
};


/** Guard a thread-unsafe allocator with a lock.
 * @ingroup Allocator
 * @arg concept: VariableAllocator or FixedAllocator, depending on the template argument
 */
template
<
    typename Allocator,
    typename Lock,
    typename Locker = util::Locker<Lock>
>
class AllocatorLocked: public Allocator
{
public:
    AllocatorLocked():
        Allocator()
    {
    }
    AllocatorLocked(size_t size):
        Allocator(size)
    {
    }
    void* allocate()
    {
        Locker locker(lock_);
        return Allocator::allocate();
    }
    void* allocate(size_t size)
    {
        Locker locker(lock_);
        return Allocator::allocate(size);
    }
    void deallocate(void* mem)
    {
        Locker locker(lock_);
        Allocator::deallocate(mem);
    }
    void deallocate(void* mem, size_t size)
    {
        Locker locker(lock_);
        Allocator::deallocate(mem, size);
    }
private:
    Lock lock_;
};
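
/* Usage sketch (illustrative; CriticalSection is assumed to be the mutex class
 * from thread.h): serialize a thread-unsafe free list behind a lock.
 *
 * @code
 * AllocatorLocked<AllocatorFreeList<>, CriticalSection> pool(64);
 * void* p = pool.allocate();  // holds the lock for the duration of the call
 * pool.deallocate(p);         // idem
 * @endcode
 */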


/** Instantiates an Allocator per thread.
 * @ingroup Allocator
 * @arg concept: VariableAllocator or FixedAllocator, depending on the template argument
 * @arg requirements: Allocator must be copy-constructible
 * @warning it is potentially unsafe to allocate memory in one thread and deallocate it
 *  in another when using this adaptor! Whether that is allowed depends on the
 *  underlying Allocator.
 */
template
<
    typename Allocator
>
class AllocatorPerThread
{
public:
    AllocatorPerThread():
        allocator_()
    {
    }
    AllocatorPerThread(size_t size):
        allocator_(size)
    {
    }
    void* allocate()
    {
        return allocator_->allocate();
    }
    void* allocate(size_t size)
    {
        return allocator_->allocate(size);
    }
    void deallocate(void* mem)
    {
        return allocator_->deallocate(mem);
    }
    void deallocate(void* mem, size_t size)
    {
        return allocator_->deallocate(mem, size);
    }
private:
    util::ThreadLocalVariable<Allocator> allocator_;
};


/** A variable-size allocator built on top of a fixed-size allocator
 * @ingroup Allocator
 * @arg concept: VariableAllocator
 * @warning CURRENTLY THREAD UNSAFE!
 */
template
<
    typename FixedAllocator
>
class AllocatorVariableSize
{
public:
    void* allocate(size_t size)
    {
        return fixedAllocator(size).allocate();
    }
    void deallocate(void* mem, size_t size)
    {
        fixedAllocator(size).deallocate(mem);
    }
private:
    typedef std::map<size_t, FixedAllocator> TFixedAllocators;

    FixedAllocator& fixedAllocator(size_t size)
    {
        // find the fixed-size allocator for this size, creating it on first use
        typename TFixedAllocators::iterator allocator = fixedAllocators_.find(size);
        if (allocator == fixedAllocators_.end())
        {
            allocator = fixedAllocators_.insert(
                std::make_pair(size, FixedAllocator(size))).first;
        }
        return allocator->second;
    }

    TFixedAllocators fixedAllocators_;
};
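
/* Usage sketch (illustrative): present fixed-size free lists as one
 * variable-size allocator; a separate FixedAllocator is created per distinct
 * requested size.
 *
 * @code
 * AllocatorVariableSize<AllocatorFreeList<> > alloc;
 * void* a = alloc.allocate(24);  // creates and uses the 24-byte free list
 * void* b = alloc.allocate(64);  // creates and uses the 64-byte free list
 * alloc.deallocate(a, 24);       // the size must match the allocation
 * alloc.deallocate(b, 64);
 * @endcode
 */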


/** Fixes a variable-size allocator to one size.
 * @ingroup Allocator
 * @arg concept: FixedAllocator
 */
template
<
    typename VariableAllocator = AllocatorMalloc
>
class AllocatorFixed: private VariableAllocator
{
public:
    static constexpr size_t alignment = VariableAllocator::alignment;

    AllocatorFixed(size_t size):
        size_(size)
    {
    }
    void* allocate()
    {
        return VariableAllocator::allocate(size_);
    }
    void deallocate(void* mem)
    {
        VariableAllocator::deallocate(mem, size_);
    }
private:
    size_t size_;
};
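
/* Usage sketch (illustrative; Node is a hypothetical type): bind malloc to a
 * single block size, so it can serve as the FixedAllocator backing other
 * adaptors.
 *
 * @code
 * AllocatorFixed<> nodes(sizeof(Node));
 * void* p = nodes.allocate();  // always sizeof(Node) bytes
 * nodes.deallocate(p);
 * @endcode
 */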


/** @ingroup Allocator
 */
struct BinnerOne
{
    static size_t bin(size_t size)
    {
        return size > 0 ? size - 1 : 0;
    }
    static size_t size(size_t bin)
    {
        return bin + 1;
    }
};

/** @ingroup Allocator
 */
struct BinnerPower2
{
    static size_t bin(size_t size)
    {
        size_t bin = 0;
        while (size > BinnerPower2::size(bin))
        {
            ++bin;
        }
        return bin;
    }
    static size_t size(size_t bin)
    {
        return size_t(1) << bin;
    }
};

/** @ingroup Allocator
 */
template <size_t multiple>
struct BinnerPadded
{
    static size_t bin(size_t size)
    {
        return size > 0 ? (size - 1) / multiple : 0;
    }
    static size_t size(size_t bin)
    {
        return (bin + 1) * multiple;
    }
};

/** @ingroup Allocator
 */
template <>
struct BinnerPadded<0>: public BinnerOne
{
};

/** @ingroup Allocator
 */
template <>
struct BinnerPadded<1>: public BinnerOne
{
};
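
/* Worked examples (the values follow directly from the code above):
 *
 * @code
 * BinnerOne::bin(1) == 0;        BinnerOne::size(0) == 1;        // one bin per byte
 * BinnerPower2::bin(5) == 3;     BinnerPower2::size(3) == 8;     // rounds 5 up to 8
 * BinnerPadded<8>::bin(20) == 2; BinnerPadded<8>::size(2) == 24; // pads 20 to 24
 * @endcode
 */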

/** @ingroup Allocator
 */
template
<
    typename FixedAllocator,
    size_t maxBinSize = 128,
    typename Binner = BinnerOne,
    typename VariableAllocator = AllocatorMalloc
>
class AllocatorBinned: public VariableAllocator
{
public:
    AllocatorBinned()
    {
        initFixed();
    }
    AllocatorBinned(const AllocatorBinned& other)
    {
        copyInitFixed(other);
    }
    ~AllocatorBinned()
    {
        destroyFixed(numBins());
    }
    void* allocate(size_t size)
    {
        if (size > maxBinSize)
        {
            return VariableAllocator::allocate(size);
        }
        return fixedAllocators_[Binner::bin(size)].allocate();
    }
    void deallocate(void* mem, size_t size)
    {
        if (size > maxBinSize)
        {
            VariableAllocator::deallocate(mem, size);
        }
        else
        {
            fixedAllocators_[Binner::bin(size)].deallocate(mem);
        }
    }
private:
    AllocatorBinned& operator=(const AllocatorBinned&);

    size_t numBins() const
    {
        return Binner::bin(maxBinSize) + 1;
    }
    void allocFixed()
    {
        fixedAllocators_ = static_cast<FixedAllocator*>(
            VariableAllocator::allocate(numBins() * sizeof(FixedAllocator)));
        if (!fixedAllocators_)
        {
            throw std::bad_alloc();
        }
    }
    void initFixed()
    {
        // Kids, don't try this at home. We're trained professionals here! ;)
        //
        // Manually construct one FixedAllocator per bin in the raw memory,
        // destroying the ones already built if a constructor throws.
        allocFixed();
        const size_t n = numBins();
        for (size_t i = 0; i < n; ++i)
        {
            try
            {
                new(&fixedAllocators_[i]) FixedAllocator(Binner::size(i));
            }
            catch (...)
            {
                destroyFixed(i);
                throw;
            }
        }
    }
    void copyInitFixed(const AllocatorBinned& other)
    {
        // Kids, don't try this either.
        //
        allocFixed();
        const size_t n = numBins();
        for (size_t i = 0; i < n; ++i)
        {
            try
            {
                new(&fixedAllocators_[i]) FixedAllocator(other.fixedAllocators_[i]);
            }
            catch (...)
            {
                destroyFixed(i);
                throw;
            }
        }
    }
    void destroyFixed(size_t i)
    {
        // ... nor this: destroy the first i allocators in reverse order, then
        // release the raw memory.
        while (i > 0)
        {
            fixedAllocators_[--i].~FixedAllocator();
        }
        VariableAllocator::deallocate(fixedAllocators_, numBins() * sizeof(FixedAllocator));
    }

    FixedAllocator* fixedAllocators_;
};
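
/* Usage sketch (illustrative): one free list per size class, with a plain
 * malloc fallback for anything above maxBinSize.
 *
 * @code
 * AllocatorBinned<AllocatorFreeList<>, 128, BinnerPadded<8> > alloc;
 * void* a = alloc.allocate(20);    // served by the 24-byte bin
 * void* b = alloc.allocate(1024);  // above 128: falls through to AllocatorMalloc
 * alloc.deallocate(a, 20);
 * alloc.deallocate(b, 1024);
 * @endcode
 */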


/** @ingroup Allocator
 * @arg concept: FixedAllocator except for the constructor.
 */
template
<
    typename FixedAllocator,
    size_t size
>
class AllocatorStaticFixed: public FixedAllocator
{
public:
    AllocatorStaticFixed():
        FixedAllocator(size)
    {
    }
};


/** @ingroup Allocator
 * @arg concept: same as Allocator
 */
template
<
    typename Allocator
>
class AllocatorThrow: public Allocator
{
public:
    AllocatorThrow():
        Allocator()
    {
    }
    AllocatorThrow(size_t size):
        Allocator(size)
    {
    }
    void* allocate()
    {
        if (void* p = Allocator::allocate())
        {
            return p;
        }
        throw std::bad_alloc();
    }
    void* allocate(size_t size)
    {
        if (void* p = Allocator::allocate(size))
        {
            return p;
        }
        throw std::bad_alloc();
    }
};


/** @ingroup Allocator
 * @arg concept: same as Allocator
 */
template
<
    typename Allocator
>
class AllocatorNoThrow: public Allocator
{
public:
    AllocatorNoThrow():
        Allocator()
    {
    }
    AllocatorNoThrow(size_t size):
        Allocator(size)
    {
    }
    void* allocate()
    {
        try
        {
            return Allocator::allocate();
        }
        catch (...)
        {
            return 0;
        }
    }
    void* allocate(size_t size)
    {
        try
        {
            return Allocator::allocate(size);
        }
        catch (...)
        {
            return 0;
        }
    }
};


/** @ingroup Allocator
 * @arg concept: same as Allocator, except for the constructor if Allocator == FixedAllocator.
 */
template
<
    typename VariableAllocator,
    int destructionPriority = destructionPriorityInternalAllocators
>
class AllocatorSingleton
{
public:
    void* allocate()
    {
        return allocator()->allocate();
    }
    void* allocate(size_t size)
    {
        return allocator()->allocate(size);
    }
    void deallocate(void* mem)
    {
        return allocator()->deallocate(mem);
    }
    void deallocate(void* mem, size_t size)
    {
        return allocator()->deallocate(mem, size);
    }
    VariableAllocator* allocator()
    {
        // one shared allocator per <VariableAllocator, destructionPriority> pair
        return Singleton<VariableAllocator, destructionPriority>::instance();
    }
};


/** @ingroup Allocator
 * @warning THREAD UNSAFE!
 */
template
<
    typename Allocator
>
class AllocatorStats: public Allocator
{
public:
    AllocatorStats():
        Allocator()
    {
    }
    AllocatorStats(size_t size):
        Allocator(size)
    {
        stats_.insert(std::make_pair(size, Stat()));
    }
    ~AllocatorStats()
    {
        LASS_CLOG << "~" << typeid(AllocatorStats).name() << "()" << std::endl;
        LASS_CLOG << "size: allocations/deallocations" << std::endl;
        for (typename TStats::const_iterator i = stats_.begin(); i != stats_.end(); ++i)
        {
            Stat stat = i->second;
            LASS_CLOG << i->first << ": " << stat.allocations << "/" << stat.deallocations << std::endl;
            if (stat.allocations != stat.deallocations)
            {
                LASS_CERR << "[LASS RUN MSG] Allocation unbalance detected in AllocatorStats" << std::endl;
            }
        }
    }
    void* allocate(size_t size)
    {
        ++stats_[size].allocations;
        return Allocator::allocate(size);
    }
    void* allocate()
    {
        ++stats_.begin()->second.allocations;
        return Allocator::allocate();
    }
    void deallocate(void* mem, size_t size)
    {
        ++stats_[size].deallocations;
        Allocator::deallocate(mem, size);
    }
    void deallocate(void* mem)
    {
        ++stats_.begin()->second.deallocations;
        Allocator::deallocate(mem);
    }
private:
    struct Stat
    {
        unsigned allocations;
        unsigned deallocations;
    };
    typedef std::map<size_t, Stat> TStats;
    TStats stats_;
};
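
/* Usage sketch (illustrative): wrap any allocator to count (de)allocations per
 * size; unbalanced counts are reported at destruction via LASS_CERR.
 *
 * @code
 * {
 *     AllocatorStats<AllocatorVariableSize<AllocatorFreeList<> > > alloc;
 *     void* p = alloc.allocate(32);
 *     alloc.deallocate(p, 32);
 * } // the destructor logs "32: 1/1" via LASS_CLOG
 * @endcode
 */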


/** A fixed-size free-list allocator
 * @ingroup Allocator
 * @arg concept: FixedAllocator
 * @arg thread UNSAFE
 * @arg copy-constructible, not assignable
 */
template
<
    typename FixedAllocator = AllocatorFixed<AllocatorMalloc>
>
class AllocatorFreeList: public FixedAllocator
{
public:
    AllocatorFreeList(size_t iSize):
        FixedAllocator(std::max<size_t>(sizeof(AllocationNode), iSize)),
        top_(0)
    {
    }
    ~AllocatorFreeList()
    {
        while (top_)
        {
            AllocationNode* node = top_;
            top_ = node->next;
            FixedAllocator::deallocate(node);
        }
    }
    AllocatorFreeList(const AllocatorFreeList& iOther):
        FixedAllocator(static_cast<const FixedAllocator&>(iOther)),
        top_(0)
    {
    }
    void* allocate()
    {
        if (!top_)
        {
            // free list is empty: fall through to the backing allocator
            return FixedAllocator::allocate();
        }
        AllocationNode* topNode = top_;
        top_ = topNode->next;
        return topNode;
    }
    void deallocate(void* iPointer)
    {
        if (!iPointer)
            return;
        // push the block on the intrusive free list
        AllocationNode* temp = static_cast<AllocationNode*>(iPointer);
        temp->next = top_;
        top_ = temp;
    }
private:
    AllocatorFreeList& operator=(AllocatorFreeList&);

    struct AllocationNode
    {
        AllocationNode* next;
    };
    AllocationNode* top_;
};
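
/* Usage sketch (illustrative): deallocated blocks are kept on an intrusive
 * free list and recycled by later allocations.
 *
 * @code
 * AllocatorFreeList<> pool(64);
 * void* a = pool.allocate();  // fresh 64-byte block from the backing allocator
 * pool.deallocate(a);         // pushed on the free list
 * void* b = pool.allocate();  // b == a: popped from the free list
 * @endcode
 */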


/** A fixed-size lock-free free-list allocator
 * @ingroup Allocator
 * @arg concept: FixedAllocator
 * @arg thread safe, lock-free
 * @arg copy-constructible, not assignable
 */
template
<
    typename FixedAllocator = AllocatorFixed<AllocatorMalloc>
>
class AllocatorConcurrentFreeList: public FixedAllocator
{
public:
    static constexpr size_t alignment = FixedAllocator::alignment;

    AllocatorConcurrentFreeList(size_t size):
        FixedAllocator(size + alignment),
        top_()
    {
#if defined(__cpp_lib_atomic_is_always_lock_free)
        static_assert(std::atomic<TTaggedPtr>::is_always_lock_free);
#else
        LASS_ENFORCE(top_.is_lock_free());
#endif
    }
    ~AllocatorConcurrentFreeList()
    {
        AllocationNode* node = top_.load(std::memory_order_acquire).get();
        while (node)
        {
            AllocationNode* next = node->next.load(std::memory_order_relaxed);
            node->~AllocationNode();
            FixedAllocator::deallocate(node);
            node = next;
        }
    }
    AllocatorConcurrentFreeList(const AllocatorConcurrentFreeList& iOther):
        FixedAllocator(static_cast<const FixedAllocator&>(iOther)),
        top_()
    {
    }
    void* allocate()
    {
        TTaggedPtr topNode = top_.load(std::memory_order_acquire);
        TTaggedPtr next;
        do
        {
            if (!topNode)
            {
                // free list is empty: get a fresh block and construct its node header
                void* p = FixedAllocator::allocate();
                AllocationNode* node = new(p) AllocationNode();
                return shift(node);
            }
            // bump the tag so a concurrent pop/push cycle can't go unnoticed (ABA)
            next = TTaggedPtr(topNode->next.load(std::memory_order_relaxed), topNode.nextTag());
        }
        while (!top_.compare_exchange_weak(topNode, next));
        return shift(topNode.get());
    }
    void deallocate(void* pointer)
    {
        if (!pointer)
            return;
        AllocationNode* node = unshift(pointer);
        TTaggedPtr topNode = top_.load(std::memory_order_acquire);
        TTaggedPtr newTop;
        do
        {
            node->next.store(topNode.get(), std::memory_order_relaxed);
            newTop = TTaggedPtr(node, topNode.nextTag());
        }
        while (!top_.compare_exchange_weak(topNode, newTop));
    }
private:
    struct AllocationNode
    {
        std::atomic<AllocationNode*> next;
    };
    typedef util::TaggedPtr<AllocationNode> TTaggedPtr;

    static_assert(alignof(AllocationNode) == sizeof(AllocationNode),
        "alignof(AllocationNode) == sizeof(AllocationNode)");
    static_assert(sizeof(AllocationNode) <= alignment,
        "FixedAllocator for AllocatorConcurrentFreeList must have minimum alignment of sizeof(AllocationNode)");

    AllocatorConcurrentFreeList& operator=(const AllocatorConcurrentFreeList&) = delete;

    // the node header lives in front of the user block; shift/unshift convert
    // between the two views.
    static void* shift(AllocationNode* node)
    {
        char* p = reinterpret_cast<char*>(node);
        return p + alignment;
    }
    static AllocationNode* unshift(void* pointer)
    {
        char* p = static_cast<char*>(pointer) - alignment;
        return reinterpret_cast<AllocationNode*>(p);
    }

    std::atomic<TTaggedPtr> top_;
};
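
/* Block layout note (follows from shift/unshift above): each block from the
 * backing FixedAllocator starts with an alignment-sized header that holds the
 * intrusive AllocationNode; the caller only ever sees the address behind it.
 *
 * @code
 * |<-- alignment -->|<-- requested size -->|
 * [ AllocationNode ][ user data ...        ]
 * ^                 ^
 * block start       pointer returned by allocate()
 * @endcode
 */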


/** A simple fixed-size block allocator
 * @ingroup Allocator
 * @arg concept: FixedAllocator
 * @arg thread UNSAFE.
 * @arg not copy-constructible, not assignable
 * @arg The blocks are only deallocated at destruction time to simplify the
 *  allocation/deallocation logic. However, nodes that are deallocated can still be reused.
 * @arg It is NOT SAFE to deallocate memory using another instance of AllocatorSimpleBlock
 *  than the one used to allocate it.
 */
template
<
    size_t requestedBlockSize = 8192,
    typename FixedAllocator = AllocatorFixed<AllocatorMalloc>
>
class AllocatorSimpleBlock: public FixedAllocator
{
public:
    AllocatorSimpleBlock(size_t size):
        FixedAllocator(blockSize(size, 0, 0)),
        blocks_(0),
        pool_(0)
    {
        blockSize(size, &size_, &allocationsPerBlock_);
    }
    ~AllocatorSimpleBlock()
    {
        while (blocks_)
        {
            Node* const node = blocks_;
            blocks_ = node->next;
            FixedAllocator::deallocate(node);
        }
    }
    void* allocate()
    {
        if (!pool_ && !grow())
        {
            return 0;
        }
        LASS_ASSERT(pool_);
        Node* p = pool_;
        pool_ = p->next;
        return p;
    }
    void deallocate(void* pointer)
    {
        Node* p = static_cast<Node*>(pointer);
        p->next = pool_;
        pool_ = p;
    }
    void swap(AllocatorSimpleBlock& other)
    {
        std::swap(blocks_, other.blocks_);
        std::swap(pool_, other.pool_);
        std::swap(size_, other.size_);
        std::swap(allocationsPerBlock_, other.allocationsPerBlock_);
    }

private:

    // If LASS_SIMD_ALIGNMENT is set, make sure Node has the right size so that
    // at least the first allocation in the block is SIMD aligned (and all the
    // others too iff they have the right size).
    //
    // So, SIMD align Node to make it so.
    //
    // This does assume FixedAllocator returns SIMD aligned allocations!
    //
    struct LASS_SIMD_ALIGN Node
    {
        Node* next;
    };

    bool grow()
    {
        // allocate one big block, link it in the block list, and carve it up
        // into fixed-size nodes that are pushed on the pool
        char* mem = static_cast<char*>(FixedAllocator::allocate());
        if (!mem)
        {
            return false;
        }
        Node* const block = reinterpret_cast<Node*>(mem);
        block->next = blocks_;
        blocks_ = block;
        mem += sizeof(Node);
        for (size_t i = 0; i < allocationsPerBlock_; ++i)
        {
            deallocate(mem); // pushes in pool
            mem += size_;
        }
        return true;
    }
    static size_t blockSize(size_t size, size_t* pSize, size_t* pAllocationsPerBlock)
    {
        size = std::max(sizeof(Node), size);
        const size_t maxBlockSize = std::max(sizeof(Node), requestedBlockSize);
        const size_t allocationsPerBlock = std::max(size_t(1), (maxBlockSize - sizeof(Node)) / size);
        const size_t blockSize = sizeof(Node) + allocationsPerBlock * size;
        if (pSize) *pSize = size;
        if (pAllocationsPerBlock) *pAllocationsPerBlock = allocationsPerBlock;
        return blockSize;
    }

    AllocatorSimpleBlock(const AllocatorSimpleBlock&);
    AllocatorSimpleBlock& operator=(const AllocatorSimpleBlock&);

    Node* blocks_;
    Node* pool_;
    size_t size_;
    size_t allocationsPerBlock_;
};
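
/* Usage sketch (illustrative): carve fixed-size nodes out of ~8 KiB blocks;
 * deallocated nodes go back to the pool, but the blocks themselves are only
 * released by the destructor.
 *
 * @code
 * AllocatorSimpleBlock<8192> pool(24);
 * void* a = pool.allocate();
 * pool.deallocate(a);  // recycled by the next allocate()
 * @endcode
 */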


/** @ingroup Allocator
 * @arg concept: VariableAllocator
 *
 * AllocatorAligned adds @a Alignment bytes to the requested block size to be able to
 * shift the pointer to an alignment boundary.
 */
template
<
    unsigned char Alignment,
    typename VariableAllocator = AllocatorMalloc
>
class AllocatorAligned: public VariableAllocator
{
public:
    static constexpr size_t alignment = Alignment;

    void* allocate(size_t size)
    {
        return align(VariableAllocator::allocate(size + alignment));
    }
    void deallocate(void* mem, size_t size)
    {
        VariableAllocator::deallocate(unalign(mem), size + alignment);
    }
private:
    void* align(void* mem)
    {
        const num::TuintPtr address = reinterpret_cast<num::TuintPtr>(mem);
        const unsigned char offset = alignment - (address % alignment);
        LASS_ASSERT(offset > 0);
        unsigned char* const aligned = static_cast<unsigned char*>(mem) + offset;
        *(aligned - 1) = offset; // remember how far we shifted, for unalign()
        return aligned;
    }
    void* unalign(void* mem)
    {
        unsigned char* const aligned = static_cast<unsigned char*>(mem);
        const unsigned char offset = *(aligned - 1);
        return aligned - offset;
    }
};
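
/* Worked example of align/unalign above (illustrative): with Alignment == 16
 * and a raw block at address 0x1004, the offset is 16 - (0x1004 % 16) == 12,
 * so allocate() returns 0x1010 and stores the offset in the byte at 0x100F.
 * unalign() reads that byte back and returns 0x1010 - 12 == 0x1004. An already
 * aligned raw block is shifted by a full 16 bytes, which is why @a Alignment
 * extra bytes always suffice.
 */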


/** @ingroup Allocator
 * @arg concept: VariableAllocator
 *
 * AllocatorAlignedAlloc returns blocks aligned to an @a Alignment boundary, using
 * the platform's aligned allocation function (aligned_alloc, _aligned_malloc or
 * posix_memalign).
 */
template
<
    size_t Alignment
>
class AllocatorAlignedAlloc
{
public:
    static constexpr size_t alignment = Alignment;

    void* allocate(size_t size)
    {
#if LASS_HAVE_ALIGNED_ALLOC
        return ::aligned_alloc(alignment, size);
#elif LASS_PLATFORM_TYPE == LASS_PLATFORM_TYPE_WIN32
        return _aligned_malloc(size, alignment);
#else
        void* ptr = 0;
        if (posix_memalign(&ptr, alignment, size) != 0)
            return 0;
        return ptr;
#endif
    }
    void deallocate(void* mem)
    {
#if LASS_HAVE_ALIGNED_ALLOC
        ::free(mem);
#elif LASS_PLATFORM_TYPE == LASS_PLATFORM_TYPE_WIN32
        _aligned_free(mem);
#else
        ::free(mem);
#endif
    }
    void deallocate(void* mem, size_t)
    {
        deallocate(mem);
    }
};

/** @ingroup Allocator
 */
template <>
class AllocatorAlignedAlloc<0>: public AllocatorMalloc
{
};

/** @ingroup Allocator
 */
template <>
class AllocatorAlignedAlloc<1>: public AllocatorMalloc
{
};


/** @ingroup Allocator
 * @arg concept: VariableAllocator
 *
 * AllocatorSized adds sizeof(size_t) bytes in front of the returned pointer to store the
 * requested block size. Because of that, you can use the simpler form
 * <tt>void deallocate(void* mem)</tt> to deallocate the memory.
 *
 * @warning the fact that this allocator supports <tt>void deallocate(void* mem)</tt> does
 *  @b NOT mean it implements the FixedAllocator concept, not even partially. So when
 *  you use AllocatorSized as a base for an allocator that implements both the
 *  VariableAllocator and FixedAllocator concepts depending on the base allocator (such as
 *  AllocatorStats), the result will be a VariableAllocator. You cannot generally assume
 *  you will be able to use <tt>void deallocate(void* mem)</tt> on the resulting allocator
 *  as well. Particularly for AllocatorStats, it is known that this will cause undefined
 *  behaviour. However, using AllocatorStats as the base of AllocatorSized will achieve
 *  the desired effect perfectly well and causes no problem (= using AllocatorSized at
 *  the top level).
 *
 * <b>It is advised to use AllocatorSized as a top-level allocator. Using AllocatorSized
 * as a base allocator for others can cause undefined behaviour. A notable exception is
 * AllocatorSingleton; it is safe to use that one on top of AllocatorSized.</b>
 */
template
<
    typename VariableAllocator = AllocatorMalloc
>
class AllocatorSized: public VariableAllocator
{
public:
    void* allocate(size_t size)
    {
        size_t* p = static_cast<size_t*>(VariableAllocator::allocate(size + sizeof(size_t)));
        if (!p)
        {
            return 0; // pass allocation failure through instead of dereferencing
        }
        *p = size; // store the size in front of the block
        return p + 1;
    }
    void deallocate(void* mem, size_t size)
    {
        size_t* p = static_cast<size_t*>(mem) - 1;
        LASS_ASSERT(*p == size);
        VariableAllocator::deallocate(p, size + sizeof(size_t));
    }
    void deallocate(void* mem)
    {
        size_t* p = static_cast<size_t*>(mem) - 1;
        VariableAllocator::deallocate(p, *p + sizeof(size_t));
    }
};
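
/* Usage sketch (illustrative): the size header makes the short deallocate form
 * possible.
 *
 * @code
 * AllocatorSized<> alloc;
 * void* p = alloc.allocate(100);  // 100 is stored just in front of the block
 * alloc.deallocate(p);            // size is read back from the header
 * @endcode
 */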


/** Allocates and constructs objects of type T with a fixed-size allocator.
 * @ingroup Allocator
 */
template
<
    typename T,
    typename FixedAllocator = AllocatorThrow< AllocatorFixed<> >
>
class AllocatorObject: public FixedAllocator
{
public:
    AllocatorObject():
        FixedAllocator(sizeof(T))
    {
    }
    T* allocate()
    {
        void* p = FixedAllocator::allocate();
        if (!p)
        {
            return 0;
        }
        return new (p) T; // default-construct in the raw block
    }
    void deallocate(T* p)
    {
        if (!p)
        {
            return;
        }
        p->~T(); // destroy before releasing the block
        FixedAllocator::deallocate(p);
    }
};
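
/* Usage sketch (illustrative; assumes <string> is included): allocate and
 * construct objects in one step.
 *
 * @code
 * AllocatorObject<std::string> strings;
 * std::string* s = strings.allocate();  // default-constructs in place
 * *s = "hello";
 * strings.deallocate(s);                // destroys, then frees the block
 * @endcode
 */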

}
}

#endif

// EOF