library of assembled shared sources

http://lass.cocamware.com

allocator.h

Go to the documentation of this file.
00001 /** @file
00002  *  @author Bram de Greve (bramz@users.sourceforge.net)
00003  *  @author Tom De Muer (tomdemuer@users.sourceforge.net)
00004  *
00005  *  *** BEGIN LICENSE INFORMATION ***
00006  *  
00007  *  The contents of this file are subject to the Common Public Attribution License 
00008  *  Version 1.0 (the "License"); you may not use this file except in compliance with 
00009  *  the License. You may obtain a copy of the License at 
00010  *  http://lass.sourceforge.net/cpal-license. The License is based on the 
00011  *  Mozilla Public License Version 1.1 but Sections 14 and 15 have been added to cover 
00012  *  use of software over a computer network and provide for limited attribution for 
00013  *  the Original Developer. In addition, Exhibit A has been modified to be consistent 
00014  *  with Exhibit B.
00015  *  
00016  *  Software distributed under the License is distributed on an "AS IS" basis, WITHOUT 
00017  *  WARRANTY OF ANY KIND, either express or implied. See the License for the specific 
00018  *  language governing rights and limitations under the License.
00019  *  
00020  *  The Original Code is LASS - Library of Assembled Shared Sources.
00021  *  
00022  *  The Initial Developer of the Original Code is Bram de Greve and Tom De Muer.
00023  *  The Original Developer is the Initial Developer.
00024  *  
00025  *  All portions of the code written by the Initial Developer are:
00026  *  Copyright (C) 2004-2007 the Initial Developer.
00027  *  All Rights Reserved.
00028  *  
00029  *  Contributor(s):
00030  *
00031  *  Alternatively, the contents of this file may be used under the terms of the 
00032  *  GNU General Public License Version 2 or later (the GPL), in which case the 
00033  *  provisions of GPL are applicable instead of those above.  If you wish to allow use
00034  *  of your version of this file only under the terms of the GPL and not to allow 
00035  *  others to use your version of this file under the CPAL, indicate your decision by 
00036  *  deleting the provisions above and replace them with the notice and other 
00037  *  provisions required by the GPL License. If you do not delete the provisions above,
00038  *  a recipient may use your version of this file under either the CPAL or the GPL.
00039  *  
00040  *  *** END LICENSE INFORMATION ***
00041  */
00042 
00043 /** @defgroup Allocator
00044  *  @brief Custom allocator library
00045  *
00046  *  @section Introduction
00047  *
00048  *  This library provides a nice flexible set of custom allocators you can combine in
00049  *  any way you like, as long as it makes sense.
00050  *
00051  *  There are in general two kinds of allocator concepts: fixed-size and variable-size 
00052  *  allocators, with the obvious meanings.  The former is constructed with the requested 
00053  *  block size and (de)allocates blocks of at least that size.  The latter has a default 
00054  *  constructor and the requested block size is passed at _both_ allocation and deallocation.
00055  *  Some variable-size allocators (like AllocatorMalloc) also provide deallocate(void* mem) 
00056  *  without the block size, but that's not a requirement.
00057  *
00058  *  @code
00059  *  class FixedSizeAllocator
00060  *  {
00061  *  public:
00062  *      FixedSizeAllocator(size_t size);
00063  *      void* allocate();
00064  *      void deallocate(void* mem);
00065  *  };
00066  *
00067  *  class VariableSizeAllocator
00068  *  {
00069  *      VariableSizeAllocator();
00070  *      void* allocate(size_t size);
00071  *      void deallocate(void* mem, size_t size);
00072  *  };
00073  *  @endcode
00074  *
00075  *  There are bottom-level allocators (like AllocatorMalloc) that do real allocation work
00076  *  and exist on their own.  And there are adaptor allocators (like AllocatorLocked) that
00077  *  extend/adapt another allocator to provide some extra functionality.  Some of them
00078  *  (like AllocatorLocked) implement both the fixed-size and variable-size concept.
00079  *  Others (like AllocatorVariableSize) convert a fixed-size interface to variable-size.
00080  *
00081  *  On top of all that, there's AllocatorClassAdaptor you can use to overload class-specific
00082  *  new and delete using a variable-size allocator.
00083  */
00084 
00085 #ifndef LASS_GUARDIAN_OF_INCLUSION_UTIL_ALLOCATOR_H
00086 #define LASS_GUARDIAN_OF_INCLUSION_UTIL_ALLOCATOR_H
00087 
00088 #include "util_common.h"
00089 #include "atomic.h"
00090 #include "singleton.h"
00091 #include "thread.h"
00092 #include "../meta/bool.h"
00093 #include "../num/safe_bool.h"
00094 
00095 namespace lass
00096 {
00097 namespace util
00098 {
00099 namespace impl
00100 {
    /** @ingroup Allocator
     *  @internal
     *  Compile-time probe that tests whether &Allocator::allocate converts to the
     *  function-pointer type AllocatorFun (classic sizeof-based overload trick:
     *  the meta::True overload is picked only if the conversion is viable).
     */
    template <typename Allocator, typename AllocatorFun>
    struct IsCompatibleAllocator
    {
        static meta::True test(AllocatorFun);
        static meta::False test(...);
        enum { value = (sizeof(test(&Allocator::allocate)) == sizeof(meta::True)) };
        typedef typename meta::Bool<value>::Type Type;
    };

    /** @ingroup Allocator
     *  @internal
     *  true if Allocator implements the fixed-size concept: void* allocate().
     */
    template <typename Allocator>
    struct IsFixedAllocator: public impl::IsCompatibleAllocator<Allocator, void*(*)()> {};
    
    /** @ingroup Allocator
     *  @internal
     *  true if Allocator implements the variable-size concept: void* allocate(size_t).
     */
    template <typename Allocator>
    struct IsVariableAllocator: public impl::IsCompatibleAllocator<Allocator, void*(*)(size_t)> {}; 
00124 }
00125 
00126 
00127 
/** Use an Allocator to implement a class' new and delete
 *  @ingroup Allocator
 *
 *  Derive from this class to implement new and delete based on Allocator.
 *  It uses a singleton of Allocator that is shared per 
 *  <Allocator type, destruction priority> pair.  So if you have another
 *  class using the same allocator and destruction priority, the allocator
 *  singleton will be shared.
 */
template 
<
    typename VariableAllocator,
    int destructionPriority = destructionPriorityInternalAllocators
>
class AllocatorClassAdaptor
{
public:
    /** throwing scalar new: a null result from the allocator becomes std::bad_alloc. */
    static void* operator new(std::size_t size)
    {
        void* result = allocator()->allocate(size);
        if (!result)
        {
            throw std::bad_alloc();
        }
        return result;
    }

    /** nothrow scalar new: any exception from the allocator is mapped to a null return. */
    static void* operator new(std::size_t size, std::nothrow_t) throw()
    {
        try
        {
            return allocator()->allocate(size);
        }
        catch (...)
        {
            return 0;
        }
    }

    /** placement new: the caller supplies the memory, nothing to allocate. */
    static void* operator new(std::size_t, void* mem)
    {
        return mem;
    }

    /** throwing array new; same contract as the scalar form. */
    static void* operator new[](std::size_t size)
    {
        void* result = allocator()->allocate(size);
        if (!result)
        {
            throw std::bad_alloc();
        }
        return result;
    }

    /** nothrow array new; same contract as the scalar form. */
    static void* operator new[](std::size_t size, std::nothrow_t) throw()
    {
        try
        {
            return allocator()->allocate(size);
        }
        catch (...)
        {
            return 0;
        }
    }

    /** placement array new: the caller supplies the memory. */
    static void* operator new[](std::size_t, void* mem)
    {
        return mem;
    }

    /** sized scalar delete, forwarded to the shared allocator singleton. */
    static void operator delete(void* mem, std::size_t size)
    {
        allocator()->deallocate(mem, size);
    }

    /** NOTE(review): the standard nothrow deallocation function takes
     *  (void*, const std::nothrow_t&) without a size parameter -- confirm this
     *  three-parameter form is actually found as the matching operator delete
     *  for the nothrow new above.
     */
    static void operator delete(void* mem, std::size_t size, std::nothrow_t)
    {
        allocator()->deallocate(mem, size);
    }

    /** placement-delete counterpart: caller owns the memory, so do nothing. */
    static void operator delete(void*, std::size_t, std::nothrow_t, void*)
    {
    }

    /** sized array delete, forwarded to the shared allocator singleton. */
    static void operator delete[](void* mem, std::size_t size)
    {
        allocator()->deallocate(mem, size);
    }

    /** NOTE(review): same non-standard signature concern as the scalar nothrow delete. */
    static void operator delete[](void* mem, std::size_t size, std::nothrow_t)
    {
        allocator()->deallocate(mem, size);
    }

    /** placement-delete counterpart: caller owns the memory, so do nothing. */
    static void operator delete[](void*, std::size_t, std::nothrow_t, void*)
    {
    }

private:

    /** the allocator singleton shared by all classes using the same
     *  <VariableAllocator, destructionPriority> pair -- see util::Singleton.
     */
    static VariableAllocator* allocator()
    {
        return util::Singleton<VariableAllocator, destructionPriority>::instance();
    }
};
00234 
00235 
00236 
/** @ingroup Allocator
 *  @arg concept: VariableAllocator
 *
 *  Thin wrapper around ::malloc/::free.  Also offers the size-less
 *  deallocate(void*) form, which the VariableAllocator concept does not require.
 */
class AllocatorMalloc
{
public:
    void* allocate(size_t size) { return ::malloc(size); }
    void deallocate(void* mem) { ::free(mem); }
    void deallocate(void* mem, size_t) { ::free(mem); }
};
00256 
00257 
00258 
00259 /** Guard a MT unsafe allocator with some lock.
00260  *  @ingroup Allocator
00261  *  @arg concept: VariableAllocator or FixedAllocator, depending on template argument   
00262  */
00263 template
00264 <
00265     typename Allocator,
00266     typename Lock,
00267     typename Locker = util::Locker<Lock>
00268 >
00269 class AllocatorLocked: public Allocator
00270 {
00271 public:
00272     AllocatorLocked():
00273         Allocator()
00274     {
00275     }
00276     AllocatorLocked(size_t size):
00277         Allocator(size)
00278     {
00279     }
00280     void* allocate()
00281     {
00282         Locker locker(lock_);
00283         return Allocator::allocate();
00284     }
00285     void* allocate(size_t size)
00286     {
00287         Locker locker(lock_);
00288         return Allocator::allocate(size);
00289     }
00290     void deallocate(void* mem)
00291     {
00292         Locker locker(lock_);
00293         Allocator::deallocate(mem);
00294     }
00295     void deallocate(void* mem, size_t size)
00296     {
00297         Locker locker(lock_);
00298         Allocator::deallocate(mem, size);
00299     }
00300 private:
00301     Lock lock_;
00302 };
00303 
00304 
00305 
/** Instantiates an Allocator per thread.
 *  @ingroup Allocator
 *  @arg concept: VariableAllocator or FixedAllocator, depending on template argument
 *  @arg requirements: Allocator must be copy-constructible
 *  @warning it is potentially unsafe to allocate and deallocate memory in two different threads 
 *      if you're using this one!  This depends on underlying Allocator
 */
template
<
    typename Allocator
>
class AllocatorPerThread
{
public:
    AllocatorPerThread():
        allocator_()
    {
    }
    AllocatorPerThread(size_t size):
        allocator_(size)
    {
    }
    /** fixed-size allocate, forwarded to this thread's Allocator instance */
    void* allocate()
    {
        return allocator_->allocate();
    }
    /** variable-size allocate, forwarded to this thread's Allocator instance */
    void* allocate(size_t size)
    {
        return allocator_->allocate(size);
    }
    // note: returning a void expression is valid and keeps these forwarders one-liners
    void deallocate(void* mem)
    {
        return allocator_->deallocate(mem);
    }
    void deallocate(void* mem, size_t size)
    {
        return allocator_->deallocate(mem, size);
    }
private:
    // one Allocator copy per thread; operator-> presumably yields the calling
    // thread's instance -- see util::ThreadLocalVariable for the exact contract
    util::ThreadLocalVariable<Allocator> allocator_;
};
00347 
00348 
00349 
/** A variable-size allocator built on top of a fixed-size allocator
 *  @ingroup Allocator
 *  @arg concept: VariableAllocator
 *  @warning CURRENTLY THREAD UNSAFE!
 *
 *  Keeps one FixedAllocator per requested size, created lazily on first use.
 */
template 
<
    typename FixedAllocator
>
class AllocatorVariable
{
public:
    void* allocate(size_t size)
    {
        return fixedAllocator(size).allocate();
    }
    void deallocate(void* mem, size_t size)
    {
        fixedAllocator(size).deallocate(mem);
    }
private:
    typedef std::map<size_t, FixedAllocator> TFixedAllocators;
    
    /** fetch the fixed-size allocator for @a size, creating it on first request. */
    FixedAllocator& fixedAllocator(size_t size)
    {
        typename TFixedAllocators::iterator it = fixedAllocators_.lower_bound(size);
        if (it == fixedAllocators_.end() || it->first != size)
        {
            // hinted insert: it already points at the right position
            it = fixedAllocators_.insert(it, std::make_pair(size, FixedAllocator(size)));
        }
        return it->second;
    }
    
    TFixedAllocators fixedAllocators_;
};
00386 
00387 
00388 
00389 /** fixes a variable-size allocator to one size.
00390  */
00391 template
00392 <
00393     typename VariableAllocator = AllocatorMalloc
00394 >
00395 class AllocatorFixed: private VariableAllocator
00396 {
00397 public:
00398     AllocatorFixed(size_t size): 
00399         size_(size)
00400     {
00401     }
00402     void* allocate()
00403     {
00404         return VariableAllocator::allocate(size_);
00405     }
00406     void deallocate(void* mem)
00407     {
00408         VariableAllocator::deallocate(mem, size_);
00409     }
00410 private:
00411     size_t size_;
00412 };
00413 
00414 
00415 
/** @ingroup Allocator
 *  Bin layout with one bin per byte: size s maps to bin s - 1.
 */
struct BinnerOne
{
    static size_t bin(size_t size)
    {
        return size == 0 ? 0 : size - 1;
    }
    static size_t size(size_t bin)
    {
        return bin + 1;
    }
};
00429 
/** @ingroup Allocator
 *  Bin layout with power-of-two bin sizes: bin b holds blocks of 2^b bytes.
 */
struct BinnerPower2
{
    static size_t bin(size_t size)
    {
        // smallest b with 2^b >= size
        size_t b = 0;
        for (; BinnerPower2::size(b) < size; ++b)
        {
        }
        return b;
    }
    static size_t size(size_t bin)
    {
        return size_t(1) << bin;
    }
};
00448 
/** @ingroup Allocator
 *  Bin layout with bin sizes padded to multiples of @a multiple.
 */
template <size_t multiple>
struct BinnerPadded
{
    static size_t bin(size_t size)
    {
        if (size == 0)
        {
            return 0;
        }
        return (size - 1) / multiple;
    }
    static size_t size(size_t bin)
    {
        return multiple * (bin + 1);
    }
};
00463 
00464 /** @ingroup Allocator
00465  */
00466 template
00467 <
00468     typename FixedAllocator,
00469     size_t maxBinSize = 128,
00470     typename Binner = BinnerOne,
00471     typename VariableAllocator = AllocatorMalloc
00472 >
00473 class AllocatorBinned: public VariableAllocator
00474 {
00475 public:
00476     AllocatorBinned()
00477     {
00478         initFixed();
00479     }
00480     AllocatorBinned(const AllocatorBinned& other)
00481     {
00482         copyInitFixed(other);
00483     }
00484     ~AllocatorBinned()
00485     {
00486         destroyFixed(numBins());
00487     }
00488     void* allocate(size_t size)
00489     {
00490         if (size > maxBinSize)
00491         {
00492             return VariableAllocator::allocate(size);
00493         }
00494         return fixedAllocators_[Binner::bin(size)].allocate();
00495     }
00496     void deallocate(void* mem, size_t size)
00497     {
00498         if (size > maxBinSize)
00499         {
00500             VariableAllocator::deallocate(mem, size);
00501         }
00502         else
00503         {
00504             fixedAllocators_[Binner::bin(size)].deallocate(mem);
00505         }
00506     }
00507 private:
00508     AllocatorBinned& operator=(const AllocatorBinned&);
00509     
00510     const size_t numBins() const
00511     {
00512         return Binner::bin(maxBinSize) + 1;
00513     }
00514     void allocFixed()
00515     {
00516         fixedAllocators_ = static_cast<FixedAllocator*>(
00517             VariableAllocator::allocate(numBins() * sizeof(FixedAllocator)));
00518         if (!fixedAllocators_)
00519         {
00520             throw std::bad_alloc();
00521         }
00522     }
00523     void initFixed()
00524     {
00525         // Kids, don't try this at home.  We're trained professionals here! ;)
00526         //
00527         allocFixed();
00528         const size_t n = numBins();
00529         for (size_t i = 0; i < n; ++i)
00530         {
00531             try
00532             {
00533                 new(&fixedAllocators_[i]) FixedAllocator(Binner::size(i));
00534             }
00535             catch (...)
00536             {
00537                 destroyFixed(i);
00538                 throw;
00539             }
00540         }
00541     }
00542     void copyInitFixed(const AllocatorBinned& other)
00543     {
00544         // Kids, don't try this either.
00545         //
00546         allocFixed();
00547         const size_t n = numBins();
00548         for (size_t i = 0; i < n; ++i)
00549         {
00550             try
00551             {
00552                 new(&fixedAllocators_[i]) FixedAllocator(other.fixedAllocators_[i]);
00553             }
00554             catch (...)
00555             {
00556                 destroyFixed(i);
00557                 throw;
00558             }
00559         }
00560     }
00561     void destroyFixed(size_t i)
00562     {
00563         // and neither this.
00564         //
00565         while (i > 0)
00566         {
00567             fixedAllocators_[--i].~FixedAllocator();
00568         }
00569         VariableAllocator::deallocate(fixedAllocators_, numBins() * sizeof(FixedAllocator));
00570     }
00571 
00572     FixedAllocator* fixedAllocators_;
00573 };
00574 
00575 
00576 
/** @ingroup Allocator
 *  @arg concept: FixedAllocator except for the constructor.
 *
 *  Binds the block size of a FixedAllocator at compile time, giving it a
 *  default constructor.
 */
template <typename FixedAllocator, size_t size>
class AllocatorStaticFixed: public FixedAllocator
{
public:
    AllocatorStaticFixed():
        FixedAllocator(size)
    {
    }
};
00593 
00594 
00595 
/** @ingroup Allocator
 *  @arg concept: same as Allocator
 *
 *  Turns a null result of the wrapped allocator into a std::bad_alloc exception.
 */
template
<
    typename Allocator
>
class AllocatorThrow: public Allocator
{
public:
    AllocatorThrow():
        Allocator()
    {
    }
    AllocatorThrow(size_t size):
        Allocator(size)
    {
    }
    void* allocate()
    {
        void* const p = Allocator::allocate();
        if (!p)
        {
            throw std::bad_alloc();
        }
        return p;
    }
    void* allocate(size_t size)
    {
        void* const p = Allocator::allocate(size);
        if (!p)
        {
            throw std::bad_alloc();
        }
        return p;
    }
};
00631 
00632 
00633 
/** @ingroup Allocator
 *  @arg concept: same as Allocator
 *
 *  Maps any exception thrown by the wrapped allocator to a null return.
 */
template
<
    typename Allocator
>
class AllocatorNoThrow: public Allocator
{
public:
    AllocatorNoThrow():
        Allocator()
    {
    }
    AllocatorNoThrow(size_t size):
        Allocator(size)
    {
    }
    void* allocate()
    {
        void* p = 0;
        try
        {
            p = Allocator::allocate();
        }
        catch (...)
        {
            p = 0;
        }
        return p;
    }
    void* allocate(size_t size)
    {
        void* p = 0;
        try
        {
            p = Allocator::allocate(size);
        }
        catch (...)
        {
            p = 0;
        }
        return p;
    }
};
00675 
00676 
00677 
/** @ingroup Allocator
 *  @arg concept: same as Allocator, except for the constructor if Allocator == FixedAllocator.
 *
 *  Forwards every call to a VariableAllocator instance shared between all
 *  AllocatorSingleton instances with the same <allocator, destructionPriority>
 *  pair -- see util::Singleton.
 */
template
<
    typename VariableAllocator,
    int destructionPriority = destructionPriorityInternalAllocators
>
class AllocatorSingleton
{
public:
    void* allocate()
    {
        return allocator()->allocate();
    }
    void* allocate(size_t size)
    {
        return allocator()->allocate(size);
    }
    // note: returning a void expression is valid and keeps these forwarders one-liners
    void deallocate(void* mem)
    {
        return allocator()->deallocate(mem);
    }
    void deallocate(void* mem, size_t size)
    {
        return allocator()->deallocate(mem, size);
    }
    /** the shared allocator instance, obtained from util::Singleton. */
    VariableAllocator* allocator()
    {
        return util::Singleton<VariableAllocator, destructionPriority>::instance();
    }
};
00710 
00711 
00712 
/** @ingroup Allocator
 *  @warning THREAD UNSAFE!
 *
 *  Wraps another allocator and counts allocations/deallocations per block size.
 *  The tally is logged at destruction time, and any unbalance (leak or double
 *  deallocation) is reported on the error stream.
 */
template
<
    typename Allocator
>
class AllocatorStats: public Allocator
{
public:
    AllocatorStats():
        Allocator()
    {
    }
    /** fixed-size flavour: pre-registers the single block size so the
     *  size-less allocate()/deallocate() below have a counter to update.
     */
    AllocatorStats(size_t size):
        Allocator(size)
    {
        stats_.insert(std::make_pair(size, Stat()));
    }
    ~AllocatorStats()
    {
        LASS_CLOG << "~" << typeid(AllocatorStats).name() << "()" << std::endl;
        LASS_CLOG << "size: allocations/deallocations" << std::endl;
        for (typename TStats::const_iterator i = stats_.begin(); i != stats_.end(); ++i)
        {
            Stat stat = i->second;
            LASS_CLOG << i->first << ": " << stat.allocations << "/" << stat.deallocations << std::endl;
            if (stat.allocations != stat.deallocations)
            {
                LASS_CERR << "[LASS RUN MSG] Allocation unbalance detected in AllocatorStats" << std::endl;
            }
        }
    }
    void* allocate(size_t size)
    {
        // std::map::operator[] value-initializes a missing Stat, so counters start at zero
        ++stats_[size].allocations;
        return Allocator::allocate(size);
    }
    /** fixed-size flavour: counts on the first (and presumably only) entry.
     *  NOTE(review): assumes the size_t constructor ran so stats_ is non-empty;
     *  calling this on a default-constructed instance dereferences end().
     */
    void* allocate()
    {
        ++stats_.begin()->second.allocations;
        return Allocator::allocate();
    }
    void deallocate(void* mem, size_t size)
    {
        ++stats_[size].deallocations;
        Allocator::deallocate(mem, size);
    }
    /** fixed-size flavour; same precondition as the size-less allocate(). */
    void deallocate(void* mem)
    {
        ++stats_.begin()->second.deallocations;
        Allocator::deallocate(mem);
    }
private:
    // per-size counters; zero on creation through value-initialization
    struct Stat
    {
        unsigned allocations;
        unsigned deallocations;
    };
    typedef std::map<size_t, Stat> TStats;
    TStats stats_;
};
00775 
00776 
00777 
00778 
00779 
00780 /** A fixed-size free-list allocator
00781  *  @ingroup Allocator
00782  *  @arg concept: FixedAllocator
00783  *  @arg thread UNSAFE
00784  *  @arg copy-constructible, not assignable
00785  */
00786 template 
00787 <
00788     typename FixedAllocator = AllocatorFixed<AllocatorMalloc>
00789 >
00790 class AllocatorFreeList: public FixedAllocator
00791 {
00792 public:
00793     AllocatorFreeList(size_t iSize):
00794         FixedAllocator(std::max<size_t>(sizeof(AllocationNode),iSize)),
00795         top_(0)
00796     {
00797     }
00798     ~AllocatorFreeList()
00799     {
00800         while (top_)
00801         {
00802             AllocationNode* node = top_;
00803             top_ = node->next;
00804             FixedAllocator::deallocate(node);
00805         }
00806     }
00807     AllocatorFreeList(const AllocatorFreeList& iOther):
00808         FixedAllocator(static_cast<const FixedAllocator&>(iOther)),
00809         top_(0)
00810     {
00811     }
00812     void* allocate()
00813     {
00814         if (!top_)
00815         {
00816             return FixedAllocator::allocate();
00817         }
00818         AllocationNode* topNode = top_;
00819         top_ = topNode->next;
00820         return topNode;
00821     }
00822     void deallocate(void* iPointer)
00823     {
00824         if (!iPointer)
00825             return;
00826         AllocationNode* temp = static_cast<AllocationNode*>(iPointer);
00827         temp->next = top_;
00828         top_ = temp;
00829     }
00830 private:
00831     AllocatorFreeList& operator=(AllocatorFreeList&);
00832 
00833     struct AllocationNode
00834     {
00835         AllocationNode* next;
00836     };
00837     AllocationNode* top_;
00838 };
00839 
00840 
00841 
00842 
/** A fixed-size lock-free free-list allocator
 *  @ingroup Allocator
 *  @arg concept: FixedAllocator
 *  @arg thread safe, lock-free
 *  @arg copy-constructible, not assignable
 *
 *  Same idea as AllocatorFreeList, but the list head is a tagged pointer
 *  updated with compare-and-swap.  The tag is incremented on every update,
 *  which is the usual guard against the ABA problem.
 */
template 
<
    typename FixedAllocator = AllocatorFixed<AllocatorMalloc>
>
class AllocatorConcurrentFreeList: public FixedAllocator
{
public:
    AllocatorConcurrentFreeList(size_t iSize):
        FixedAllocator(std::max<size_t>(sizeof(AllocationNode),iSize)),
        top_()
    {
    }
    ~AllocatorConcurrentFreeList()
    {
        // destruction itself is not thread safe: no other thread may touch the
        // allocator while the cached nodes are handed back.
        TTaggedPtr top = top_;
        AllocationNode* node = top.get();
        while (node)
        {
            AllocationNode* next = node->next;
            FixedAllocator::deallocate(node);
            node = next;
        }
    }
    /** copies only the underlying allocator; the free list starts out empty. */
    AllocatorConcurrentFreeList(const AllocatorConcurrentFreeList& iOther):
        FixedAllocator(static_cast<const FixedAllocator&>(iOther)),
        top_()
    {
    }
    void* allocate()
    {
        // CAS loop: pop the head node, or fall through to the underlying
        // allocator when the list is empty.
        TTaggedPtr topNode;
        TTaggedPtr next;
        do
        {
            topNode = top_;
            if (!topNode)
            {
                return FixedAllocator::allocate();
            }
            next = TTaggedPtr(topNode->next, topNode.tag() + 1);
        }
        while (!top_.atomicCompareAndSwap(topNode, next));
        return topNode.get();
    }
    void deallocate(void* iPointer)
    {
        if (!iPointer)
            return;
        // CAS loop: push the node back as the new head
        AllocationNode* temp = static_cast<AllocationNode*>(iPointer);
        TTaggedPtr topNode;
        TTaggedPtr newTop;
        do
        {
            topNode = top_;
            temp->next = topNode.get();
            newTop = TTaggedPtr(temp, topNode.tag() + 1);
        }
        while (!top_.atomicCompareAndSwap(topNode, newTop));
    }
private:
    // intrusive link stored inside each free node
    struct AllocationNode
    {
        AllocationNode* next;
    };
    
    typedef TaggedPtr<AllocationNode> TTaggedPtr;

    AllocatorConcurrentFreeList& operator=(const AllocatorConcurrentFreeList&);

    volatile TTaggedPtr top_;
};
00920 
00921 
00922 
00923 /** A simple fixed-size block allocator
00924  *  @ingroup Allocator
00925  *  @arg concept: FixedAllocator
00926  *  @arg thread UNSAFE.
00927  *  @arg not copy-constructible, not assignable
00928  *  @arg The blocks are only deallocated at destruction time to simplify the allocation/deallocation logic.
00929  *      However, nodes that are deallocate can still be reused.
00930  *  @arg It is NOT SAFE to deallocate memory using another instance of AllocatorSimpleBlock than the one
00931  *      used to allocate it.
00932  */
00933 template 
00934 <
00935     size_t requestedBlockSize = 8192,
00936     typename FixedAllocator = AllocatorFixed<AllocatorMalloc>
00937 >
00938 class AllocatorSimpleBlock: public FixedAllocator
00939 {
00940 public:
00941     AllocatorSimpleBlock(size_t size):
00942         FixedAllocator(blockSize(size, 0, 0)),
00943         blocks_(0),
00944         pool_(0)
00945     {
00946         blockSize(size, &size_, &allocationsPerBlock_);
00947     }
00948     ~AllocatorSimpleBlock()
00949     {
00950         while (blocks_)
00951         {
00952             Node* const node = blocks_;
00953             blocks_ = node->next;
00954             FixedAllocator::deallocate(node);
00955         }
00956     }
00957     void* allocate()
00958     {
00959         if (!pool_ && !grow())
00960         {
00961             return 0;
00962         }
00963         LASS_ASSERT(pool_);
00964         Node* p = pool_;
00965         pool_ = p->next;
00966         return p;
00967     }
00968     void deallocate(void* pointer)
00969     {
00970         Node* p = static_cast<Node*>(pointer);
00971         p->next = pool_;
00972         pool_ = p;
00973     }
00974 
00975 private:
00976     
00977     struct Node
00978     {
00979         Node* next;
00980     };
00981 
00982     const bool grow()
00983     {
00984         char* mem = static_cast<char*>(FixedAllocator::allocate());
00985         if (!mem)
00986         {
00987             return false;
00988         }
00989         Node* const block = reinterpret_cast<Node*>(mem);
00990         block->next = blocks_;
00991         blocks_ = block;
00992         mem += sizeof(Node);
00993         for (size_t i = 0; i < allocationsPerBlock_; ++i)
00994         {
00995             deallocate(mem); // pushes in pool
00996             mem += size_;
00997         }
00998         return true;
00999     }
01000     static const size_t blockSize(size_t size, size_t* pSize, size_t* pAllocationsPerBlock)
01001     {
01002         size = std::max(sizeof(Node), size);
01003         const size_t maxBlockSize = std::max(sizeof(Node), requestedBlockSize);
01004         const size_t allocationsPerBlock = std::max(size_t(1), (maxBlockSize - sizeof(Node)) / size);
01005         const size_t blockSize = sizeof(Node) + allocationsPerBlock * size;
01006         if (pSize) *pSize = size;
01007         if (pAllocationsPerBlock) *pAllocationsPerBlock = allocationsPerBlock;
01008         return blockSize;
01009     }
01010 
01011     AllocatorSimpleBlock(const AllocatorSimpleBlock&);
01012     AllocatorSimpleBlock& operator=(const AllocatorSimpleBlock&);
01013 
01014     Node* blocks_;
01015     Node* pool_;
01016     size_t size_;
01017     size_t allocationsPerBlock_;
01018 };
01019 
01020 
01021 
01022 /** @ingroup Allocator
01023  *  @arg concept: VariableAllocator
01024  *
01025  *  AllocatorAligned adds @a alignment bytes to the requested block size to be able to shift
01026  *  the pointer to a alignment boundary.
01027  */
01028 template
01029 <
01030     unsigned char alignment,
01031     typename VariableAllocator = AllocatorMalloc
01032 >
01033 class AllocatorAligned: public VariableAllocator
01034 {
01035 public:
01036     void* allocate(size_t size)
01037     {
01038         return align(VariableAllocator::allocate(size + alignment));
01039     }
01040     void deallocate(void* mem, size_t size)
01041     {
01042         VariableAllocator::deallocate(unalign(mem), size + alignment);
01043     }
01044 private:
01045     void* align(void* mem)
01046     {
01047         const num::TuintPtr address = reinterpret_cast<num::TuintPtr>(mem);
01048         const unsigned char offset = alignment - (address % alignment);
01049         LASS_ASSERT(offset > 0);
01050         unsigned char* const aligned = static_cast<unsigned char*>(mem) + offset;
01051         *(aligned - 1) = offset;
01052         return aligned;
01053     }
01054     void* unalign(void* mem)
01055     {
01056         unsigned char* const aligned = static_cast<unsigned char*>(mem);
01057         const unsigned char offset = *(aligned - 1);
01058         return aligned - offset;
01059     }
01060 };
01061 
01062 
01063 
01064 /** @ingroup Allocator
01065  *  @arg concept: VariableAllocator
01066  *
01067  *  AllocatorSized adds sizeof(size_t) bytes in front of the returned pointer to store the
01068  *  requested block size.  Because of that, you can use the simpler form 
01069  *  <tt>void deallocate(void* mem)</tt> to deallocate the memory.
01070  *
01071  *  @warning the fact that this allocator supports <tt>void deallocate(void* mem)</tt> does
01072  *      @b NOT mean it implements the FixedAllocator concept, not even partially.  So when
01073  *      you use AllocatorSized as a base for an allocator that implements both the 
01074  *      VariableAllocator as FixedAllocator concept depending on the base allocator (such as
01075  *      AllocatorStats), the result will be a VariableAllocator.  You cannot generally assume 
01076  *      you will be able to use <tt>void deallocate(void* mem)</tt> on the resulting allocator
01077  *      as well.  Particularly for AllocatorStats, it is know that this will cause undefined
01078  *      behaviour.  However, using AllocatorStats as base AllocatorSized will achieve the 
01079  *      desired effect perfectly well, and will cause no problem (= using AllocatorSized
01080  *      on top-level)
01081  *
01082  *  <b>it is advised to use AllocatorSized as a top-level allocator.  Using AllocatorSized
01083  *  as a base allocator for others can cause undefined behaviour.  A notable exception is
01084  *  AllocatorSingleton, it is safe to use that one on top of AllocatorSized.</b>
01085  */
01086 template
01087 <
01088     typename VariableAllocator = AllocatorMalloc
01089 >
01090 class AllocatorSized: public VariableAllocator
01091 {
01092 public:
01093     void* allocate(size_t size)
01094     {
01095         size_t* p = static_cast<size_t*>(VariableAllocator::allocate(size + sizeof(size_t)));
01096         *p = size;
01097         return p + 1;
01098     }
01099     void deallocate(void* mem, size_t size)
01100     {
01101         size_t* p = static_cast<size_t*>(mem) - 1;
01102         LASS_ASSERT(*p == size);
01103         VariableAllocator::deallocate(p, size + sizeof(size_t));
01104     }
01105     void deallocate(void* mem)
01106     {
01107         size_t* p = static_cast<size_t*>(mem) - 1;
01108         VariableAllocator::deallocate(p, *p + sizeof(size_t));
01109     }
01110 };
01111 
01112 }
01113 }
01114 
01115 #endif
01116 
01117 // EOF

Generated on Mon Nov 10 14:19:59 2008 for Library of Assembled Shared Sources by doxygen 1.5.7.1
SourceForge.net Logo