@@ -20,32 +20,32 @@ template<typename _size_type>
 class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>
 {
     private:
-        typedef AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type> Base;
+        using base_t = AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>;
 
         void copyState(const PoolAddressAllocator& other, _size_type newBuffSz)
         {
             if (blockCount>other.blockCount)
                 freeStackCtr = blockCount-other.blockCount;
 
             #ifdef _NBL_DEBUG
-            assert(Base::checkResize(newBuffSz,Base::alignOffset));
-            assert(freeStackCtr==0u);
+            assert(base_t::checkResize(newBuffSz,base_t::alignOffset));
             #endif // _NBL_DEBUG
 
             for (_size_type i=0u; i<freeStackCtr; i++)
-                getFreeStack(i) = (blockCount-1u-i)*blockSize+Base::combinedOffset;
+                getFreeStack(i) = (blockCount-1u-i)*blockSize+base_t::combinedOffset;
 
             for (_size_type i=0; i<other.freeStackCtr; i++)
             {
-                _size_type freeEntry = other.getFreeStack(i)-other.combinedOffset;
+                _size_type freeEntry = other.getFreeStack(i)-other.base_t::combinedOffset;
                 // check in case of shrink
                 if (freeEntry<blockCount*blockSize)
-                    getFreeStack(freeStackCtr++) = freeEntry+Base::combinedOffset;
+                    getFreeStack(freeStackCtr++) = freeEntry+base_t::combinedOffset;
             }
         }
+
         inline bool safe_shrink_size_common(_size_type& sizeBound, _size_type newBuffAlignmentWeCanGuarantee) noexcept
         {
-            _size_type capacity = get_total_size()-Base::alignOffset;
+            _size_type capacity = get_total_size()-base_t::alignOffset;
             if (sizeBound>=capacity)
                 return false;
 
@@ -71,7 +71,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
         virtual ~PoolAddressAllocator() {}
 
         PoolAddressAllocator(void* reservedSpc, _size_type addressOffsetToApply, _size_type alignOffsetNeeded, _size_type maxAllocatableAlignment, size_type bufSz, size_type blockSz) noexcept :
-            Base(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
+            base_t(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
             blockCount((bufSz-alignOffsetNeeded)/blockSz), blockSize(blockSz), freeStackCtr(0u)
         {
             reset();
@@ -80,29 +80,28 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
         //! When resizing we require that the copying of data buffer has already been handled by the user of the address allocator
         template<typename... Args>
         PoolAddressAllocator(_size_type newBuffSz, PoolAddressAllocator&& other, Args&&... args) noexcept :
-            Base(std::move(other),std::forward<Args>(args)...),
-            blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+            base_t(other,std::forward<Args>(args)...),
+            blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
         {
             copyState(other, newBuffSz);
 
-            other.blockCount = invalid_address;
-            other.blockSize = invalid_address;
-            other.freeStackCtr = invalid_address;
+            other.invalidate();
         }
         template<typename... Args>
         PoolAddressAllocator(_size_type newBuffSz, const PoolAddressAllocator& other, Args&&... args) noexcept :
-            Base(other, std::forward<Args>(args)...),
-            blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+            base_t(other, std::forward<Args>(args)...),
+            blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
         {
             copyState(other, newBuffSz);
         }
 
         PoolAddressAllocator& operator=(PoolAddressAllocator&& other)
         {
-            Base::operator=(std::move(other));
-            std::swap(blockCount,other.blockCount);
-            std::swap(blockSize,other.blockSize);
-            std::swap(freeStackCtr,other.freeStackCtr);
+            base_t::operator=(std::move(other));
+            blockCount = other.blockCount;
+            blockSize = other.blockSize;
+            freeStackCtr = other.freeStackCtr;
+            other.invalidateLocal();
             return *this;
         }
 
@@ -118,15 +117,15 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
         inline void free_addr(size_type addr, size_type bytes) noexcept
         {
             #ifdef _NBL_DEBUG
-            assert(addr>=Base::combinedOffset && (addr-Base::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
+            assert(addr>=base_t::combinedOffset && (addr-base_t::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
             #endif // _NBL_DEBUG
             getFreeStack(freeStackCtr++) = addr;
         }
 
         inline void reset()
         {
             for (freeStackCtr=0u; freeStackCtr<blockCount; freeStackCtr++)
-                getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+Base::combinedOffset;
+                getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+base_t::combinedOffset;
         }
 
         //! conservative estimate, does not account for space lost to alignment
@@ -151,7 +150,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
             for (size_type i=0; i<freeStackCtr; i++)
             {
                 auto freeAddr = getFreeStack(i);
-                if (freeAddr<sizeBound+Base::combinedOffset)
+                if (freeAddr<sizeBound+base_t::combinedOffset)
                     continue;
 
                 tmpStackCopy[boundedCount++] = freeAddr;
@@ -162,7 +161,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
                 std::make_heap(tmpStackCopy,tmpStackCopy+boundedCount);
                 std::sort_heap(tmpStackCopy,tmpStackCopy+boundedCount);
                 // could do sophisticated modified version of std::adjacent_find with a binary search, but F'it
-                size_type endAddr = (blockCount-1u)*blockSize+Base::combinedOffset;
+                size_type endAddr = (blockCount-1u)*blockSize+base_t::combinedOffset;
                 size_type i=0u;
                 for (;i<boundedCount; i++,endAddr-=blockSize)
                 {
@@ -173,7 +172,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
                     sizeBound -= i*blockSize;
                 }
             }
-            return Base::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
+            return base_t::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
         }
 
 
@@ -197,16 +196,36 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
         }
         inline size_type get_total_size() const noexcept
         {
-            return blockCount*blockSize+Base::alignOffset;
+            return blockCount*blockSize+base_t::alignOffset;
         }
 
 
 
         inline size_type addressToBlockID(size_type addr) const noexcept
         {
-            return (addr-Base::combinedOffset)/blockSize;
+            return (addr-base_t::combinedOffset)/blockSize;
         }
     protected:
+
+        /**
+         * @brief Invalidates only fields from this class extension
+         */
+        void invalidateLocal()
+        {
+            blockCount = invalid_address;
+            blockSize = invalid_address;
+            freeStackCtr = invalid_address;
+        }
+
+        /**
+         * @brief Invalidates all fields
+         */
+        void invalidate()
+        {
+            base_t::invalidate();
+            invalidateLocal();
+        }
+
         size_type blockCount;
         size_type blockSize;
         // TODO: free address min-heap and allocated addresses max-heap, packed into the same memory (whatever is not allocated is free)
@@ -215,8 +234,8 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
         // but then should probably have two pool allocators, because doing that changes insertion/removal from O(1) to O(log(N))
         size_type freeStackCtr;
 
-        inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(Base::reservedSpace)[i];}
-        inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(Base::reservedSpace)[i];}
+        inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(base_t::reservedSpace)[i];}
+        inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(base_t::reservedSpace)[i];}
 };
 
 
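The main behavioural change in the hunks above is how moved-from allocators are handled: move assignment no longer std::swap's the fields, and the resize-move constructor no longer poisons three fields by hand; both now copy the fields and then invalidate the source through the new invalidate()/invalidateLocal() helpers. Below is a minimal standalone sketch of that pattern; the struct names and the base invalidate() are simplified stand-ins for illustration, not the real Nabla AddressAllocatorBase hierarchy.

#include <cstdint>
#include <utility>

// Simplified stand-in for the base allocator; only the invalidation
// pattern introduced by the diff is reproduced here.
struct base_alloc_t
{
    static constexpr uint32_t invalid_address = ~0u;

    uint32_t alignOffset = 0u;
    uint32_t combinedOffset = 0u;

    // poison the base fields of a moved-from object (assumed to exist, as the diff implies)
    void invalidate()
    {
        alignOffset = invalid_address;
        combinedOffset = invalid_address;
    }
};

struct pool_alloc_t : base_alloc_t
{
    uint32_t blockCount = 0u;
    uint32_t blockSize = 0u;
    uint32_t freeStackCtr = 0u;

    // invalidates only the fields added by this class (mirrors invalidateLocal in the diff)
    void invalidateLocal()
    {
        blockCount = invalid_address;
        blockSize = invalid_address;
        freeStackCtr = invalid_address;
    }

    // invalidates the inherited fields as well (mirrors invalidate in the diff)
    void invalidate()
    {
        base_alloc_t::invalidate();
        invalidateLocal();
    }

    // copy the source's state, then poison the source instead of swapping with it
    pool_alloc_t& operator=(pool_alloc_t&& other)
    {
        base_alloc_t::operator=(std::move(other));
        blockCount = other.blockCount;
        blockSize = other.blockSize;
        freeStackCtr = other.freeStackCtr;
        other.invalidateLocal();
        return *this;
    }
};

Copy-then-poison makes accidental use after move fail loudly, since every field of the moved-from allocator reads as invalid_address, rather than silently operating on whatever state the swap-based assignment used to leave behind; that appears to be the motivation for dropping the std::swap calls.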