@@ -20,32 +20,32 @@ template<typename _size_type>
2020class PoolAddressAllocator : public AddressAllocatorBase <PoolAddressAllocator<_size_type>,_size_type>
2121{
2222 private:
23- typedef AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type> Base ;
23+ using base_t = AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>;
2424
/**
 * @brief Rebuilds this allocator's free-block stack from another allocator's state (used on resize).
 *
 * Assumes blockCount/blockSize were already set for *this* (the resize constructors do so in their
 * init lists, with freeStackCtr starting at 0) and that the user has already copied the data buffer.
 *
 * @param other     allocator whose free-stack entries are being migrated
 * @param newBuffSz new buffer size in bytes (only consulted by the debug-mode resize check)
 */
void copyState(const PoolAddressAllocator& other, _size_type newBuffSz)
{
	// on grow: every block past `other`'s range starts out free
	if (blockCount>other.blockCount)
		freeStackCtr = blockCount-other.blockCount;

	#ifdef _NBL_DEBUG
		assert(base_t::checkResize(newBuffSz,base_t::alignOffset));
	#endif // _NBL_DEBUG

	// push the newly-added blocks, highest address first (same ordering formula as reset())
	for (_size_type i=0u; i<freeStackCtr; i++)
		getFreeStack(i) = (blockCount-1u-i)*blockSize+base_t::combinedOffset;

	// migrate `other`'s free entries, rebasing from its combinedOffset onto ours
	for (_size_type i=0; i<other.freeStackCtr; i++)
	{
		_size_type freeEntry = other.getFreeStack(i)-other.base_t::combinedOffset;
		// check in case of shrink: entries that fell outside the new block range are dropped
		if (freeEntry<blockCount*blockSize)
			getFreeStack(freeStackCtr++) = freeEntry+base_t::combinedOffset;
	}
}
4545
4646 inline bool safe_shrink_size_common (_size_type& sizeBound, _size_type newBuffAlignmentWeCanGuarantee) noexcept
4747 {
48- _size_type capacity = get_total_size ()-Base ::alignOffset;
48+ _size_type capacity = get_total_size ()-base_t ::alignOffset;
4949 if (sizeBound>=capacity)
5050 return false ;
5151
@@ -71,7 +71,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
7171 virtual ~PoolAddressAllocator () {}
7272
/**
 * @brief Constructs a pool allocator over an externally provided reserved-space buffer.
 *
 * @param reservedSpc             memory for allocator bookkeeping (backs the free-address stack)
 * @param addressOffsetToApply    offset added to every address handed out
 * @param alignOffsetNeeded       bytes sacrificed at the front of the buffer for alignment
 * @param maxAllocatableAlignment largest alignment the allocator must be able to guarantee
 * @param bufSz                   total buffer size in bytes (includes the align offset region)
 * @param blockSz                 fixed size of each pool block in bytes
 */
PoolAddressAllocator(void* reservedSpc, _size_type addressOffsetToApply, _size_type alignOffsetNeeded, _size_type maxAllocatableAlignment, size_type bufSz, size_type blockSz) noexcept :
		base_t(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
		// usable pool bytes = bufSz minus the alignment offset region
		blockCount((bufSz-alignOffsetNeeded)/blockSz), blockSize(blockSz), freeStackCtr(0u)
{
	reset(); // every block starts out free
}
@@ -80,32 +80,28 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
//! When resizing we require that the copying of data buffer has already been handled by the user of the address allocator
/**
 * @brief Move-resize constructor: adopts `other`'s state at a new buffer size, then invalidates `other`.
 *
 * @param newBuffSz new buffer size in bytes
 * @param other     allocator being resized-from; left invalid (all fields = invalid_address) afterwards
 * @param args      forwarded to the base-class resize constructor
 */
template<typename... Args>
PoolAddressAllocator(_size_type newBuffSz, PoolAddressAllocator&& other, Args&&... args) noexcept :
		base_t(other,std::forward<Args>(args)...),
		blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
{
	copyState(other, newBuffSz);

	// NOTE(review): base_t is constructed from `other` as an lvalue above — presumably the base
	// resize-ctor takes over/copies the reserved-space pointer; confirm it does so before this
	// invalidate() wipes the base state of `other`.
	other.invalidate();
}
/**
 * @brief Copy-resize constructor: replicates `other`'s state at a new buffer size; `other` is untouched.
 *
 * @param newBuffSz new buffer size in bytes
 * @param other     allocator whose free-stack state is copied
 * @param args      forwarded to the base-class resize constructor
 */
template<typename... Args>
PoolAddressAllocator(_size_type newBuffSz, const PoolAddressAllocator& other, Args&&... args) noexcept :
		base_t(other, std::forward<Args>(args)...),
		blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
{
	copyState(other, newBuffSz);
}
10297
10398 PoolAddressAllocator& operator =(PoolAddressAllocator&& other)
10499 {
105- Base::operator =(std::move (other));
106- std::swap (blockCount,other.blockCount );
107- std::swap (blockSize,other.blockSize );
108- std::swap (freeStackCtr,other.freeStackCtr );
100+ base_t ::operator =(std::move (other));
101+ blockCount = other.blockCount ;
102+ blockSize = other.blockSize ;
103+ freeStackCtr = other.freeStackCtr ;
104+ other.invalidateLocal ();
109105 return *this ;
110106 }
111107
@@ -121,15 +117,15 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
/**
 * @brief Returns a previously allocated block to the pool by pushing its address onto the free stack.
 *
 * @param addr  address previously handed out; must be at least combinedOffset and block-aligned relative to it
 * @param bytes unused by this allocator (the body never reads it — pool allocations are fixed-size blocks)
 */
inline void free_addr(size_type addr, size_type bytes) noexcept
{
	#ifdef _NBL_DEBUG
		// address in range, block-aligned, and the stack must have room for one more entry
		assert(addr>=base_t::combinedOffset && (addr-base_t::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
	#endif // _NBL_DEBUG
	getFreeStack(freeStackCtr++) = addr;
}
128124
129125 inline void reset ()
130126 {
131127 for (freeStackCtr=0u ; freeStackCtr<blockCount; freeStackCtr++)
132- getFreeStack (freeStackCtr) = (blockCount-1u -freeStackCtr)*blockSize+Base ::combinedOffset;
128+ getFreeStack (freeStackCtr) = (blockCount-1u -freeStackCtr)*blockSize+base_t ::combinedOffset;
133129 }
134130
135131 // ! conservative estimate, does not account for space lost to alignment
@@ -154,7 +150,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
154150 for (size_type i=0 ; i<freeStackCtr; i++)
155151 {
156152 auto freeAddr = getFreeStack (i);
157- if (freeAddr<sizeBound+Base ::combinedOffset)
153+ if (freeAddr<sizeBound+base_t ::combinedOffset)
158154 continue ;
159155
160156 tmpStackCopy[boundedCount++] = freeAddr;
@@ -165,7 +161,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
165161 std::make_heap (tmpStackCopy,tmpStackCopy+boundedCount);
166162 std::sort_heap (tmpStackCopy,tmpStackCopy+boundedCount);
167163 // could do sophisticated modified version of std::adjacent_find with a binary search, but F'it
168- size_type endAddr = (blockCount-1u )*blockSize+Base ::combinedOffset;
164+ size_type endAddr = (blockCount-1u )*blockSize+base_t ::combinedOffset;
169165 size_type i=0u ;
170166 for (;i<boundedCount; i++,endAddr-=blockSize)
171167 {
@@ -176,7 +172,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
176172 sizeBound -= i*blockSize;
177173 }
178174 }
179- return Base ::safe_shrink_size (sizeBound,newBuffAlignmentWeCanGuarantee);
175+ return base_t ::safe_shrink_size (sizeBound,newBuffAlignmentWeCanGuarantee);
180176 }
181177
182178
@@ -200,16 +196,36 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
200196 }
201197 inline size_type get_total_size () const noexcept
202198 {
203- return blockCount*blockSize+Base ::alignOffset;
199+ return blockCount*blockSize+base_t ::alignOffset;
204200 }
205201
206202
207203
208204 inline size_type addressToBlockID (size_type addr) const noexcept
209205 {
210- return (addr-Base ::combinedOffset)/blockSize;
206+ return (addr-base_t ::combinedOffset)/blockSize;
211207 }
212208 protected:
209+
210+ /* *
211+ * @brief Invalidates only fields from this class extension
212+ */
213+ void invalidateLocal ()
214+ {
215+ blockCount = invalid_address;
216+ blockSize = invalid_address;
217+ freeStackCtr = invalid_address;
218+ }
219+
/**
 * @brief Invalidates all fields: base-class state first, then the local pool fields.
 */
void invalidate()
{
	base_t::invalidate();
	invalidateLocal();
}
228+
213229 size_type blockCount;
214230 size_type blockSize;
215231 // TODO: free address min-heap and allocated addresses max-heap, packed into the same memory (whatever is not allocated is free)
@@ -218,8 +234,8 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
218234 // but then should probably have two pool allocators, because doing that changes insertion/removal from O(1) to O(log(N))
219235 size_type freeStackCtr;
220236
221- inline size_type& getFreeStack (size_type i) {return reinterpret_cast <size_type*>(Base ::reservedSpace)[i];}
222- inline const size_type& getFreeStack (size_type i) const {return reinterpret_cast <const size_type*>(Base ::reservedSpace)[i];}
237+ inline size_type& getFreeStack (size_type i) {return reinterpret_cast <size_type*>(base_t ::reservedSpace)[i];}
238+ inline const size_type& getFreeStack (size_type i) const {return reinterpret_cast <const size_type*>(base_t ::reservedSpace)[i];}
223239};
224240
225241
0 commit comments