@@ -20,32 +20,32 @@ template<typename _size_type>
class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>
{
    private:
-       typedef AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type> Base;
+       using base_t = AddressAllocatorBase<PoolAddressAllocator<_size_type>,_size_type>;

        void copyState(const PoolAddressAllocator& other, _size_type newBuffSz)
        {
            if (blockCount>other.blockCount)
                freeStackCtr = blockCount-other.blockCount;

            #ifdef _NBL_DEBUG
-               assert(Base::checkResize(newBuffSz,Base::alignOffset));
-               assert(freeStackCtr==0u);
+               assert(base_t::checkResize(newBuffSz,base_t::alignOffset));
            #endif // _NBL_DEBUG

            for (_size_type i=0u; i<freeStackCtr; i++)
-               getFreeStack(i) = (blockCount-1u-i)*blockSize+Base::combinedOffset;
+               getFreeStack(i) = (blockCount-1u-i)*blockSize+base_t::combinedOffset;

            for (_size_type i=0; i<other.freeStackCtr; i++)
            {
-               _size_type freeEntry = other.getFreeStack(i)-other.combinedOffset;
+               _size_type freeEntry = other.getFreeStack(i)-other.base_t::combinedOffset;
                // check in case of shrink
                if (freeEntry<blockCount*blockSize)
-                   getFreeStack(freeStackCtr++) = freeEntry+Base::combinedOffset;
+                   getFreeStack(freeStackCtr++) = freeEntry+base_t::combinedOffset;
            }
        }
+
        inline bool safe_shrink_size_common(_size_type& sizeBound, _size_type newBuffAlignmentWeCanGuarantee) noexcept
        {
-           _size_type capacity = get_total_size()-Base::alignOffset;
+           _size_type capacity = get_total_size()-base_t::alignOffset;
            if (sizeBound>=capacity)
                return false;
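To make the free-stack rebuild in copyState above concrete, here is a minimal, self-contained sketch of the same arithmetic for the grow case. The block size, block counts, and free entries are invented for illustration, and combinedOffset is taken as 0; this is not Nabla test code.

    // Hypothetical illustration of copyState's free-stack rebuild on grow.
    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main()
    {
        using size_type = uint32_t;
        const size_type blockSize = 16u;
        const size_type oldBlockCount = 4u, newBlockCount = 6u;

        // First loop: the brand-new blocks go in top-down, highest address first.
        size_type freeStackCtr = newBlockCount - oldBlockCount; // 2 new blocks
        std::vector<size_type> freeStack(newBlockCount);
        for (size_type i = 0u; i < freeStackCtr; i++)
            freeStack[i] = (newBlockCount - 1u - i) * blockSize; // 80, then 64

        // Second loop: carry over the old allocator's free entries; the bound
        // check only matters on shrink, here everything fits.
        for (size_type freeEntry : {0u, 48u})
            if (freeEntry < newBlockCount * blockSize)
                freeStack[freeStackCtr++] = freeEntry;

        assert(freeStackCtr == 4u); // 2 new + 2 carried-over free blocks
    }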
@@ -71,7 +71,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        virtual ~PoolAddressAllocator() {}

        PoolAddressAllocator(void* reservedSpc, _size_type addressOffsetToApply, _size_type alignOffsetNeeded, _size_type maxAllocatableAlignment, size_type bufSz, size_type blockSz) noexcept :
-           Base(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
+           base_t(reservedSpc,addressOffsetToApply,alignOffsetNeeded,maxAllocatableAlignment),
            blockCount((bufSz-alignOffsetNeeded)/blockSz), blockSize(blockSz), freeStackCtr(0u)
        {
            reset();
@@ -80,29 +80,28 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        //! When resizing we require that the copying of the data buffer has already been handled by the user of the address allocator
        template<typename... Args>
        PoolAddressAllocator(_size_type newBuffSz, PoolAddressAllocator&& other, Args&&... args) noexcept :
-           Base(std::move(other),std::forward<Args>(args)...),
-           blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+           base_t(other,std::forward<Args>(args)...),
+           blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
        {
            copyState(other, newBuffSz);

-           other.blockCount = invalid_address;
-           other.blockSize = invalid_address;
-           other.freeStackCtr = invalid_address;
+           other.invalidate();
        }

        template<typename... Args>
        PoolAddressAllocator(_size_type newBuffSz, const PoolAddressAllocator& other, Args&&... args) noexcept :
-           Base(other, std::forward<Args>(args)...),
-           blockCount((newBuffSz-Base::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
+           base_t(other, std::forward<Args>(args)...),
+           blockCount((newBuffSz-base_t::alignOffset)/other.blockSize), blockSize(other.blockSize), freeStackCtr(0u)
        {
            copyState(other, newBuffSz);
        }

        PoolAddressAllocator& operator=(PoolAddressAllocator&& other)
        {
-           Base::operator=(std::move(other));
-           std::swap(blockCount,other.blockCount);
-           std::swap(blockSize,other.blockSize);
-           std::swap(freeStackCtr,other.freeStackCtr);
+           base_t::operator=(std::move(other));
+           blockCount = other.blockCount;
+           blockSize = other.blockSize;
+           freeStackCtr = other.freeStackCtr;
+           other.invalidateLocal();
            return *this;
        }
@@ -118,15 +117,15 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        inline void free_addr(size_type addr, size_type bytes) noexcept
        {
            #ifdef _NBL_DEBUG
-               assert(addr>=Base::combinedOffset && (addr-Base::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
+               assert(addr>=base_t::combinedOffset && (addr-base_t::combinedOffset)%blockSize==0 && freeStackCtr<blockCount);
            #endif // _NBL_DEBUG
            getFreeStack(freeStackCtr++) = addr;
        }

        inline void reset()
        {
            for (freeStackCtr=0u; freeStackCtr<blockCount; freeStackCtr++)
-               getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+Base::combinedOffset;
+               getFreeStack(freeStackCtr) = (blockCount-1u-freeStackCtr)*blockSize+base_t::combinedOffset;
        }

        //! conservative estimate, does not account for space lost to alignment
@@ -151,7 +150,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
            for (size_type i=0; i<freeStackCtr; i++)
            {
                auto freeAddr = getFreeStack(i);
-               if (freeAddr<sizeBound+Base::combinedOffset)
+               if (freeAddr<sizeBound+base_t::combinedOffset)
                    continue;

                tmpStackCopy[boundedCount++] = freeAddr;
@@ -162,7 +161,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
            std::make_heap(tmpStackCopy,tmpStackCopy+boundedCount);
            std::sort_heap(tmpStackCopy,tmpStackCopy+boundedCount);
            // could do sophisticated modified version of std::adjacent_find with a binary search, but F'it
-           size_type endAddr = (blockCount-1u)*blockSize+Base::combinedOffset;
+           size_type endAddr = (blockCount-1u)*blockSize+base_t::combinedOffset;
            size_type i=0u;
            for (; i<boundedCount; i++,endAddr-=blockSize)
            {
@@ -173,7 +172,7 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
                    sizeBound -= i*blockSize;
                }
            }
-           return Base::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
+           return base_t::safe_shrink_size(sizeBound,newBuffAlignmentWeCanGuarantee);
        }

@@ -197,16 +196,36 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        }
        inline size_type get_total_size() const noexcept
        {
-           return blockCount*blockSize+Base::alignOffset;
+           return blockCount*blockSize+base_t::alignOffset;
        }


        inline size_type addressToBlockID(size_type addr) const noexcept
        {
-           return (addr-Base::combinedOffset)/blockSize;
+           return (addr-base_t::combinedOffset)/blockSize;
        }
    protected:
+
+       /**
+        * @brief Invalidates only the fields introduced by this class, not the base's
+        */
+       void invalidateLocal()
+       {
+           blockCount = invalid_address;
+           blockSize = invalid_address;
+           freeStackCtr = invalid_address;
+       }
+
+       /**
+        * @brief Invalidates all fields, including the base's
+        */
+       void invalidate()
+       {
+           base_t::invalidate();
+           invalidateLocal();
+       }
+
        size_type blockCount;
        size_type blockSize;
        // TODO: free address min-heap and allocated addresses max-heap, packed into the same memory (whatever is not allocated is free)
@@ -215,8 +234,8 @@ class PoolAddressAllocator : public AddressAllocatorBase<PoolAddressAllocator<_s
        // but then should probably have two pool allocators, because doing that changes insertion/removal from O(1) to O(log(N))
        size_type freeStackCtr;

-       inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(Base::reservedSpace)[i];}
-       inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(Base::reservedSpace)[i];}
+       inline size_type& getFreeStack(size_type i) {return reinterpret_cast<size_type*>(base_t::reservedSpace)[i];}
+       inline const size_type& getFreeStack(size_type i) const {return reinterpret_cast<const size_type*>(base_t::reservedSpace)[i];}
};
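The upshot of the new invalidate()/invalidateLocal() pair is that every way of gutting an allocator now funnels through one helper instead of hand-poisoning fields. Below is a minimal usage sketch: the nbl::core namespace, the growPool helper, and the idea that the only extra argument forwarded to base_t is the new reserved-space pointer are all assumptions for illustration, not confirmed by this diff; includes for the Nabla headers are omitted.

    #include <cstdint>
    #include <utility>

    // Hypothetical alias; the exact namespace is an assumption.
    using pool_t = nbl::core::PoolAddressAllocator<uint32_t>;

    // Hypothetical grow helper: the caller must already have copied the data
    // buffer itself, per the //! note on the resize constructors above.
    void growPool(pool_t& pool, void* newReservedSpace, uint32_t newBuffSz)
    {
        // Resize-construct from the old allocator...
        pool_t resized(newBuffSz, std::move(pool), newReservedSpace);
        // ...which calls other.invalidate(): blockCount, blockSize and
        // freeStackCtr in 'pool' now read invalid_address, so stale use
        // trips the _NBL_DEBUG asserts instead of corrupting memory.
        pool = std::move(resized); // move-assign invalidateLocal()s 'resized'
    }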