///
module stdx.allocator.building_blocks.segregator;

import stdx.allocator.common;

/**
Dispatches allocations (and deallocations) between two allocators ($(D
SmallAllocator) and $(D LargeAllocator)) depending on the size allocated, as
follows. All allocations smaller than or equal to $(D threshold) will be
dispatched to $(D SmallAllocator). The others will go to $(D LargeAllocator).

If both allocators are $(D shared), the $(D Segregator) will also offer $(D
shared) methods.
*/
struct Segregator(size_t threshold, SmallAllocator, LargeAllocator)
{
    import mir.utility : min;
    import stdx.allocator.internal : Ternary;

    // Hold a member instance only for stateful allocators; stateless ones
    // are reached through their global `instance` singleton instead.
    static if (stateSize!SmallAllocator) private SmallAllocator _small;
    else private alias _small = SmallAllocator.instance;
    static if (stateSize!LargeAllocator) private LargeAllocator _large;
    else private alias _large = LargeAllocator.instance;

    // Documentation-only stubs: the real members are generated by the
    // Impl mixin below, conditionally on what the two allocators define.
    version (StdDdoc)
    {
        /**
        The alignment offered is the minimum of the two allocators' alignment.
        */
        enum uint alignment;
        /**
        This method is defined only if at least one of the allocators defines
        it. The good allocation size is obtained from $(D SmallAllocator) if $(D
        s <= threshold), or $(D LargeAllocator) otherwise. (If one of the
        allocators does not define $(D goodAllocSize), the default
        implementation in this module applies.)
        */
        static size_t goodAllocSize(size_t s);
        /**
        The memory is obtained from $(D SmallAllocator) if $(D s <= threshold),
        or $(D LargeAllocator) otherwise.
        */
        void[] allocate(size_t);
        /**
        This method is defined if both allocators define it, and forwards to
        $(D SmallAllocator) or $(D LargeAllocator) appropriately.
        */
        void[] alignedAllocate(size_t, uint);
        /**
        This method is defined only if at least one of the allocators defines
        it. If $(D SmallAllocator) defines $(D expand) and $(D b.length +
        delta <= threshold), the call is forwarded to $(D SmallAllocator). If $(D
        LargeAllocator) defines $(D expand) and $(D b.length > threshold), the
        call is forwarded to $(D LargeAllocator). Otherwise, the call returns
        $(D false).
        */
        bool expand(ref void[] b, size_t delta);
        /**
        This method is defined only if at least one of the allocators defines
        it. If $(D SmallAllocator) defines $(D reallocate) and $(D b.length <=
        threshold && s <= threshold), the call is forwarded to $(D
        SmallAllocator). If $(D LargeAllocator) defines $(D reallocate) and $(D
        b.length > threshold && s > threshold), the call is forwarded to $(D
        LargeAllocator). Otherwise, the call returns $(D false).
        */
        bool reallocate(ref void[] b, size_t s);
        /**
        This method is defined only if at least one of the allocators defines
        it, and works similarly to $(D reallocate).
        */
        bool alignedReallocate(ref void[] b, size_t s);
        /**
        This method is defined only if both allocators define it. The call is
        forwarded to $(D SmallAllocator) if $(D b.length <= threshold), or $(D
        LargeAllocator) otherwise.
        */
        Ternary owns(void[] b);
        /**
        This function is defined only if both allocators define it, and forwards
        appropriately depending on $(D b.length).
        */
        bool deallocate(void[] b);
        /**
        This function is defined only if both allocators define it, and calls
        $(D deallocateAll) for them in turn.
        */
        bool deallocateAll();
        /**
        This function is defined only if both allocators define it, and returns
        the conjunction of $(D empty) calls for the two.
        */
        Ternary empty();
    }

    /**
    Composite allocators involving nested instantiations of $(D Segregator) make
    it difficult to access individual sub-allocators stored within. $(D
    allocatorForSize) simplifies the task by supplying the allocator nested
    inside a $(D Segregator) that is responsible for a specific size $(D s).

    Example:
    ----
    alias A = Segregator!(300,
        Segregator!(200, A1, A2),
        A3);
    A a;
    static assert(typeof(a.allocatorForSize!10) == A1);
    static assert(typeof(a.allocatorForSize!250) == A2);
    static assert(typeof(a.allocatorForSize!301) == A3);
    ----
    */
    ref auto allocatorForSize(size_t s)()
    {
        // Recurse into nested Segregators until the leaf allocator
        // responsible for size `s` is reached.
        static if (s <= threshold)
            static if (is(SmallAllocator == Segregator!(Args), Args...))
                return _small.allocatorForSize!s;
            else return _small;
        else
            static if (is(LargeAllocator == Segregator!(Args), Args...))
                return _large.allocatorForSize!s;
            else return _large;
    }

    enum uint alignment = min(SmallAllocator.alignment,
        LargeAllocator.alignment);

    // The method bodies live in this mixin template so that the same code
    // can be instantiated either as static members (both allocators
    // stateless) or as instance members (at least one stateful) — see the
    // static if chain at the bottom of the struct.
    private template Impl()
    {
        size_t goodAllocSize(size_t s)
        {
            return s <= threshold
                ? _small.goodAllocSize(s)
                : _large.goodAllocSize(s);
        }

        void[] allocate(size_t s)
        {
            return s <= threshold ? _small.allocate(s) : _large.allocate(s);
        }

        static if (__traits(hasMember, SmallAllocator, "alignedAllocate")
                && __traits(hasMember, LargeAllocator, "alignedAllocate"))
        void[] alignedAllocate(size_t s, uint a)
        {
            return s <= threshold
                ? _small.alignedAllocate(s, a)
                : _large.alignedAllocate(s, a);
        }

        static if (__traits(hasMember, SmallAllocator, "expand")
                || __traits(hasMember, LargeAllocator, "expand"))
        bool expand(ref void[] b, size_t delta)
        {
            if (!delta) return true;
            if (b.length + delta <= threshold)
            {
                // Old and new allocations handled by _small
                static if (__traits(hasMember, SmallAllocator, "expand"))
                    return _small.expand(b, delta);
                else
                    return false;
            }
            if (b.length > threshold)
            {
                // Old and new allocations handled by _large
                static if (__traits(hasMember, LargeAllocator, "expand"))
                    return _large.expand(b, delta);
                else
                    return false;
            }
            // Oops, cross-allocator transgression
            return false;
        }

        static if (__traits(hasMember, SmallAllocator, "reallocate")
                || __traits(hasMember, LargeAllocator, "reallocate"))
        bool reallocate(ref void[] b, size_t s)
        {
            static if (__traits(hasMember, SmallAllocator, "reallocate"))
                if (b.length <= threshold && s <= threshold)
                {
                    // Old and new allocations handled by _small
                    return _small.reallocate(b, s);
                }
            static if (__traits(hasMember, LargeAllocator, "reallocate"))
                if (b.length > threshold && s > threshold)
                {
                    // Old and new allocations handled by _large
                    return _large.reallocate(b, s);
                }
            // Cross-allocator transgression: fall back to the generic
            // module-level reallocate (allocate/copy/deallocate).
            static if (!__traits(hasMember, typeof(this), "instance"))
                return .reallocate(this, b, s);
            else
                return .reallocate(instance, b, s);
        }

        static if (__traits(hasMember, SmallAllocator, "alignedReallocate")
                || __traits(hasMember, LargeAllocator, "alignedReallocate"))
        bool alignedReallocate(ref void[] b, size_t s)
        {
            static if (__traits(hasMember, SmallAllocator, "alignedReallocate"))
                if (b.length <= threshold && s <= threshold)
                {
                    // Old and new allocations handled by _small
                    return _small.alignedReallocate(b, s);
                }
            static if (__traits(hasMember, LargeAllocator, "alignedReallocate"))
                if (b.length > threshold && s > threshold)
                {
                    // Old and new allocations handled by _large
                    return _large.alignedReallocate(b, s);
                }
            // Cross-allocator transgression: fall back to the generic
            // module-level alignedReallocate.
            static if (!__traits(hasMember, typeof(this), "instance"))
                return .alignedReallocate(this, b, s);
            else
                return .alignedReallocate(instance, b, s);
        }

        static if (__traits(hasMember, SmallAllocator, "owns")
                && __traits(hasMember, LargeAllocator, "owns"))
        Ternary owns(void[] b)
        {
            return Ternary(b.length <= threshold
                ? _small.owns(b) : _large.owns(b));
        }

        static if (__traits(hasMember, SmallAllocator, "deallocate")
                && __traits(hasMember, LargeAllocator, "deallocate"))
        bool deallocate(void[] data)
        {
            return data.length <= threshold
                ? _small.deallocate(data)
                : _large.deallocate(data);
        }

        static if (__traits(hasMember, SmallAllocator, "deallocateAll")
                && __traits(hasMember, LargeAllocator, "deallocateAll"))
        bool deallocateAll()
        {
            // Use & instead of && to evaluate both
            return _small.deallocateAll() & _large.deallocateAll();
        }

        static if (__traits(hasMember, SmallAllocator, "empty")
                && __traits(hasMember, LargeAllocator, "empty"))
        Ternary empty()
        {
            return _small.empty & _large.empty;
        }

        static if (__traits(hasMember, SmallAllocator, "resolveInternalPointer")
                && __traits(hasMember, LargeAllocator, "resolveInternalPointer"))
        Ternary resolveInternalPointer(const void* p, ref void[] result)
        {
            // Try the small allocator first; only consult the large one on a
            // definite "no" (an unknown result is returned as-is).
            Ternary r = _small.resolveInternalPointer(p, result);
            return r == Ternary.no ? _large.resolveInternalPointer(p, result) : r;
        }
    }

    // Offer shared (and static) methods only when both allocators are
    // stateless and expose a shared singleton `instance`.
    private enum sharedMethods =
        !stateSize!SmallAllocator
        && !stateSize!LargeAllocator
        && is(typeof(SmallAllocator.instance) == shared)
        && is(typeof(LargeAllocator.instance) == shared);

    static if (sharedMethods)
    { // for backward compatibility
        enum shared Segregator instance = Segregator();
        static { mixin Impl!(); }
    }
    else
    {
        static if (!stateSize!SmallAllocator && !stateSize!LargeAllocator)
        {
            enum shared Segregator instance = Segregator();
            static { mixin Impl!(); }
        }
        else
        {
            mixin Impl!();
        }
    }
}

///
@system unittest
{
    import stdx.allocator.building_blocks.free_list : FreeList;
    import stdx.allocator.gc_allocator : GCAllocator;
    import stdx.allocator.mallocator : Mallocator;
    alias A =
        Segregator!(
            1024 * 4,
            Segregator!(
                128, FreeList!(Mallocator, 0, 128),
                GCAllocator),
            Segregator!(
                1024 * 1024, Mallocator,
                GCAllocator)
        );
    A a;
    auto b = a.allocate(200);
    assert(b.length == 200);
    a.deallocate(b);
}

/**
A $(D Segregator) with more than three arguments expands to a composition of
elemental $(D Segregator)s, as illustrated by the following example:

----
alias A =
    Segregator!(
        n1, A1,
        n2, A2,
        n3, A3,
        A4
    );
----

With this definition, allocation requests for $(D n1) bytes or less are directed
to $(D A1); requests between $(D n1 + 1) and $(D n2) bytes (inclusive) are
directed to $(D A2); requests between $(D n2 + 1) and $(D n3) bytes (inclusive)
are directed to $(D A3); and requests for more than $(D n3) bytes are directed
to $(D A4). If some particular range should not be handled, $(D NullAllocator)
may be used appropriately.

*/
template Segregator(Args...)
if (Args.length > 3)
{
    // Binary search: split the (threshold, allocator) argument list roughly
    // in half so the recursive expansion has logarithmic depth.
    private enum cutPoint = ((Args.length - 2) / 4) * 2;
    static if (cutPoint >= 2)
    {
        alias Segregator = .Segregator!(
            Args[cutPoint],
            .Segregator!(Args[0 .. cutPoint], Args[cutPoint + 1]),
            .Segregator!(Args[cutPoint + 2 .. $])
        );
    }
    else
    {
        // Favor small sizes
        alias Segregator = .Segregator!(
            Args[0],
            Args[1],
            .Segregator!(Args[2 .. $])
        );
    }
}

///
@system unittest
{
    import stdx.allocator.building_blocks.free_list : FreeList;
    import stdx.allocator.gc_allocator : GCAllocator;
    import stdx.allocator.mallocator : Mallocator;
    alias A =
        Segregator!(
            128, FreeList!(Mallocator, 0, 128),
            1024 * 4, GCAllocator,
            1024 * 1024, Mallocator,
            GCAllocator
        );
    A a;
    auto b = a.allocate(201);
    assert(b.length == 201);
    a.deallocate(b);
}