/**
 * D binding to C++ std::allocator.
 *
 * Copyright: Copyright (c) 2019 D Language Foundation
 * License: Distributed under the
 *      $(LINK2 http://www.boost.org/LICENSE_1_0.txt, Boost Software License 1.0).
 *    (See accompanying file LICENSE)
 * Authors:   Manu Evans
 * Source:    $(DRUNTIMESRC core/stdcpp/allocator.d)
 */

module core.stdcpp.allocator;

import core.stdcpp.new_;
import core.stdcpp.xutility : StdNamespace, __cpp_sized_deallocation, __cpp_aligned_new;

extern(C++, (StdNamespace)):

/**
 * Allocators are classes that define memory models to be used by some parts of
 * the C++ Standard Library, and most specifically, by STL containers.
 */
extern(C++, class)
struct allocator(T)
{
    static assert(!is(T == const), "The C++ Standard forbids containers of const elements because allocator!(const T) is ill-formed.");
    static assert(!is(T == immutable), "immutable is not representable in C++");
    static assert(!is(T == class), "Instantiation with `class` is not supported; D can't mangle the base (non-pointer) type of a class. Use `extern (C++, class) struct T { ... }` instead.");
extern(D):

    ///
    this(U)(ref allocator!U) {}

    ///
    alias size_type = size_t;
    ///
    alias difference_type = ptrdiff_t;
    ///
    alias pointer = T*;
    ///
    alias value_type = T;

    ///
    enum propagate_on_container_move_assignment = true;
    ///
    enum is_always_equal = true;

    ///
    alias rebind(U) = allocator!U;

    version (CppRuntime_Microsoft)
    {
        import core.stdcpp.xutility : _MSC_VER;

        ///
        T* allocate(size_t count) @nogc
        {
            static if (_MSC_VER <= 1800)
            {
                import core.stdcpp.xutility : _Xbad_alloc;

                if (count == 0)
                    return null;
                void* mem;
                if ((size_t.max / T.sizeof < count) || (mem = __cpp_new(count * T.sizeof)) is null)
                    _Xbad_alloc();
                return cast(T*)mem;
            }
            else
            {
                enum _Align = _New_alignof!T;

                static size_t _Get_size_of_n(T)(const size_t _Count)
                {
                    static if (T.sizeof == 1)
                        return _Count;
                    else
                    {
                        enum size_t _Max_possible = size_t.max / T.sizeof;
                        return _Max_possible < _Count ? size_t.max : _Count * T.sizeof;
                    }
                }

                const size_t _Bytes = _Get_size_of_n!T(count);
                if (_Bytes == 0)
                    return null;

                static if (!__cpp_aligned_new || _Align <= __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                {
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            return cast(T*)_Allocate_manually_vector_aligned(_Bytes);
                    }
                    return cast(T*)__cpp_new(_Bytes);
                }
                else
                {
                    size_t _Passed_align = _Align;
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            _Passed_align = _Align < _Big_allocation_alignment ? _Big_allocation_alignment : _Align;
                    }
                    return cast(T*)__cpp_new_aligned(_Bytes, cast(align_val_t)_Passed_align);
                }
            }
        }
        ///
        void deallocate(T* ptr, size_t count) @nogc
        {
            static if (_MSC_VER <= 1800)
            {
                __cpp_delete(ptr);
            }
            else
            {
                // this is observed from VS2017
                void* _Ptr = ptr;
                size_t _Bytes = T.sizeof * count;

                enum _Align = _New_alignof!T;
                static if (!__cpp_aligned_new || _Align <= __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                {
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            _Adjust_manually_vector_aligned(_Ptr, _Bytes);
                    }
                    static if (_MSC_VER <= 1900)
                        __cpp_delete(ptr);
                    else
                        __cpp_delete_size(_Ptr, _Bytes);
                }
                else
                {
                    size_t _Passed_align = _Align;
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            _Passed_align = _Align < _Big_allocation_alignment ? _Big_allocation_alignment : _Align;
                    }
                    __cpp_delete_size_aligned(_Ptr, _Bytes, cast(align_val_t)_Passed_align);
                }
            }
        }

        ///
        enum size_t max_size = size_t.max / T.sizeof;
    }
    else version (CppRuntime_Gcc)
    {
        ///
        T* allocate(size_t count, const(void)* = null) @nogc
        {
            // if (count > max_size)
            //     std::__throw_bad_alloc();

            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                return cast(T*)__cpp_new_aligned(count * T.sizeof, cast(align_val_t)T.alignof);
            else
                return cast(T*)__cpp_new(count * T.sizeof);
        }
        ///
        void deallocate(T* ptr, size_t count) @nogc
        {
            // NOTE: GCC doesn't seem to use the sized delete when it's available...

            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                __cpp_delete_aligned(cast(void*)ptr, cast(align_val_t)T.alignof);
            else
                __cpp_delete(cast(void*)ptr);
        }

        ///
        enum size_t max_size = (ptrdiff_t.max < size_t.max ? cast(size_t)ptrdiff_t.max : size_t.max) / T.sizeof;
    }
    else version (CppRuntime_Clang)
    {
        ///
        T* allocate(size_t count, const(void)* = null) @nogc
        {
            // if (count > max_size)
            //     __throw_length_error("allocator!T.allocate(size_t n) 'n' exceeds maximum supported size");

            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                return cast(T*)__cpp_new_aligned(count * T.sizeof, cast(align_val_t)T.alignof);
            else
                return cast(T*)__cpp_new(count * T.sizeof);
        }
        ///
        void deallocate(T* ptr, size_t count) @nogc
        {
            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
                static if (__cpp_sized_deallocation)
                    return __cpp_delete_size_aligned(cast(void*)ptr, count * T.sizeof, cast(align_val_t)T.alignof);
                else
                    return __cpp_delete_aligned(cast(void*)ptr, cast(align_val_t)T.alignof);
            }
            else static if (__cpp_sized_deallocation)
                return __cpp_delete_size(cast(void*)ptr, count * T.sizeof);
            else
                return __cpp_delete(cast(void*)ptr);
        }

        ///
        enum size_t max_size = size_t.max / T.sizeof;
    }
    else
    {
        static assert(false, "C++ runtime not supported");
    }
}
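// A minimal usage sketch (not part of the original module): it exercises the
// default `allocator!T` the way an STL container would, and assumes the C++
// runtime is linked so the operator new/delete bindings from core.stdcpp.new_
// resolve at link time.
@nogc unittest
{
    allocator!int a;
    int* p = a.allocate(10);    // forwards to the C++ global operator new
    assert(p !is null);
    p[0 .. 10] = 0;
    a.deallocate(p, 10);        // forwards to the matching operator delete
}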
///
extern(C++, (StdNamespace))
struct allocator_traits(Alloc)
{
    import core.internal.traits : isTrue;

    ///
    alias allocator_type = Alloc;
    ///
    alias value_type = allocator_type.value_type;
    ///
    alias size_type = allocator_type.size_type;
    ///
    alias difference_type = allocator_type.difference_type;
    ///
    alias pointer = allocator_type.pointer;

    ///
    enum propagate_on_container_copy_assignment = isTrue!(allocator_type, "propagate_on_container_copy_assignment");
    ///
    enum propagate_on_container_move_assignment = isTrue!(allocator_type, "propagate_on_container_move_assignment");
    ///
    enum propagate_on_container_swap = isTrue!(allocator_type, "propagate_on_container_swap");
    ///
    enum is_always_equal = isTrue!(allocator_type, "is_always_equal");

    ///
    template rebind_alloc(U)
    {
        static if (__traits(hasMember, allocator_type, "rebind"))
            alias rebind_alloc = allocator_type.rebind!U;
        else
            alias rebind_alloc = allocator_type!U;
    }
    ///
    alias rebind_traits(U) = allocator_traits!(rebind_alloc!U);

    ///
    static size_type max_size()(auto ref allocator_type a)
    {
        static if (__traits(hasMember, allocator_type, "max_size"))
            return a.max_size();
        else
            return size_type.max / value_type.sizeof;
    }

    ///
    static allocator_type select_on_container_copy_construction()(auto ref allocator_type a)
    {
        static if (__traits(hasMember, allocator_type, "select_on_container_copy_construction"))
            return a.select_on_container_copy_construction();
        else
            return a;
    }
}
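// A compile-time sketch (not part of the original module) of what
// allocator_traits reports for the default allocator: rebinding goes through
// allocator.rebind, and the propagation flags mirror the enums declared in
// the allocator above.
unittest
{
    alias Traits = allocator_traits!(allocator!int);
    static assert(is(Traits.value_type == int));
    static assert(is(Traits.rebind_alloc!double == allocator!double));
    static assert(Traits.propagate_on_container_move_assignment);
    static assert(Traits.is_always_equal);
}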
private:

// MSVC has some bonus complexity!
version (CppRuntime_Microsoft)
{
    // some versions of VS require a `* const` pointer mangling hack
    // we need a way to supply the target VS version to the compiler
    version = NeedsMangleHack;

    version (X86)
        version = INTEL_ARCH;
    version (X86_64)
        version = INTEL_ARCH;

    // HACK: should we guess _DEBUG for `debug` builds?
    version (_DEBUG)
        enum _DEBUG = true;
    else version (NDEBUG)
        enum _DEBUG = false;
    else
    {
        import core.stdcpp.xutility : __CXXLIB__;
        enum _DEBUG = __CXXLIB__.length && 'd' == __CXXLIB__[$-1]; // libcmtd, msvcrtd
    }

    enum _New_alignof(T) = T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__ ? T.alignof : __STDCPP_DEFAULT_NEW_ALIGNMENT__;

    version (INTEL_ARCH)
    {
        enum size_t _Big_allocation_threshold = 4096;
        enum size_t _Big_allocation_alignment = 32;

        static assert(2 * (void*).sizeof <= _Big_allocation_alignment, "Big allocation alignment should at least match vector register alignment");
        static assert((v => v != 0 && (v & (v - 1)) == 0)(_Big_allocation_alignment), "Big allocation alignment must be a power of two");
        static assert(size_t.sizeof == (void*).sizeof, "uintptr_t is not the same size as size_t");

        // NOTE: this must track the `_DEBUG` macro used in C++...
        static if (_DEBUG)
            enum size_t _Non_user_size = 2 * (void*).sizeof + _Big_allocation_alignment - 1;
        else
            enum size_t _Non_user_size = (void*).sizeof + _Big_allocation_alignment - 1;

        version (Win64)
            enum size_t _Big_allocation_sentinel = 0xFAFAFAFAFAFAFAFA;
        else
            enum size_t _Big_allocation_sentinel = 0xFAFAFAFA;

        extern(D) // Template so it gets compiled according to _DEBUG.
        void* _Allocate_manually_vector_aligned()(const size_t _Bytes) @nogc
        {
            size_t _Block_size = _Non_user_size + _Bytes;
            if (_Block_size <= _Bytes)
                _Block_size = size_t.max;

            const size_t _Ptr_container = cast(size_t)__cpp_new(_Block_size);
            if (!(_Ptr_container != 0))
                assert(false, "invalid argument");
            void* _Ptr = cast(void*)((_Ptr_container + _Non_user_size) & ~(_Big_allocation_alignment - 1));
            (cast(size_t*)_Ptr)[-1] = _Ptr_container;

            static if (_DEBUG)
                (cast(size_t*)_Ptr)[-2] = _Big_allocation_sentinel;
            return (_Ptr);
        }

        extern(D) // Template so it gets compiled according to _DEBUG.
        void _Adjust_manually_vector_aligned()(ref void* _Ptr, ref size_t _Bytes) pure nothrow @nogc
        {
            _Bytes += _Non_user_size;

            const size_t* _Ptr_user = cast(size_t*)_Ptr;
            const size_t _Ptr_container = _Ptr_user[-1];

            // If the following asserts, it likely means that we are performing
            // an aligned delete on memory coming from an unaligned allocation.
            static if (_DEBUG)
                assert(_Ptr_user[-2] == _Big_allocation_sentinel, "invalid argument");

            // Extra paranoia on aligned allocation/deallocation; ensure _Ptr_container is
            // in range [_Min_back_shift, _Non_user_size]
            static if (_DEBUG)
                enum size_t _Min_back_shift = 2 * (void*).sizeof;
            else
                enum size_t _Min_back_shift = (void*).sizeof;

            const size_t _Back_shift = cast(size_t)_Ptr - _Ptr_container;
            if (!(_Back_shift >= _Min_back_shift && _Back_shift <= _Non_user_size))
                assert(false, "invalid argument");
            _Ptr = cast(void*)_Ptr_container;
        }
    }
}
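// A platform-independent sketch (not part of the original module) of the
// bookkeeping performed by _Allocate_manually_vector_aligned above: the block
// is over-allocated by _Non_user_size bytes, the user pointer is rounded up to
// _Big_allocation_alignment, and the raw ("container") pointer is stored in
// the word just below the user pointer so deallocation can recover it. The
// local constants and the sample address below are hypothetical stand-ins for
// the MSVC values (release build).
unittest
{
    enum size_t alignment = 32;                               // stands in for _Big_allocation_alignment
    enum size_t nonUserSize = (void*).sizeof + alignment - 1; // stands in for _Non_user_size (non-_DEBUG)
    size_t container = 0x1000_0008;                           // hypothetical result of operator new
    size_t user = (container + nonUserSize) & ~(alignment - 1);
    assert((user & (alignment - 1)) == 0);          // user pointer is 32-byte aligned
    assert(user - container >= (void*).sizeof);     // room to stash the container pointer below it
    assert(user - container <= nonUserSize);        // back shift stays within the over-allocated slack
}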
version (CppRuntime_Clang)
{
    // Helper for container swap
    package(core.stdcpp) void __swap_allocator(Alloc)(ref Alloc __a1, ref Alloc __a2)
    {
        import core.internal.lifetime : swap;

        static if (allocator_traits!Alloc.propagate_on_container_swap)
            swap(__a1, __a2);
    }
}