/*
 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_MEMORY_ALLOCATION_HPP
#define SHARE_MEMORY_ALLOCATION_HPP

#include "cppstdlib/new.hpp"
#include "memory/allStatic.hpp"
#include "nmt/memTag.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"

class outputStream;
class Thread;
class JavaThread;

class AllocFailStrategy {
public:
  enum AllocFailEnum { EXIT_OOM, RETURN_NULL };
};
typedef AllocFailStrategy::AllocFailEnum AllocFailType;
// The virtual machine must never call one of the implicitly declared
// global allocation or deletion functions. (Such calls may result in
// link-time or run-time errors.) For convenience and documentation of
// intended use, classes in the virtual machine may be derived from one
// of the following allocation classes, some of which define allocation
// and deletion functions.
// Note: std::malloc and std::free should never be called directly.

//
// For objects allocated in the resource area (see resourceArea.hpp).
// - ResourceObj
//
// For objects allocated in the C-heap (managed by malloc & free and tracked with NMT).
// - CHeapObj
//
// For objects allocated on the stack.
// - StackObj
//
// For classes used as name spaces.
// - AllStatic
//
// For classes in Metaspace (class data).
// - MetaspaceObj
//
// The printable subclasses are used for debugging and define virtual
// member functions for printing. Classes that must avoid allocating
// vtbl entries in their objects should therefore not derive from the
// printable subclasses.
//
// The following macros and functions should be used to allocate memory
// directly in the resource area or in the C-heap. The _OBJ variants
// of the NEW/FREE_C_HEAP macros are used to allocate and deallocate
// simple objects that do not inherit from CHeapObj; note that their
// constructors and destructors are not called. The preferred way to
// allocate objects is with the new operator.
//
// WARNING: The array variants must only be used for a homogeneous array
// where all objects are of the exact type specified. If subtypes are
// stored in the array then one must pay attention to calling destructors
// as needed.
//
//   NEW_RESOURCE_ARRAY*
//   REALLOC_RESOURCE_ARRAY*
//   FREE_RESOURCE_ARRAY*
//   NEW_RESOURCE_OBJ*
//   NEW_C_HEAP_ARRAY*
//   REALLOC_C_HEAP_ARRAY*
//   FREE_C_HEAP_ARRAY*
//   NEW_C_HEAP_OBJ*
//   FREE_C_HEAP_OBJ
//
//   char* AllocateHeap(size_t size, MemTag mem_tag, const NativeCallStack& stack, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
//   char* AllocateHeap(size_t size, MemTag mem_tag, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
//   char* ReallocateHeap(char* old, size_t size, MemTag mem_tag, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
//   void FreeHeap(void* p);
//
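// Example (illustrative sketch only, not part of the interface above; assumes
// a hypothetical buffer and uses mtInternal as the NMT tag):
//
//   char* buf = NEW_C_HEAP_ARRAY(char, 256, mtInternal);               // exits the VM on OOM
//   char* opt = NEW_C_HEAP_ARRAY_RETURN_NULL(char, 256, mtInternal);   // returns null on OOM
//   ...
//   FREE_C_HEAP_ARRAY(char, buf);
//   FREE_C_HEAP_ARRAY(char, opt);   // FreeHeap, and thus this macro, accepts null
//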

extern bool NMT_track_callsite;

class NativeCallStack;


char* AllocateHeap(size_t size,
                   MemTag mem_tag,
                   const NativeCallStack& stack,
                   AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
char* AllocateHeap(size_t size,
                   MemTag mem_tag,
                   AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);

char* ReallocateHeap(char* old,
                     size_t size,
                     MemTag mem_tag,
                     AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);

// Handles null pointers.
void FreeHeap(void* p);

class CHeapObjBase {
public:
  ALWAYSINLINE void* operator new(size_t size, MemTag mem_tag) {
    return AllocateHeap(size, mem_tag);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  MemTag mem_tag,
                                  const NativeCallStack& stack) {
    return AllocateHeap(size, mem_tag, stack);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  MemTag mem_tag,
                                  const std::nothrow_t&,
                                  const NativeCallStack& stack) throw() {
    return AllocateHeap(size, mem_tag, stack, AllocFailStrategy::RETURN_NULL);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  MemTag mem_tag,
                                  const std::nothrow_t&) throw() {
    return AllocateHeap(size, mem_tag, AllocFailStrategy::RETURN_NULL);
  }

  ALWAYSINLINE void* operator new[](size_t size, MemTag mem_tag) {
    return AllocateHeap(size, mem_tag);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    MemTag mem_tag,
                                    const NativeCallStack& stack) {
    return AllocateHeap(size, mem_tag, stack);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    MemTag mem_tag,
                                    const std::nothrow_t&,
                                    const NativeCallStack& stack) throw() {
    return AllocateHeap(size, mem_tag, stack, AllocFailStrategy::RETURN_NULL);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    MemTag mem_tag,
                                    const std::nothrow_t&) throw() {
    return AllocateHeap(size, mem_tag, AllocFailStrategy::RETURN_NULL);
  }

  void operator delete(void* p)    { FreeHeap(p); }
  void operator delete [](void* p) { FreeHeap(p); }
};

// Uses the implicitly static new and delete operators of CHeapObjBase.
template<MemTag MT>
class CHeapObj {
public:
  ALWAYSINLINE void* operator new(size_t size) {
    return CHeapObjBase::operator new(size, MT);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  const NativeCallStack& stack) {
    return CHeapObjBase::operator new(size, MT, stack);
  }

  ALWAYSINLINE void* operator new(size_t size, const std::nothrow_t& nt,
                                  const NativeCallStack& stack) throw() {
    return CHeapObjBase::operator new(size, MT, nt, stack);
  }

  ALWAYSINLINE void* operator new(size_t size, const std::nothrow_t& nt) throw() {
    return CHeapObjBase::operator new(size, MT, nt);
  }

  ALWAYSINLINE void* operator new[](size_t size) {
    return CHeapObjBase::operator new[](size, MT);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    const NativeCallStack& stack) {
    return CHeapObjBase::operator new[](size, MT, stack);
  }

  ALWAYSINLINE void* operator new[](size_t size, const std::nothrow_t& nt,
                                    const NativeCallStack& stack) throw() {
    return CHeapObjBase::operator new[](size, MT, nt, stack);
  }

  ALWAYSINLINE void* operator new[](size_t size, const std::nothrow_t& nt) throw() {
    return CHeapObjBase::operator new[](size, MT, nt);
  }

  void operator delete(void* p) {
    CHeapObjBase::operator delete(p);
  }

  void operator delete [](void* p) {
    CHeapObjBase::operator delete[](p);
  }
};
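
// Illustrative sketch (hypothetical Foo class; mtInternal is one of the MemTag values):
//
//   class Foo : public CHeapObj<mtInternal> {
//     ...
//   };
//   Foo* f = new Foo();                 // allocated via AllocateHeap, tagged mtInternal for NMT
//   Foo* g = new (std::nothrow) Foo();  // returns null instead of exiting the VM on OOM
//   delete f;                           // released via FreeHeap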

// Base class for objects allocated on the stack only.
// Calling new or delete results in a compile-time error, since the
// allocation and deallocation functions are deleted.

class StackObj {
public:
  void* operator new(size_t size) = delete;
  void* operator new [](size_t size) = delete;
  void operator delete(void* p) = delete;
  void operator delete [](void* p) = delete;
};
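
// Illustrative sketch (hypothetical Timer class):
//
//   class Timer : public StackObj { ... };
//
//   void f() {
//     Timer t;                  // fine: automatic (stack) storage
//     Timer* p = new Timer();   // does not compile: operator new is deleted
//   }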

// Base class for objects stored in Metaspace.
// Calling delete results in a compile-time error, since operator delete
// is deleted.
//
// Do not inherit from something with a vptr because this class does
// not introduce one. This class is used to allocate both shared read-only
// and shared read-write classes.
//

class ClassLoaderData;
class MetaspaceClosure;

class MetaspaceObj {
  // There are functions that all subtypes of MetaspaceObj are expected
  // to implement, so that templates which are defined for this class hierarchy
  // can work uniformly. Within the sub-hierarchy of Metadata, these are virtuals.
  // Elsewhere in the hierarchy of MetaspaceObj, type(), size(), and/or on_stack()
  // can be static if constant.
  //
  // The following functions are required by MetaspaceClosure:
  //   void metaspace_pointers_do(MetaspaceClosure* it) { <walk my refs> }
  //   int size() const { return align_up(sizeof(<This>), wordSize) / wordSize; }
  //   MetaspaceObj::Type type() const { return <This>Type; }
  //
  // The following functions are required by MetadataFactory::free_metadata():
  //   bool on_stack() { return false; }
  //   void deallocate_contents(ClassLoaderData* loader_data);

  friend class VMStructs;
  // All metaspace objects in the AOT cache (CDS archive) are mapped
  // into a single contiguous memory block, so we can use these
  // two pointers to quickly determine if a MetaspaceObj is in the
  // AOT cache.
  // When AOT/CDS is not enabled, both pointers are set to null.
  static void* _aot_metaspace_base;  // (inclusive) low address
  static void* _aot_metaspace_top;   // (exclusive) high address

public:

  // Returns true if the pointer points to a valid MetaspaceObj. A valid
  // MetaspaceObj is MetaWord-aligned and contained within either the
  // regular or the AOT metaspace.
  static bool is_valid(const MetaspaceObj* p);

#if INCLUDE_CDS
  static bool in_aot_cache(const MetaspaceObj* p) {
    // If no shared metaspace regions are mapped, _aot_metaspace_{base,top} will
    // both be null and all values of p will be rejected quickly.
    return (((void*)p) < _aot_metaspace_top &&
            ((void*)p) >= _aot_metaspace_base);
  }
  bool in_aot_cache() const { return MetaspaceObj::in_aot_cache(this); }
#else
  static bool in_aot_cache(const MetaspaceObj* p) { return false; }
  bool in_aot_cache() const { return false; }
#endif

  void print_address_on(outputStream* st) const;  // nonvirtual address printing

  static void set_aot_metaspace_range(void* base, void* top) {
    _aot_metaspace_base = base;
    _aot_metaspace_top = top;
  }

  static void* aot_metaspace_base() { return _aot_metaspace_base; }
  static void* aot_metaspace_top()  { return _aot_metaspace_top; }

#define METASPACE_OBJ_TYPES_DO(f) \
  f(Class) \
  f(Symbol) \
  f(TypeArrayU1) \
  f(TypeArrayU2) \
  f(TypeArrayU4) \
  f(TypeArrayU8) \
  f(TypeArrayOther) \
  f(Method) \
  f(ConstMethod) \
  f(MethodData) \
  f(ConstantPool) \
  f(ConstantPoolCache) \
  f(Annotations) \
  f(MethodCounters) \
  f(RecordComponent) \
  f(KlassTrainingData) \
  f(MethodTrainingData) \
  f(CompileTrainingData) \
  f(AdapterHandlerEntry) \
  f(AdapterFingerPrint)

#define METASPACE_OBJ_TYPE_DECLARE(name) name ## Type,
#define METASPACE_OBJ_TYPE_NAME_CASE(name) case name ## Type: return #name;

  enum Type {
    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
    _number_of_types
  };

  static const char* type_name(Type type) {
    switch(type) {
      METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
    default:
      ShouldNotReachHere();
      return nullptr;
    }
  }

  static MetaspaceObj::Type array_type(size_t elem_size) {
    switch (elem_size) {
    case 1: return TypeArrayU1Type;
    case 2: return TypeArrayU2Type;
    case 4: return TypeArrayU4Type;
    case 8: return TypeArrayU8Type;
    default:
      return TypeArrayOtherType;
    }
  }

  void* operator new(size_t size, ClassLoaderData* loader_data,
                     size_t word_size,
                     Type type, JavaThread* thread) throw();
                     // can't use TRAPS from this header file.
  void* operator new(size_t size, ClassLoaderData* loader_data,
                     size_t word_size,
                     Type type) throw();
  // This is used for allocating training data. We are allocating training data
  // in many cases where a GC cannot be triggered.
  void* operator new(size_t size, MemTag flags);
  void operator delete(void* p) = delete;

  // Declare a *static* method with the same signature in any subclass of MetaspaceObj
  // that should be read-only by default. See symbol.hpp for an example. This function
  // is used by the templates in metaspaceClosure.hpp
  static bool is_read_only_by_default() { return false; }
};

// For classes used as name spaces, see AllStatic (allStatic.hpp).

class Arena;

extern char* resource_allocate_bytes(size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_allocate_bytes(Thread* thread, size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_reallocate_bytes(char* old, size_t old_size, size_t new_size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern void resource_free_bytes(Thread* thread, char* old, size_t size);

//----------------------------------------------------------------------
// Base class for objects allocated in the resource area.
class ResourceObj {
public:
  void* operator new(size_t size) {
    return resource_allocate_bytes(size);
  }

  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) throw() {
    return resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
  }

  void* operator new [](size_t size) throw() = delete;
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) throw() = delete;

  void operator delete(void* p) = delete;
  void operator delete [](void* p) = delete;
};
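
// Illustrative sketch (hypothetical Foo class): resource-area allocations are
// scoped by a ResourceMark (see resourceArea.hpp); the memory is released when
// the mark goes out of scope, which is why operator delete is not available.
//
//   {
//     ResourceMark rm;
//     Foo* f = new Foo();   // Foo : public ResourceObj
//     ...
//   }                       // f is released together with rm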

class ArenaObj {
public:
  void* operator new(size_t size, Arena* arena) throw();
  void* operator new [](size_t size, Arena* arena) throw() = delete;

  void* operator new [](size_t size) throw() = delete;
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) throw() = delete;

  void operator delete(void* p) = delete;
  void operator delete [](void* p) = delete;
};
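
// Illustrative sketch (hypothetical Node class; 'arena' is an existing Arena):
//
//   Node* n = new (&arena) Node();   // lives until the arena is destroyed;
//                                    // operator delete is not available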

//----------------------------------------------------------------------
// Base class for objects allocated in the resource area by default.
// Optionally, objects may be allocated on the C heap with
// new (mem_tag) Foo(...), where mem_tag is a MemTag value, or in an
// Arena with new (&arena) Foo(...).
// AnyObjs can also be embedded within other objects, but then don't use
// new or delete (the allocation_type is unknown). If new is used to
// allocate, use delete to deallocate.
class AnyObj {
public:
  enum allocation_type { STACK_OR_EMBEDDED = 0, RESOURCE_AREA, C_HEAP, ARENA, allocation_mask = 0x3 };
  static void set_allocation_type(address res, allocation_type type) NOT_DEBUG_RETURN;
#ifdef ASSERT
private:
  // When this object is allocated on the stack the new() operator is not
  // called, but garbage on the stack may look like a valid allocation_type.
  // The negated 'this' pointer is therefore stored when new() is called so
  // the two cases can be distinguished; the second array element holds a
  // verification value to tell a real record apart from stack garbage.
  uintptr_t _allocation_t[2];
  bool is_type_set() const;
  void initialize_allocation_info();
public:
  allocation_type get_allocation_type() const;
  bool allocated_on_stack_or_embedded() const { return get_allocation_type() == STACK_OR_EMBEDDED; }
  bool allocated_on_res_area() const          { return get_allocation_type() == RESOURCE_AREA; }
  bool allocated_on_C_heap() const            { return get_allocation_type() == C_HEAP; }
  bool allocated_on_arena() const             { return get_allocation_type() == ARENA; }
protected:
  AnyObj();                           // default constructor
  AnyObj(const AnyObj& r);            // default copy constructor
  AnyObj& operator=(const AnyObj& r); // default copy assignment
  ~AnyObj();
#endif // ASSERT

public:
  // CHeap allocations
  void* operator new(size_t size, MemTag mem_tag) throw();
  void* operator new [](size_t size, MemTag mem_tag) throw() = delete;
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MemTag mem_tag) throw();
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant, MemTag mem_tag) throw() = delete;

  // Arena allocations
  void* operator new(size_t size, Arena* arena);
  void* operator new [](size_t size, Arena* arena) = delete;

  // Resource allocations
  void* operator new(size_t size) {
    address res = (address)resource_allocate_bytes(size);
    DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
    return res;
  }
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) throw() {
    address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
    DEBUG_ONLY(if (res != nullptr) set_allocation_type(res, RESOURCE_AREA);)
    return res;
  }

  void* operator new [](size_t size) = delete;
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) = delete;
  void operator delete(void* p);
  void operator delete [](void* p) = delete;

#ifndef PRODUCT
  // Printing support
  void print() const;
  virtual void print_on(outputStream* st) const;
#endif // PRODUCT
};
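
// Illustrative sketch (hypothetical Foo class deriving from AnyObj; mtInternal
// and 'arena' are assumptions for the example):
//
//   Foo* r = new Foo();               // resource area (the default)
//   Foo* c = new (mtInternal) Foo();  // C heap, tracked by NMT; release with 'delete c'
//   Foo* a = new (&arena) Foo();      // arena; released when the arena is destroyed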

// One of the following macros must be used when allocating an array
// or object to determine whether it should reside in the C heap or in
// the resource area.

#define NEW_RESOURCE_ARRAY(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_RETURN_NULL(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_IN_THREAD_RETURN_NULL(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type))

#define REALLOC_RESOURCE_ARRAY_RETURN_NULL(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type),\
                                    (new_size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define FREE_RESOURCE_ARRAY(type, old, size)\
  resource_free_bytes(Thread::current(), (char*)(old), (size) * sizeof(type))

#define FREE_RESOURCE_ARRAY_IN_THREAD(thread, type, old, size)\
  resource_free_bytes(thread, (char*)(old), (size) * sizeof(type))

#define FREE_FAST(old)\
  /* nop */

#define NEW_RESOURCE_OBJ(type)\
  NEW_RESOURCE_ARRAY(type, 1)

#define NEW_RESOURCE_OBJ_RETURN_NULL(type)\
  NEW_RESOURCE_ARRAY_RETURN_NULL(type, 1)

#define NEW_C_HEAP_ARRAY3(type, size, mem_tag, pc, allocfail)\
  (type*) AllocateHeap((size) * sizeof(type), mem_tag, pc, allocfail)

#define NEW_C_HEAP_ARRAY2(type, size, mem_tag, pc)\
  (type*) (AllocateHeap((size) * sizeof(type), mem_tag, pc))

#define NEW_C_HEAP_ARRAY(type, size, mem_tag)\
  (type*) (AllocateHeap((size) * sizeof(type), mem_tag))

#define NEW_C_HEAP_ARRAY2_RETURN_NULL(type, size, mem_tag, pc)\
  NEW_C_HEAP_ARRAY3(type, (size), mem_tag, pc, AllocFailStrategy::RETURN_NULL)

#define NEW_C_HEAP_ARRAY_RETURN_NULL(type, size, mem_tag)\
  NEW_C_HEAP_ARRAY2(type, (size), mem_tag, AllocFailStrategy::RETURN_NULL)

#define REALLOC_C_HEAP_ARRAY(type, old, size, mem_tag)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), mem_tag))

#define REALLOC_C_HEAP_ARRAY_RETURN_NULL(type, old, size, mem_tag)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), mem_tag, AllocFailStrategy::RETURN_NULL))

#define FREE_C_HEAP_ARRAY(type, old) \
  FreeHeap((char*)(old))

// allocate type in heap without calling ctor
#define NEW_C_HEAP_OBJ(type, mem_tag)\
  NEW_C_HEAP_ARRAY(type, 1, mem_tag)

#define NEW_C_HEAP_OBJ_RETURN_NULL(type, mem_tag)\
  NEW_C_HEAP_ARRAY_RETURN_NULL(type, 1, mem_tag)

// deallocate obj of type in heap without calling dtor
#define FREE_C_HEAP_OBJ(objname)\
  FreeHeap((char*)objname);


//------------------------------ReallocMark---------------------------------
// Code which uses REALLOC_RESOURCE_ARRAY should check an associated
// ReallocMark, which is declared in the same scope as the reallocated
// pointer. Any operation that could __potentially__ cause a reallocation
// should check the ReallocMark.
class ReallocMark: public StackObj {
protected:
  NOT_PRODUCT(int _nesting;)

public:
  ReallocMark() PRODUCT_RETURN;
  void check(Arena* arena = nullptr) PRODUCT_RETURN;
};
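
// Illustrative sketch (hypothetical names):
//
//   ReallocMark nesting_check;
//   int* buf = NEW_RESOURCE_ARRAY(int, len);
//   ...
//   nesting_check.check();   // verify before an operation that may reallocate
//   buf = REALLOC_RESOURCE_ARRAY(int, buf, len, 2 * len);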

// Uses mmapped memory for all allocations. All allocations are initially
// zero-filled. No pre-touching.
template <class E>
class MmapArrayAllocator : public AllStatic {
private:
  static size_t size_for(size_t length);

public:
  static E* allocate_or_null(size_t length, MemTag mem_tag);
  static E* allocate(size_t length, MemTag mem_tag);
  static void free(E* addr, size_t length);
};

// Uses malloc'ed memory for all allocations.
template <class E>
class MallocArrayAllocator : public AllStatic {
public:
  static size_t size_for(size_t length);

  static E* allocate(size_t length, MemTag mem_tag);
  static E* reallocate(E* addr, size_t new_length, MemTag mem_tag);
  static void free(E* addr);
};
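
// Illustrative sketch (mtInternal is one of the MemTag values):
//
//   int* data = MallocArrayAllocator<int>::allocate(100, mtInternal);
//   data = MallocArrayAllocator<int>::reallocate(data, 200, mtInternal);
//   MallocArrayAllocator<int>::free(data);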

#endif // SHARE_MEMORY_ALLOCATION_HPP