/*
 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_MEMORY_ALLOCATION_HPP
#define SHARE_MEMORY_ALLOCATION_HPP

#include "cppstdlib/new.hpp"
#include "memory/allStatic.hpp"
#include "nmt/memTag.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"

class outputStream;
class Thread;
class JavaThread;

class AllocFailStrategy {
 public:
  enum AllocFailEnum { EXIT_OOM, RETURN_NULL };
};
typedef AllocFailStrategy::AllocFailEnum AllocFailType;

// The virtual machine must never call one of the implicitly declared
// global allocation or deletion functions. (Such calls may result in
// link-time or run-time errors.) For convenience and documentation of
// intended use, classes in the virtual machine may be derived from one
// of the following allocation classes, some of which define allocation
// and deletion functions.
// Note: std::malloc and std::free should never be called directly.

//
// For objects allocated in the resource area (see resourceArea.hpp).
// - ResourceObj
//
// For objects allocated in the C-heap (allocated with malloc & free, tracked with NMT)
// - CHeapObj
//
// For objects allocated on the stack.
// - StackObj
//
// For classes used as name spaces.
// - AllStatic
//
// For classes in Metaspace (class data)
// - MetaspaceObj
//
// The printable subclasses are used for debugging and define virtual
// member functions for printing. Classes that need to avoid a vtable
// pointer in their objects should therefore not derive from the
// printable subclasses.
//
// The following macros and functions should be used to allocate memory
// directly in the resource area or in the C-heap. The _OBJ variants
// of the NEW/FREE_C_HEAP macros allocate/deallocate simple objects
// that do not inherit from CHeapObj; note that constructors and
// destructors are not called. The preferred way to allocate objects
// is using the new operator.
//
// WARNING: The array variants must only be used for a homogeneous array
// where all objects are of the exact type specified. If subtypes are
// stored in the array, destructors must be called explicitly as needed.
//
// NEW_RESOURCE_ARRAY*
// REALLOC_RESOURCE_ARRAY*
// FREE_RESOURCE_ARRAY*
// NEW_RESOURCE_OBJ*
// NEW_C_HEAP_ARRAY*
// REALLOC_C_HEAP_ARRAY*
// FREE_C_HEAP_ARRAY*
// NEW_C_HEAP_OBJ*
// FREE_C_HEAP_OBJ
//
// char* AllocateHeap(size_t size, MemTag mem_tag, const NativeCallStack& stack, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
// char* AllocateHeap(size_t size, MemTag mem_tag, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
// char* ReallocateHeap(char *old, size_t size, MemTag mem_tag, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
// void FreeHeap(void* p);
//
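// As an illustrative sketch only (the tag mtInternal and the element counts
// below are arbitrary example values, not prescribed ones):
//
//   int* buf = NEW_C_HEAP_ARRAY(int, 64, mtInternal);      // no constructors run
//   buf = REALLOC_C_HEAP_ARRAY(int, buf, 128, mtInternal); // contents preserved
//   FREE_C_HEAP_ARRAY(int, buf);                           // no destructors run
//
//   // Resource-area allocations are released in bulk when the enclosing
//   // ResourceMark (see resourceArea.hpp) goes out of scope:
//   char* tmp = NEW_RESOURCE_ARRAY(char, 256);
//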

extern bool NMT_track_callsite;

class NativeCallStack;


char* AllocateHeap(size_t size,
                   MemTag mem_tag,
                   const NativeCallStack& stack,
                   AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
char* AllocateHeap(size_t size,
                   MemTag mem_tag,
                   AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);

char* ReallocateHeap(char *old,
                     size_t size,
                     MemTag mem_tag,
                     AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);

// handles null pointers
void FreeHeap(void* p);

class CHeapObjBase {
 public:
  ALWAYSINLINE void* operator new(size_t size, MemTag mem_tag) {
    return AllocateHeap(size, mem_tag);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  MemTag mem_tag,
                                  const NativeCallStack& stack) {
    return AllocateHeap(size, mem_tag, stack);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  MemTag mem_tag,
                                  const std::nothrow_t&,
                                  const NativeCallStack& stack) throw() {
    return AllocateHeap(size, mem_tag, stack, AllocFailStrategy::RETURN_NULL);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  MemTag mem_tag,
                                  const std::nothrow_t&) throw() {
    return AllocateHeap(size, mem_tag, AllocFailStrategy::RETURN_NULL);
  }

  ALWAYSINLINE void* operator new[](size_t size, MemTag mem_tag) {
    return AllocateHeap(size, mem_tag);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    MemTag mem_tag,
                                    const NativeCallStack& stack) {
    return AllocateHeap(size, mem_tag, stack);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    MemTag mem_tag,
                                    const std::nothrow_t&,
                                    const NativeCallStack& stack) throw() {
    return AllocateHeap(size, mem_tag, stack, AllocFailStrategy::RETURN_NULL);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    MemTag mem_tag,
                                    const std::nothrow_t&) throw() {
    return AllocateHeap(size, mem_tag, AllocFailStrategy::RETURN_NULL);
  }

  void operator delete(void* p)     { FreeHeap(p); }
  void operator delete [] (void* p) { FreeHeap(p); }
};

// Uses the implicitly static new and delete operators of CHeapObjBase
template<MemTag MT>
class CHeapObj {
 public:
  ALWAYSINLINE void* operator new(size_t size) {
    return CHeapObjBase::operator new(size, MT);
  }

  ALWAYSINLINE void* operator new(size_t size,
                                  const NativeCallStack& stack) {
    return CHeapObjBase::operator new(size, MT, stack);
  }

  ALWAYSINLINE void* operator new(size_t size, const std::nothrow_t& nt,
                                  const NativeCallStack& stack) throw() {
    return CHeapObjBase::operator new(size, MT, nt, stack);
  }

  ALWAYSINLINE void* operator new(size_t size, const std::nothrow_t& nt) throw() {
    return CHeapObjBase::operator new(size, MT, nt);
  }

  ALWAYSINLINE void* operator new[](size_t size) {
    return CHeapObjBase::operator new[](size, MT);
  }

  ALWAYSINLINE void* operator new[](size_t size,
                                    const NativeCallStack& stack) {
    return CHeapObjBase::operator new[](size, MT, stack);
  }

  ALWAYSINLINE void* operator new[](size_t size, const std::nothrow_t& nt,
                                    const NativeCallStack& stack) throw() {
    return CHeapObjBase::operator new[](size, MT, nt, stack);
  }

  ALWAYSINLINE void* operator new[](size_t size, const std::nothrow_t& nt) throw() {
    return CHeapObjBase::operator new[](size, MT, nt);
  }

  void operator delete(void* p) {
    CHeapObjBase::operator delete(p);
  }

  void operator delete [] (void* p) {
    CHeapObjBase::operator delete[](p);
  }
};
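
// An illustrative sketch (class Foo and the tag mtInternal are example
// assumptions): a class derived from CHeapObj<some MemTag> uses the ordinary
// new/delete syntax, but the storage comes from AllocateHeap/FreeHeap and is
// attributed to the given NMT tag.
//
//   class Foo : public CHeapObj<mtInternal> {
//     int _x;
//   };
//
//   Foo* f = new Foo();   // AllocateHeap(sizeof(Foo), mtInternal)
//   delete f;             // FreeHeap(f)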

// Base class for objects allocated on the stack only.
// Calling new or delete on a StackObj is a compile-time error
// (the allocation and deallocation functions are deleted).

class StackObj {
 public:
  void* operator new(size_t size) = delete;
  void* operator new [](size_t size) = delete;
  void operator delete(void* p) = delete;
  void operator delete [](void* p) = delete;
};
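
// Illustrative sketch (MyMark is a hypothetical subclass): a StackObj is
// declared as a local or embedded value; heap allocation is rejected at
// compile time because the allocation functions are deleted.
//
//   { MyMark m; ... }                 // fine: automatic storage
//   // MyMark* p = new MyMark();      // does not compile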

// Base class for objects stored in Metaspace.
// Calling delete on a MetaspaceObj is a compile-time error (operator delete is deleted).
//
// Do not inherit from something with a vptr because this class does
// not introduce one. This class is used to allocate both shared read-only
// and shared read-write classes.
//

class ClassLoaderData;
class MetaspaceClosure;

class MetaspaceObj {
  // There are functions that all subtypes of MetaspaceObj are expected
  // to implement, so that templates which are defined for this class hierarchy
  // can work uniformly. Within the sub-hierarchy of Metadata, these are virtuals.
  // Elsewhere in the hierarchy of MetaspaceObj, type(), size(), and/or on_stack()
  // can be static if constant.
  //
  // The following functions are required by MetaspaceClosure:
  //   void metaspace_pointers_do(MetaspaceClosure* it) { <walk my refs> }
  //   int size() const { return align_up(sizeof(<This>), wordSize) / wordSize; }
  //   MetaspaceObj::Type type() const { return <This>Type; }
  //
  // The following functions are required by MetadataFactory::free_metadata():
  //   bool on_stack() { return false; }
  //   void deallocate_contents(ClassLoaderData* loader_data);
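  //
  // As a purely illustrative sketch of that shape (FooMetadata and its field
  // are hypothetical; a real subtype returns its own MetaspaceObj::Type):
  //
  //   class FooMetadata : public MetaspaceObj {
  //     Klass* _holder;
  //    public:
  //     void metaspace_pointers_do(MetaspaceClosure* it);  // visits _holder
  //     int size() const { return align_up(sizeof(FooMetadata), wordSize) / wordSize; }
  //     MetaspaceObj::Type type() const { return TypeArrayOtherType; }  // placeholder tag for the sketch
  //     bool on_stack() { return false; }
  //   };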

  friend class VMStructs;
  // All metaspace objects in the AOT cache (CDS archive) are mapped
  // into a single contiguous memory block, so we can use these
  // two pointers to quickly determine if a MetaspaceObj is in the
  // AOT cache.
  // When AOT/CDS is not enabled, both pointers are set to null.
  static void* _aot_metaspace_base;  // (inclusive) low address
  static void* _aot_metaspace_top;   // (exclusive) high address

 public:

  // Returns true if the pointer points to a valid MetaspaceObj. A valid
  // MetaspaceObj is MetaWord-aligned and contained within either the
  // regular or the AOT metaspace.
  static bool is_valid(const MetaspaceObj* p);

#if INCLUDE_CDS
  static bool in_aot_cache(const MetaspaceObj* p) {
    // If no shared metaspace regions are mapped, _aot_metaspace_{base,top} will
    // both be null and all values of p will be rejected quickly.
    return (((void*)p) < _aot_metaspace_top &&
            ((void*)p) >= _aot_metaspace_base);
  }
  bool in_aot_cache() const { return MetaspaceObj::in_aot_cache(this); }
#else
  static bool in_aot_cache(const MetaspaceObj* p) { return false; }
  bool in_aot_cache() const { return false; }
#endif

  void print_address_on(outputStream* st) const;  // nonvirtual address printing

  static void set_aot_metaspace_range(void* base, void* top) {
    _aot_metaspace_base = base;
    _aot_metaspace_top = top;
  }

  static void* aot_metaspace_base() { return _aot_metaspace_base; }
  static void* aot_metaspace_top()  { return _aot_metaspace_top; }

#define METASPACE_OBJ_TYPES_DO(f) \
  f(Class) \
  f(Symbol) \
  f(TypeArrayU1) \
  f(TypeArrayU2) \
  f(TypeArrayU4) \
  f(TypeArrayU8) \
  f(TypeArrayOther) \
  f(Method) \
  f(ConstMethod) \
  f(MethodData) \
  f(ConstantPool) \
  f(ConstantPoolCache) \
  f(Annotations) \
  f(MethodCounters) \
  f(InlineLayoutInfo) \
  f(RecordComponent) \
  f(KlassTrainingData) \
  f(MethodTrainingData) \
  f(CompileTrainingData) \
  f(AdapterHandlerEntry) \
  f(AdapterFingerPrint)

#define METASPACE_OBJ_TYPE_DECLARE(name) name ## Type,
#define METASPACE_OBJ_TYPE_NAME_CASE(name) case name ## Type: return #name;

  enum Type {
    // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc
    METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE)
    _number_of_types
  };

  static const char* type_name(Type type) {
    switch(type) {
      METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE)
      default:
        ShouldNotReachHere();
        return nullptr;
    }
  }

  static MetaspaceObj::Type array_type(size_t elem_size) {
    switch (elem_size) {
      case 1: return TypeArrayU1Type;
      case 2: return TypeArrayU2Type;
      case 4: return TypeArrayU4Type;
      case 8: return TypeArrayU8Type;
      default:
        return TypeArrayOtherType;
    }
  }

  void* operator new(size_t size, ClassLoaderData* loader_data,
                     size_t word_size,
                     Type type, JavaThread* thread) throw();
                     // can't use TRAPS from this header file.
  void* operator new(size_t size, ClassLoaderData* loader_data,
                     size_t word_size,
                     Type type) throw();
  // This is used for allocating training data. We are allocating training data
  // in many cases where a GC cannot be triggered.
  void* operator new(size_t size, MemTag flags);
  void operator delete(void* p) = delete;

  // Declare a *static* method with the same signature in any subclass of MetaspaceObj
  // that should be read-only by default. See symbol.hpp for an example. This function
  // is used by the templates in metaspaceClosure.hpp
  static bool is_read_only_by_default() { return false; }
};

// The base class for classes used as name spaces (AllStatic) is defined in allStatic.hpp.

class Arena;

extern char* resource_allocate_bytes(size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_allocate_bytes(Thread* thread, size_t size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern char* resource_reallocate_bytes(char *old, size_t old_size, size_t new_size,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
extern void resource_free_bytes(Thread* thread, char *old, size_t size);
//----------------------------------------------------------------------
// Base class for objects allocated in the resource area.
class ResourceObj {
 public:
  void* operator new(size_t size) {
    return resource_allocate_bytes(size);
  }

  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) throw() {
    return resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
  }

  void* operator new [](size_t size) throw() = delete;
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) throw() = delete;

  void operator delete(void* p) = delete;
  void operator delete [](void* p) = delete;
};
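
// Illustrative sketch (Foo is a hypothetical ResourceObj subclass): resource
// objects are typically allocated under a ResourceMark (see resourceArea.hpp)
// and are never deleted individually; they go away when the mark is released.
//
//   {
//     ResourceMark rm;
//     Foo* f = new Foo();        // resource_allocate_bytes under the hood
//     ...                        // no delete; storage released with rm
//   }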

class ArenaObj {
 public:
  void* operator new(size_t size, Arena *arena) throw();
  void* operator new [](size_t size, Arena *arena) throw() = delete;

  void* operator new [](size_t size) throw() = delete;
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) throw() = delete;

  void operator delete(void* p) = delete;
  void operator delete [](void* p) = delete;
};
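
// Illustrative sketch (Foo is a hypothetical ArenaObj subclass): the object is
// placed into a caller-supplied Arena and is never deleted individually; its
// storage lives as long as the arena does.
//
//   Arena* arena = ...;          // an Arena obtained elsewhere
//   Foo* f = new (arena) Foo();  // ArenaObj::operator new(size, Arena*)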

//----------------------------------------------------------------------
// Base class for objects allocated in the resource area by default.
// Optionally, objects may be allocated on the C heap with
// new (mem_tag) Foo(...) or in an Arena with new (&arena) Foo(...).
// AnyObj instances may also be embedded within other objects; in that case
// neither new nor delete is used (the allocation_type is unknown). If new
// was used to allocate, use delete to deallocate.
class AnyObj {
 public:
  enum allocation_type { STACK_OR_EMBEDDED = 0, RESOURCE_AREA, C_HEAP, ARENA, allocation_mask = 0x3 };
  static void set_allocation_type(address res, allocation_type type) NOT_DEBUG_RETURN;
#ifdef ASSERT
 private:
  // When this object is allocated on the stack the new() operator is not
  // called, but garbage on the stack may look like a valid allocation_type.
  // The negated 'this' pointer is stored when new() is called so that the two
  // cases can be distinguished; the second array element holds a verification
  // value that guards against stack garbage.
  uintptr_t _allocation_t[2];
  bool is_type_set() const;
  void initialize_allocation_info();
 public:
  allocation_type get_allocation_type() const;
  bool allocated_on_stack_or_embedded() const { return get_allocation_type() == STACK_OR_EMBEDDED; }
  bool allocated_on_res_area() const          { return get_allocation_type() == RESOURCE_AREA; }
  bool allocated_on_C_heap() const            { return get_allocation_type() == C_HEAP; }
  bool allocated_on_arena() const             { return get_allocation_type() == ARENA; }
 protected:
  AnyObj();                            // default constructor
  AnyObj(const AnyObj& r);             // default copy constructor
  AnyObj& operator=(const AnyObj& r);  // default copy assignment
  ~AnyObj();
#endif // ASSERT

 public:
  // CHeap allocations
  void* operator new(size_t size, MemTag mem_tag) throw();
  void* operator new [](size_t size, MemTag mem_tag) throw() = delete;
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MemTag mem_tag) throw();
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant, MemTag mem_tag) throw() = delete;

  // Arena allocations
  void* operator new(size_t size, Arena *arena);
  void* operator new [](size_t size, Arena *arena) = delete;

  // Resource allocations
  void* operator new(size_t size) {
    address res = (address)resource_allocate_bytes(size);
    DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
    return res;
  }
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) throw() {
    address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
    DEBUG_ONLY(if (res != nullptr) set_allocation_type(res, RESOURCE_AREA);)
    return res;
  }

  void* operator new [](size_t size) = delete;
  void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) = delete;
  void operator delete(void* p);
  void operator delete [](void* p) = delete;

#ifndef PRODUCT
  // Printing support
  void print() const;
  virtual void print_on(outputStream* st) const;
#endif // PRODUCT
};
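
// Illustrative sketch of the explicit placements (Foo, the tag mtInternal and
// the arena below are example assumptions); plain new defaults to the
// resource area:
//
//   Foo* r = new Foo();                // resource area (default)
//   Foo* c = new (mtInternal) Foo();   // C-heap, NMT-tracked; pair with delete
//   Arena* arena = ...;                // an Arena obtained elsewhere
//   Foo* a = new (arena) Foo();        // arena storage, released with the arena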

// One of the following macros must be used when allocating an array
// or object to determine whether it should reside in the C heap or in
// the resource area.

#define NEW_RESOURCE_ARRAY(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_RETURN_NULL(type, size)\
  (type*) resource_allocate_bytes((size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type))

#define NEW_RESOURCE_ARRAY_IN_THREAD_RETURN_NULL(thread, type, size)\
  (type*) resource_allocate_bytes(thread, (size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type))

#define REALLOC_RESOURCE_ARRAY_RETURN_NULL(type, old, old_size, new_size)\
  (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type),\
                                    (new_size) * sizeof(type), AllocFailStrategy::RETURN_NULL)

#define FREE_RESOURCE_ARRAY(type, old, size)\
  resource_free_bytes(Thread::current(), (char*)(old), (size) * sizeof(type))

#define FREE_RESOURCE_ARRAY_IN_THREAD(thread, type, old, size)\
  resource_free_bytes(thread, (char*)(old), (size) * sizeof(type))

#define FREE_FAST(old)\
  /* nop */

#define NEW_RESOURCE_OBJ(type)\
  NEW_RESOURCE_ARRAY(type, 1)

#define NEW_RESOURCE_OBJ_RETURN_NULL(type)\
  NEW_RESOURCE_ARRAY_RETURN_NULL(type, 1)

#define NEW_C_HEAP_ARRAY3(type, size, mem_tag, pc, allocfail)\
  (type*) AllocateHeap((size) * sizeof(type), mem_tag, pc, allocfail)

#define NEW_C_HEAP_ARRAY2(type, size, mem_tag, pc)\
  (type*) (AllocateHeap((size) * sizeof(type), mem_tag, pc))

#define NEW_C_HEAP_ARRAY(type, size, mem_tag)\
  (type*) (AllocateHeap((size) * sizeof(type), mem_tag))

#define NEW_C_HEAP_ARRAY2_RETURN_NULL(type, size, mem_tag, pc)\
  NEW_C_HEAP_ARRAY3(type, (size), mem_tag, pc, AllocFailStrategy::RETURN_NULL)

#define NEW_C_HEAP_ARRAY_RETURN_NULL(type, size, mem_tag)\
  NEW_C_HEAP_ARRAY2(type, (size), mem_tag, AllocFailStrategy::RETURN_NULL)

#define REALLOC_C_HEAP_ARRAY(type, old, size, mem_tag)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), mem_tag))

#define REALLOC_C_HEAP_ARRAY_RETURN_NULL(type, old, size, mem_tag)\
  (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), mem_tag, AllocFailStrategy::RETURN_NULL))

#define FREE_C_HEAP_ARRAY(type, old) \
  FreeHeap((char*)(old))

// allocate type in heap without calling ctor
#define NEW_C_HEAP_OBJ(type, mem_tag)\
  NEW_C_HEAP_ARRAY(type, 1, mem_tag)

#define NEW_C_HEAP_OBJ_RETURN_NULL(type, mem_tag)\
  NEW_C_HEAP_ARRAY_RETURN_NULL(type, 1, mem_tag)

// deallocate obj of type in heap without calling dtor
#define FREE_C_HEAP_OBJ(objname)\
  FreeHeap((char*)objname);
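
// Since the _OBJ macros run no constructors or destructors, non-trivial types
// need them invoked explicitly. A hedged sketch of that pattern (Foo and the
// tag mtInternal are example assumptions):
//
//   Foo* f = NEW_C_HEAP_OBJ(Foo, mtInternal);   // raw storage only
//   ::new (f) Foo();                            // placement-construct
//   ...
//   f->~Foo();                                  // destroy explicitly
//   FREE_C_HEAP_OBJ(f);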


//------------------------------ReallocMark---------------------------------
// Code which uses REALLOC_RESOURCE_ARRAY should check an associated
// ReallocMark, which is declared in the same scope as the reallocated
// pointer. Any operation that could __potentially__ cause a reallocation
// should check the ReallocMark.
class ReallocMark: public StackObj {
 protected:
  NOT_PRODUCT(int _nesting;)

 public:
  ReallocMark() PRODUCT_RETURN;
  void check(Arena* arena = nullptr) PRODUCT_RETURN;
};
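
// Illustrative sketch (the names are hypothetical): the mark lives next to the
// growable resource array, and code paths that may reallocate it call check().
//
//   ReallocMark nesting;                       // same scope as 'buf'
//   char* buf = NEW_RESOURCE_ARRAY(char, cap);
//   ...
//   nesting.check();                           // before any potential reallocation
//   buf = REALLOC_RESOURCE_ARRAY(char, buf, cap, new_cap);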

// Uses mmapped memory for all allocations. All allocations are initially
// zero-filled. No pre-touching.
template <class E>
class MmapArrayAllocator : public AllStatic {
 private:
  static size_t size_for(size_t length);

 public:
  static E* allocate_or_null(size_t length, MemTag mem_tag);
  static E* allocate(size_t length, MemTag mem_tag);
  static void free(E* addr, size_t length);
};

// Uses malloc'ed memory for all allocations.
template <class E>
class MallocArrayAllocator : public AllStatic {
 public:
  static size_t size_for(size_t length);

  static E* allocate(size_t length, MemTag mem_tag);
  static E* reallocate(E* addr, size_t new_length, MemTag mem_tag);
  static void free(E* addr);
};
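
// Illustrative sketch (the element type, length, and tag mtGC are example
// assumptions): both allocators hand out arrays of E and differ in the
// underlying mechanism (mmap vs. malloc) and therefore in how they are freed.
//
//   size_t len = 1024;
//   int* m = MmapArrayAllocator<int>::allocate(len, mtGC);   // zero-filled
//   MmapArrayAllocator<int>::free(m, len);                    // length needed to unmap
//
//   int* c = MallocArrayAllocator<int>::allocate(len, mtGC);
//   MallocArrayAllocator<int>::free(c);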

#endif // SHARE_MEMORY_ALLOCATION_HPP