/*
 * Copyright (c) 2018, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/vmClasses.hpp"
#include "gc/shared/allocTracer.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/memAllocator.hpp"
#include "gc/shared/threadLocalAllocBuffer.inline.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "memory/universe.hpp"
#include "oops/arrayOop.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/continuationJavaClasses.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaThread.hpp"
#include "runtime/sharedRuntime.hpp"
#include "services/lowMemoryDetector.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"

class MemAllocator::Allocation: StackObj {
  friend class MemAllocator;

  const MemAllocator& _allocator;
  JavaThread*         _thread;
  oop*                _obj_ptr;
  bool                _overhead_limit_exceeded;
  bool                _allocated_outside_tlab;
  size_t              _allocated_tlab_size;
  bool                _tlab_end_reset_for_sample;

  bool check_out_of_memory();
  void verify_before();
  void verify_after();
  void notify_allocation(JavaThread* thread);
  void notify_allocation_jvmti_sampler();
  void notify_allocation_low_memory_detector();
  void notify_allocation_jfr_sampler();
  void notify_allocation_dtrace_sampler(JavaThread* thread);
  void check_for_bad_heap_word_value() const;
#ifdef ASSERT
  void check_for_valid_allocation_state() const;
#endif

  class PreserveObj;

public:
  Allocation(const MemAllocator& allocator, oop* obj_ptr)
    : _allocator(allocator),
      _thread(JavaThread::current()),
      _obj_ptr(obj_ptr),
      _overhead_limit_exceeded(false),
      _allocated_outside_tlab(false),
      _allocated_tlab_size(0),
      _tlab_end_reset_for_sample(false)
  {
    verify_before();
  }

  ~Allocation() {
    if (!check_out_of_memory()) {
      verify_after();
      notify_allocation(_thread);
    }
  }

  oop obj() const { return *_obj_ptr; }
};

class MemAllocator::Allocation::PreserveObj: StackObj {
  HandleMark _handle_mark;
  Handle     _handle;
  oop* const _obj_ptr;

public:
  PreserveObj(JavaThread* thread, oop* obj_ptr)
    : _handle_mark(thread),
      _handle(thread, *obj_ptr),
      _obj_ptr(obj_ptr)
  {
    *obj_ptr = nullptr;
  }

  ~PreserveObj() {
    *_obj_ptr = _handle();
  }

  oop operator()() const {
    return _handle();
  }
};
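// Called from ~Allocation. Returns false if the allocation produced an
// object. Otherwise installs the appropriate OutOfMemoryError (or the
// retry sentinel for retryable allocations) as the pending exception and
// returns true, so the destructor skips verification and notifications.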
bool MemAllocator::Allocation::check_out_of_memory() {
  JavaThread* THREAD = _thread; // For exception macros.
  assert(!HAS_PENDING_EXCEPTION, "Unexpected exception, will result in uninitialized storage");

  if (obj() != nullptr) {
    return false;
  }

  const char* message = _overhead_limit_exceeded ? "GC overhead limit exceeded" : "Java heap space";
  if (!_thread->in_retryable_allocation()) {
    // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
    report_java_out_of_memory(message);

    if (JvmtiExport::should_post_resource_exhausted()) {
      JvmtiExport::post_resource_exhausted(
          JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR | JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP,
          message);
    }

    oop exception = _overhead_limit_exceeded ?
        Universe::out_of_memory_error_gc_overhead_limit() :
        Universe::out_of_memory_error_java_heap();
    THROW_OOP_(exception, true);
  } else {
    THROW_OOP_(Universe::out_of_memory_error_retry(), true);
  }
}

void MemAllocator::Allocation::verify_before() {
  // Clear unhandled oops for memory allocation. Memory allocation might
  // not take out a lock if from tlab, so clear here.
  JavaThread* THREAD = _thread; // For exception macros.
  assert(!HAS_PENDING_EXCEPTION, "Should not allocate with exception pending");
  debug_only(check_for_valid_allocation_state());
  assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed");
}

void MemAllocator::Allocation::verify_after() {
  NOT_PRODUCT(check_for_bad_heap_word_value();)
}

void MemAllocator::Allocation::check_for_bad_heap_word_value() const {
  MemRegion obj_range = _allocator.obj_memory_range(obj());
  HeapWord* addr = obj_range.start();
  size_t size = obj_range.word_size();
  if (CheckMemoryInitialization && ZapUnusedHeapArea) {
    for (size_t slot = 0; slot < size; slot += 1) {
      assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
             "Found badHeapWordValue in post-allocation check");
    }
  }
}

#ifdef ASSERT
void MemAllocator::Allocation::check_for_valid_allocation_state() const {
  // How to choose between a pending exception and a potential
  // OutOfMemoryError? Don't allow pending exceptions.
  // This is a VM policy failure, so how do we exhaustively test it?
  assert(!_thread->has_pending_exception(),
         "shouldn't be allocating with pending exception");
  // Allocation of an oop can always invoke a safepoint.
  // _thread is already a JavaThread*, so no cast is needed here.
  _thread->check_for_valid_safepoint_state();
}
#endif
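// JVMTI allocation-event support. Registers the new object with any active
// VMObjectAlloc event collector and, if SampledObjectAlloc events are
// enabled, feeds the allocation to the per-thread heap sampler. The object
// is protected by a Handle (PreserveObj) across the sampler callback,
// which may safepoint.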
void MemAllocator::Allocation::notify_allocation_jvmti_sampler() {
  // support for JVMTI VMObjectAlloc event (no-op if not enabled)
  JvmtiExport::vm_object_alloc_event_collector(obj());

  if (!JvmtiExport::should_post_sampled_object_alloc()) {
    // Sampling disabled
    return;
  }

  // Only sample non-TLAB allocations, and TLAB allocations that either
  // refilled the TLAB or took the sampler-induced slow path; skip
  // ordinary TLAB allocations.
  if (!_allocated_outside_tlab && _allocated_tlab_size == 0 && !_tlab_end_reset_for_sample) {
    return;
  }

  // If we want to be sampling, protect the allocated object with a Handle
  // before doing the callback. The callback is done in the destructor of
  // the JvmtiSampledObjectAllocEventCollector.
  size_t bytes_since_last = 0;

  {
    PreserveObj obj_h(_thread, _obj_ptr);
    JvmtiSampledObjectAllocEventCollector collector;
    size_t size_in_bytes = _allocator._word_size * HeapWordSize;
    ThreadLocalAllocBuffer& tlab = _thread->tlab();

    if (!_allocated_outside_tlab) {
      bytes_since_last = tlab.bytes_since_last_sample_point();
    }

    _thread->heap_sampler().check_for_sampling(obj_h(), size_in_bytes, bytes_since_last);
  }

  if (_tlab_end_reset_for_sample || _allocated_tlab_size != 0) {
    // Tell the TLAB to forget bytes_since_last if we passed it to the heap sampler.
    _thread->tlab().set_sample_end(bytes_since_last != 0);
  }
}

void MemAllocator::Allocation::notify_allocation_low_memory_detector() {
  // support low memory notifications (no-op if not enabled)
  LowMemoryDetector::detect_low_memory_for_collected_pools();
}

void MemAllocator::Allocation::notify_allocation_jfr_sampler() {
  HeapWord* mem = cast_from_oop<HeapWord*>(obj());
  size_t size_in_bytes = _allocator._word_size * HeapWordSize;

  if (_allocated_outside_tlab) {
    AllocTracer::send_allocation_outside_tlab(obj()->klass(), mem, size_in_bytes, _thread);
  } else if (_allocated_tlab_size != 0) {
    // TLAB was refilled
    AllocTracer::send_allocation_in_new_tlab(obj()->klass(), mem, _allocated_tlab_size * HeapWordSize,
                                             size_in_bytes, _thread);
  }
}

void MemAllocator::Allocation::notify_allocation_dtrace_sampler(JavaThread* thread) {
  if (DTraceAllocProbes) {
    // support for Dtrace object alloc event (no-op most of the time)
    Klass* klass = obj()->klass();
    size_t word_size = _allocator._word_size;
    if (klass != nullptr && klass->name() != nullptr) {
      SharedRuntime::dtrace_object_alloc(thread, obj(), word_size);
    }
  }
}

void MemAllocator::Allocation::notify_allocation(JavaThread* thread) {
  notify_allocation_low_memory_detector();
  notify_allocation_jfr_sampler();
  notify_allocation_dtrace_sampler(thread);
  notify_allocation_jvmti_sampler();
}

HeapWord* MemAllocator::mem_allocate_outside_tlab(Allocation& allocation) const {
  allocation._allocated_outside_tlab = true;
  HeapWord* mem = Universe::heap()->mem_allocate(_word_size, &allocation._overhead_limit_exceeded);
  if (mem == nullptr) {
    return mem;
  }

  NOT_PRODUCT(Universe::heap()->check_for_non_bad_heap_word_value(mem, _word_size));
  size_t size_in_bytes = _word_size * HeapWordSize;
  _thread->incr_allocated_bytes(size_in_bytes);

  return mem;
}
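// TLAB allocation is split into a fast path and a slow path: the fast path
// bumps the pointer in the current buffer, while the slow path may retire
// the buffer and request a fresh TLAB from the heap.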
HeapWord* MemAllocator::mem_allocate_inside_tlab(Allocation& allocation) const {
  assert(UseTLAB, "should use UseTLAB");

  // Try allocating from an existing TLAB.
  HeapWord* mem = mem_allocate_inside_tlab_fast();
  if (mem != nullptr) {
    return mem;
  }

  // Try refilling the TLAB and allocating the object in it.
  return mem_allocate_inside_tlab_slow(allocation);
}

HeapWord* MemAllocator::mem_allocate_inside_tlab_fast() const {
  return _thread->tlab().allocate(_word_size);
}

HeapWord* MemAllocator::mem_allocate_inside_tlab_slow(Allocation& allocation) const {
  HeapWord* mem = nullptr;
  ThreadLocalAllocBuffer& tlab = _thread->tlab();

  if (JvmtiExport::should_post_sampled_object_alloc()) {
    tlab.set_back_allocation_end();
    mem = tlab.allocate(_word_size);

    // We set back the allocation sample point to try to allocate this, reset it
    // when done.
    allocation._tlab_end_reset_for_sample = true;

    if (mem != nullptr) {
      return mem;
    }
  }

  // Retain tlab and allocate object in shared space if
  // the amount free in the tlab is too large to discard.
  if (tlab.free() > tlab.refill_waste_limit()) {
    tlab.record_slow_allocation(_word_size);
    return nullptr;
  }

  // Discard tlab and allocate a new one.
  // To minimize fragmentation, the last TLAB may be smaller than the rest.
  size_t new_tlab_size = tlab.compute_size(_word_size);

  tlab.retire_before_allocation();

  if (new_tlab_size == 0) {
    return nullptr;
  }

  // Allocate a new TLAB requesting new_tlab_size. Any size
  // between minimal and new_tlab_size is accepted.
  size_t min_tlab_size = ThreadLocalAllocBuffer::compute_min_size(_word_size);
  mem = Universe::heap()->allocate_new_tlab(min_tlab_size, new_tlab_size, &allocation._allocated_tlab_size);
  if (mem == nullptr) {
    assert(allocation._allocated_tlab_size == 0,
           "Allocation failed, but actual size was updated. min: " SIZE_FORMAT
           ", desired: " SIZE_FORMAT ", actual: " SIZE_FORMAT,
           min_tlab_size, new_tlab_size, allocation._allocated_tlab_size);
    return nullptr;
  }
  assert(allocation._allocated_tlab_size != 0, "Allocation succeeded but actual size not updated. mem at: "
         PTR_FORMAT " min: " SIZE_FORMAT ", desired: " SIZE_FORMAT,
         p2i(mem), min_tlab_size, new_tlab_size);

  if (ZeroTLAB) {
    // ...and clear it.
    Copy::zero_to_words(mem, allocation._allocated_tlab_size);
  } else {
    // ...and zap just allocated object.
#ifdef ASSERT
    // Skip mangling the space corresponding to the object header to
    // ensure that the returned space is not considered parsable by
    // any concurrent GC thread.
    size_t hdr_size = oopDesc::header_size();
    Copy::fill_to_words(mem + hdr_size, allocation._allocated_tlab_size - hdr_size, badHeapWordVal);
#endif // ASSERT
  }

  tlab.fill(mem, mem + _word_size, allocation._allocated_tlab_size);
  return mem;
}

HeapWord* MemAllocator::mem_allocate_slow(Allocation& allocation) const {
  // Allocation of an oop can always invoke a safepoint.
  debug_only(JavaThread::cast(_thread)->check_for_valid_safepoint_state());

  if (UseTLAB) {
    // Try refilling the TLAB and allocating the object in it.
    HeapWord* mem = mem_allocate_inside_tlab_slow(allocation);
    if (mem != nullptr) {
      return mem;
    }
  }

  return mem_allocate_outside_tlab(allocation);
}
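// mem_allocate() below is the raw-memory entry point; allocate() wraps it
// and initializes the object. A typical caller constructs a concrete
// allocator and calls allocate(), roughly as CollectedHeap::obj_allocate()
// does (hedged sketch, assuming the usual ObjAllocator constructor
// arguments; see memAllocator.hpp for the exact signature):
//
//   ObjAllocator allocator(klass, size_in_words, THREAD);
//   oop obj = allocator.allocate(); // may safepoint, GC, or throw OOME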
HeapWord* MemAllocator::mem_allocate(Allocation& allocation) const {
  if (UseTLAB) {
    // Try allocating from an existing TLAB.
    HeapWord* mem = mem_allocate_inside_tlab_fast();
    if (mem != nullptr) {
      return mem;
    }
  }

  return mem_allocate_slow(allocation);
}

oop MemAllocator::allocate() const {
  oop obj = nullptr;
  {
    Allocation allocation(*this, &obj);
    HeapWord* mem = mem_allocate(allocation);
    if (mem != nullptr) {
      obj = initialize(mem);
    } else {
      // The unhandled oop detector will poison local variable obj,
      // so reset it to null if mem is null.
      obj = nullptr;
    }
  }
  return obj;
}

void MemAllocator::mem_clear(HeapWord* mem) const {
  assert(mem != nullptr, "cannot initialize null object");
  const size_t hs = oopDesc::header_size();
  assert(_word_size >= hs, "unexpected object size");
  if (!UseCompactObjectHeaders) {
    oopDesc::set_klass_gap(mem, 0);
  }
  Copy::fill_to_aligned_words(mem + hs, _word_size - hs);
}

oop MemAllocator::finish(HeapWord* mem) const {
  assert(mem != nullptr, "null object pointer");
  // Need a release store to ensure array/class length, mark word, and
  // object zeroing are visible before setting the klass non-null, for
  // concurrent collectors.
  if (UseCompactObjectHeaders) {
    oopDesc::release_set_mark(mem, _klass->prototype_header());
  } else {
    oopDesc::set_mark(mem, markWord::prototype());
    oopDesc::release_set_klass(mem, _klass);
  }
  return cast_to_oop(mem);
}

oop ObjAllocator::initialize(HeapWord* mem) const {
  mem_clear(mem);
  return finish(mem);
}

MemRegion ObjArrayAllocator::obj_memory_range(oop obj) const {
  if (_do_zero) {
    return MemAllocator::obj_memory_range(obj);
  }
  ArrayKlass* array_klass = ArrayKlass::cast(_klass);
  const size_t hs = align_up(arrayOopDesc::base_offset_in_bytes(array_klass->element_type()), HeapWordSize) / HeapWordSize;
  return MemRegion(cast_from_oop<HeapWord*>(obj) + hs, _word_size - hs);
}

oop ObjArrayAllocator::initialize(HeapWord* mem) const {
  // Set array length before setting the _klass field because a
  // non-null klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_length >= 0, "length should be non-negative");
  if (_do_zero) {
    mem_clear(mem);
  }
  arrayOopDesc::set_length(mem, _length);
  return finish(mem);
}

oop ClassAllocator::initialize(HeapWord* mem) const {
  // Set oop_size field before setting the _klass field because a
  // non-null _klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_word_size > 0, "oop_size must be positive.");
  mem_clear(mem);
  java_lang_Class::set_oop_size(mem, _word_size);
  return finish(mem);
}