/*
 * Copyright (c) 2018, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/vmClasses.hpp"
#include "gc/shared/allocTracer.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/memAllocator.hpp"
#include "gc/shared/threadLocalAllocBuffer.inline.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "memory/universe.hpp"
#include "oops/arrayOop.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/continuationJavaClasses.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaThread.hpp"
#include "runtime/sharedRuntime.hpp"
#include "services/lowMemoryDetector.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"

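// Allocation is a stack-allocated RAII helper that brackets a single heap
// allocation: the constructor runs the pre-allocation sanity checks
// (verify_before()), and the destructor either throws an OutOfMemoryError
// via check_out_of_memory() or, on success, fires the allocation
// notifications (JVMTI, JFR, DTrace, low-memory detector). The MemAllocator
// code in between records how the memory was obtained in the flag fields
// below.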
class MemAllocator::Allocation: StackObj {
  friend class MemAllocator;

  const MemAllocator& _allocator;
  JavaThread*         _thread;
  oop*                _obj_ptr;
  bool                _overhead_limit_exceeded;
  bool                _allocated_outside_tlab;
  size_t              _allocated_tlab_size;
  bool                _tlab_end_reset_for_sample;

  bool check_out_of_memory();
  void verify_before();
  void verify_after();
  void notify_allocation();
  void notify_allocation_jvmti_sampler();
  void notify_allocation_low_memory_detector();
  void notify_allocation_jfr_sampler();
  void notify_allocation_dtrace_sampler();
#ifdef ASSERT
  void check_for_valid_allocation_state() const;
#endif

  class PreserveObj;

public:
  Allocation(const MemAllocator& allocator, oop* obj_ptr)
    : _allocator(allocator),
      _thread(JavaThread::cast(allocator._thread)), // Do not use Allocation in non-JavaThreads.
      _obj_ptr(obj_ptr),
      _overhead_limit_exceeded(false),
      _allocated_outside_tlab(false),
      _allocated_tlab_size(0),
      _tlab_end_reset_for_sample(false)
  {
    assert(Thread::current() == allocator._thread, "do not pass MemAllocator across threads");
    verify_before();
  }

  ~Allocation() {
    if (!check_out_of_memory()) {
      notify_allocation();
    }
  }

  oop obj() const { return *_obj_ptr; }
};

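// PreserveObj shields the newly allocated object across code that may
// safepoint (and hence move the object): it stashes the oop in a Handle,
// nulls out the raw oop location while the Handle is live, and writes the
// possibly updated oop back in its destructor.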
class MemAllocator::Allocation::PreserveObj: StackObj {
  HandleMark _handle_mark;
  Handle     _handle;
  oop* const _obj_ptr;

public:
  PreserveObj(JavaThread* thread, oop* obj_ptr)
    : _handle_mark(thread),
      _handle(thread, *obj_ptr),
      _obj_ptr(obj_ptr)
  {
    *obj_ptr = nullptr;
  }

  ~PreserveObj() {
    *_obj_ptr = _handle();
  }

  oop operator()() const {
    return _handle();
  }
};

bool MemAllocator::Allocation::check_out_of_memory() {
  JavaThread* THREAD = _thread; // For exception macros.
  assert(!HAS_PENDING_EXCEPTION, "Unexpected exception, will result in uninitialized storage");

  if (obj() != nullptr) {
    return false;
  }

  const char* message = _overhead_limit_exceeded ? "GC overhead limit exceeded" : "Java heap space";
  if (!_thread->in_retryable_allocation()) {
    // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
    report_java_out_of_memory(message);

    if (JvmtiExport::should_post_resource_exhausted()) {
      JvmtiExport::post_resource_exhausted(
        JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR | JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP,
        message);
    }
    oop exception = _overhead_limit_exceeded ?
        Universe::out_of_memory_error_gc_overhead_limit() :
        Universe::out_of_memory_error_java_heap();
    THROW_OOP_(exception, true);
  } else {
    THROW_OOP_(Universe::out_of_memory_error_retry(), true);
  }
}

void MemAllocator::Allocation::verify_before() {
  // Clear unhandled oops for memory allocation.  Memory allocation might
  // not take out a lock if from tlab, so clear here.
  JavaThread* THREAD = _thread; // For exception macros.
  assert(!HAS_PENDING_EXCEPTION, "Should not allocate with exception pending");
  debug_only(check_for_valid_allocation_state());
  assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed");
}

#ifdef ASSERT
void MemAllocator::Allocation::check_for_valid_allocation_state() const {
  // How to choose between a pending exception and a potential
  // OutOfMemoryError?  Don't allow pending exceptions.
  // This is a VM policy failure, so how do we exhaustively test it?
  assert(!_thread->has_pending_exception(),
         "shouldn't be allocating with pending exception");
  // Allocation of an oop can always invoke a safepoint.
  _thread->check_for_valid_safepoint_state();
}
#endif

void MemAllocator::Allocation::notify_allocation_jvmti_sampler() {
  // support for JVMTI VMObjectAlloc event (no-op if not enabled)
  JvmtiExport::vm_object_alloc_event_collector(obj());

  if (!JvmtiExport::should_post_sampled_object_alloc()) {
    // Sampling disabled
    return;
  }

  if (!_allocated_outside_tlab && _allocated_tlab_size == 0 && !_tlab_end_reset_for_sample) {
    // Only sample non-TLAB allocations, and TLAB allocations that either
    // refilled the TLAB or reset its end because of a sampler-induced slow
    // path; skip ordinary fast-path TLAB allocations.
    return;
  }

  // If we want to be sampling, protect the allocated object with a Handle
  // before doing the callback. The callback is done in the destructor of
  // the JvmtiSampledObjectAllocEventCollector.
  size_t bytes_since_last = 0;

  {
    PreserveObj obj_h(_thread, _obj_ptr);
    JvmtiSampledObjectAllocEventCollector collector;
    size_t size_in_bytes = _allocator._word_size * HeapWordSize;
    ThreadLocalAllocBuffer& tlab = _thread->tlab();

    if (!_allocated_outside_tlab) {
      bytes_since_last = tlab.bytes_since_last_sample_point();
    }

    _thread->heap_sampler().check_for_sampling(obj_h(), size_in_bytes, bytes_since_last);
  }

  if (_tlab_end_reset_for_sample || _allocated_tlab_size != 0) {
    // Tell the TLAB to forget bytes_since_last if we passed it to the heap sampler.
    _thread->tlab().set_sample_end(bytes_since_last != 0);
  }
}

void MemAllocator::Allocation::notify_allocation_low_memory_detector() {
  // support low memory notifications (no-op if not enabled)
  LowMemoryDetector::detect_low_memory_for_collected_pools();
}

void MemAllocator::Allocation::notify_allocation_jfr_sampler() {
  HeapWord* mem = cast_from_oop<HeapWord*>(obj());
  size_t size_in_bytes = _allocator._word_size * HeapWordSize;

  if (_allocated_outside_tlab) {
    AllocTracer::send_allocation_outside_tlab(obj()->klass(), mem, size_in_bytes, _thread);
  } else if (_allocated_tlab_size != 0) {
    // TLAB was refilled
    AllocTracer::send_allocation_in_new_tlab(obj()->klass(), mem, _allocated_tlab_size * HeapWordSize,
                                             size_in_bytes, _thread);
  }
}

void MemAllocator::Allocation::notify_allocation_dtrace_sampler() {
  if (DTraceAllocProbes) {
    // support for DTrace object alloc event (no-op most of the time)
    Klass* klass = obj()->klass();
    size_t word_size = _allocator._word_size;
    if (klass != nullptr && klass->name() != nullptr) {
      SharedRuntime::dtrace_object_alloc(_thread, obj(), word_size);
    }
  }
}

void MemAllocator::Allocation::notify_allocation() {
  notify_allocation_low_memory_detector();
  notify_allocation_jfr_sampler();
  notify_allocation_dtrace_sampler();
  notify_allocation_jvmti_sampler();
}

HeapWord* MemAllocator::mem_allocate_outside_tlab(Allocation& allocation) const {
  allocation._allocated_outside_tlab = true;
  HeapWord* mem = Universe::heap()->mem_allocate(_word_size, &allocation._overhead_limit_exceeded);
  if (mem == nullptr) {
    return mem;
  }

  size_t size_in_bytes = _word_size * HeapWordSize;
  _thread->incr_allocated_bytes(size_in_bytes);

  return mem;
}

HeapWord* MemAllocator::mem_allocate_inside_tlab(Allocation& allocation) const {
  assert(UseTLAB, "should use UseTLAB");

  // Try allocating from an existing TLAB.
  HeapWord* mem = mem_allocate_inside_tlab_fast();
  if (mem != nullptr) {
    return mem;
  }

  // Try refilling the TLAB and allocating the object in it.
  return mem_allocate_inside_tlab_slow(allocation);
}

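// Fast path: bump-pointer allocation out of the thread's current TLAB;
// returns null if the request does not fit in the remaining space.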
HeapWord* MemAllocator::mem_allocate_inside_tlab_fast() const {
  return _thread->tlab().allocate(_word_size);
}

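// Slow path: the current TLAB could not satisfy the request. Either keep the
// TLAB (and allocate the object directly in the shared heap) when its free
// space is still worth retaining, or retire it and refill a fresh one sized
// by the TLAB sizing policy. When JVMTI heap sampling is active, first retry
// against the real TLAB end, since the fast-path end may have been pulled in
// to force a sample.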
HeapWord* MemAllocator::mem_allocate_inside_tlab_slow(Allocation& allocation) const {
  HeapWord* mem = nullptr;
  ThreadLocalAllocBuffer& tlab = _thread->tlab();

  if (JvmtiExport::should_post_sampled_object_alloc()) {
    tlab.set_back_allocation_end();
    mem = tlab.allocate(_word_size);

    // We moved the allocation end back to the real TLAB end to try this
    // allocation; remember to reset the sample point when done.
    allocation._tlab_end_reset_for_sample = true;

    if (mem != nullptr) {
      return mem;
    }
  }

  // Retain the TLAB and allocate the object in the shared space if
  // the amount free in the TLAB is too large to discard.
  if (tlab.free() > tlab.refill_waste_limit()) {
    tlab.record_slow_allocation(_word_size);
    return nullptr;
  }

  // Discard the TLAB and allocate a new one.
  // To minimize fragmentation, the last TLAB may be smaller than the rest.
  size_t new_tlab_size = tlab.compute_size(_word_size);

  tlab.retire_before_allocation();

  if (new_tlab_size == 0) {
    return nullptr;
  }

  // Allocate a new TLAB requesting new_tlab_size. Any size
  // between the minimal size and new_tlab_size is accepted.
  size_t min_tlab_size = ThreadLocalAllocBuffer::compute_min_size(_word_size);
  mem = Universe::heap()->allocate_new_tlab(min_tlab_size, new_tlab_size, &allocation._allocated_tlab_size);
  if (mem == nullptr) {
    assert(allocation._allocated_tlab_size == 0,
           "Allocation failed, but actual size was updated. min: " SIZE_FORMAT
           ", desired: " SIZE_FORMAT ", actual: " SIZE_FORMAT,
           min_tlab_size, new_tlab_size, allocation._allocated_tlab_size);
    return nullptr;
  }
  assert(allocation._allocated_tlab_size != 0, "Allocation succeeded but actual size not updated. mem at: "
         PTR_FORMAT " min: " SIZE_FORMAT ", desired: " SIZE_FORMAT,
         p2i(mem), min_tlab_size, new_tlab_size);

  if (ZeroTLAB) {
    // ... and clear it.
    Copy::zero_to_words(mem, allocation._allocated_tlab_size);
  } else {
    // ... and zap just the allocated object.
#ifdef ASSERT
    // Skip mangling the space corresponding to the object header to
    // ensure that the returned space is not considered parsable by
    // any concurrent GC thread.
    size_t hdr_size = oopDesc::header_size();
    Copy::fill_to_words(mem + hdr_size, allocation._allocated_tlab_size - hdr_size, badHeapWordVal);
#endif // ASSERT
  }

  tlab.fill(mem, mem + _word_size, allocation._allocated_tlab_size);
  return mem;
}

HeapWord* MemAllocator::mem_allocate_slow(Allocation& allocation) const {
  // Allocation of an oop can always invoke a safepoint.
  debug_only(allocation._thread->check_for_valid_safepoint_state());

  if (UseTLAB) {
    // Try refilling the TLAB and allocating the object in it.
    HeapWord* mem = mem_allocate_inside_tlab_slow(allocation);
    if (mem != nullptr) {
      return mem;
    }
  }

  return mem_allocate_outside_tlab(allocation);
}

HeapWord* MemAllocator::mem_allocate(Allocation& allocation) const {
  if (UseTLAB) {
    // Try allocating from an existing TLAB.
    HeapWord* mem = mem_allocate_inside_tlab_fast();
    if (mem != nullptr) {
      return mem;
    }
  }

  return mem_allocate_slow(allocation);
}

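// Entry point for the allocator. A typical call site (a sketch; the concrete
// helpers live in the CollectedHeap layer) looks like:
//
//   ObjAllocator allocator(klass, word_size, THREAD);
//   oop obj = allocator.allocate();
//
// On failure, allocate() returns null and the Allocation destructor has
// already installed the appropriate OutOfMemoryError as a pending exception.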
oop MemAllocator::allocate() const {
  oop obj = nullptr;
  {
    Allocation allocation(*this, &obj);
    HeapWord* mem = mem_allocate(allocation);
    if (mem != nullptr) {
      obj = initialize(mem);
    } else {
      // The unhandled oop detector will poison local variable obj,
      // so reset it to null if mem is null.
      obj = nullptr;
    }
  }
  return obj;
}

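// Zero the freshly allocated memory: clear the klass gap (if any) and fill
// the body of the object, everything past the header, with zeros.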
void MemAllocator::mem_clear(HeapWord* mem) const {
  assert(mem != nullptr, "cannot initialize null object");
  const size_t hs = oopDesc::header_size();
  assert(_word_size >= hs, "unexpected object size");
  oopDesc::set_klass_gap(mem, 0);
  Copy::fill_to_aligned_words(mem + hs, _word_size - hs);
}

oop MemAllocator::finish(HeapWord* mem) const {
  assert(mem != nullptr, "null object pointer");
  // May be bootstrapping
  oopDesc::set_mark(mem, markWord::prototype());
  // Need a release store to ensure array/class length, mark word, and
  // object zeroing are visible before setting the klass non-null, for
  // concurrent collectors.
  oopDesc::release_set_klass(mem, _klass);
  return cast_to_oop(mem);
}

oop ObjAllocator::initialize(HeapWord* mem) const {
  mem_clear(mem);
  return finish(mem);
}

oop ObjArrayAllocator::initialize(HeapWord* mem) const {
  // Set array length before setting the _klass field because a
  // non-null klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_length >= 0, "length should be non-negative");
  if (_do_zero) {
    mem_clear(mem);
  }
  arrayOopDesc::set_length(mem, _length);
  return finish(mem);
}

oop ClassAllocator::initialize(HeapWord* mem) const {
  // Set oop_size field before setting the _klass field because a
  // non-null _klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_word_size > 0, "oop_size must be positive.");
  mem_clear(mem);
  java_lang_Class::set_oop_size(mem, _word_size);
  return finish(mem);
}