/*
 * Copyright (c) 2015, 2022, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_GC_SHENANDOAH_SHENANDOAHMARK_INLINE_HPP
#define SHARE_GC_SHENANDOAH_SHENANDOAHMARK_INLINE_HPP

#include "gc/shenandoah/shenandoahMark.hpp"

#include "gc/shared/continuationGCSupport.inline.hpp"
#include "gc/shenandoah/shenandoahAgeCensus.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahBarrierSet.inline.hpp"
#include "gc/shenandoah/shenandoahClosures.inline.hpp"
#include "gc/shenandoah/shenandoahGenerationalHeap.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "gc/shenandoah/shenandoahOldGeneration.hpp"
#include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
#include "gc/shenandoah/shenandoahStringDedup.inline.hpp"
#include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "memory/iterator.inline.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/prefetch.inline.hpp"
#include "utilities/devirtualizer.inline.hpp"
#include "utilities/powerOfTwo.hpp"

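// Feed the just-marked object to string deduplication, depending on the compile-time
// STRING_DEDUP mode: ENQUEUE_DEDUP enqueues regular deduplication candidates, while
// ALWAYS_DEDUP enqueues any String that has not already had deduplication requested.
// Any other mode makes this a no-op.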
template <StringDedupMode STRING_DEDUP>
void ShenandoahMark::dedup_string(oop obj, StringDedup::Requests* const req) {
  if (STRING_DEDUP == ENQUEUE_DEDUP) {
    if (ShenandoahStringDedup::is_candidate(obj)) {
      req->add(obj);
    }
  } else if (STRING_DEDUP == ALWAYS_DEDUP) {
    if (ShenandoahStringDedup::is_string_candidate(obj) &&
        !ShenandoahStringDedup::dedup_requested(obj)) {
      req->add(obj);
    }
  }
}

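// Process a single marking task: drain the references of the object it denotes into the
// local mark queue. Non-chunked tasks cover whole objects (instances, freshly discovered
// object arrays, or primitive arrays); chunked tasks cover a slice of a large object array.
// Liveness is counted only when the task asks for it, so that objects revisited when a weak
// (final) mark is upgraded to a strong mark are not counted twice.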
template <class T, ShenandoahGenerationType GENERATION, StringDedupMode STRING_DEDUP>
void ShenandoahMark::do_task(ShenandoahObjToScanQueue* q, T* cl, ShenandoahLiveData* live_data, StringDedup::Requests* const req, ShenandoahMarkTask* task, uint worker_id) {
  oop obj = task->obj();

  shenandoah_assert_not_forwarded(nullptr, obj);
  shenandoah_assert_marked(nullptr, obj);
  shenandoah_assert_not_in_cset_except(nullptr, obj, ShenandoahHeap::heap()->cancelled_gc());

  // Are we in weak subgraph scan?
  bool weak = task->is_weak();
  cl->set_weak(weak);

  if (task->is_not_chunked()) {
    if (obj->is_instance()) {
      // Case 1: Normal oop, process as usual.
      if (ContinuationGCSupport::relativize_stack_chunk(obj)) {
        // Loom doesn't support mixing of weak marking and strong marking of
        // stack chunks.
        cl->set_weak(false);
      }

      obj->oop_iterate(cl);
      dedup_string<STRING_DEDUP>(obj, req);
    } else if (obj->is_refArray()) {
      // Case 2: Object array instance and no chunk is set. Must be the first
      // time we visit it, start the chunked processing.
      do_chunked_array_start<T>(q, cl, obj, weak);
    } else {
      // Case 3: Primitive array. Do nothing, no oops there. We use the same
      // performance tweak TypeArrayKlass::oop_oop_iterate_impl is using:
      // we skip iterating over the klass pointer since we know that
      // Universe::TypeArrayKlass never moves.
      assert(obj->is_typeArray(), "should be type array");
    }
    // Count liveness last: push the outstanding work to the queues first.
    // Avoid double-counting objects that are visited twice due to upgrade
    // from final- to strong mark.
    if (task->count_liveness()) {
      count_liveness<GENERATION>(live_data, obj, worker_id);
    }
  } else {
    // Case 4: Array chunk, has a sensible chunk id. Process it.
    do_chunked_array<T>(q, cl, obj, task->chunk(), task->pow(), weak);
  }
}

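// Accumulate the liveness contributed by obj. For regular regions the object size is added
// to the worker-local live_data cache and only flushed to the region when the cached value
// would overflow; for humongous objects the liveness is attributed directly to every region
// in the humongous chain. In generational mode, young objects are also sampled into the age
// census when adaptive tenuring collects its census at mark rather than at evacuation.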
template <ShenandoahGenerationType GENERATION>
inline void ShenandoahMark::count_liveness(ShenandoahLiveData* live_data, oop obj, uint worker_id) {
  const ShenandoahHeap* const heap = ShenandoahHeap::heap();
  const size_t region_idx = heap->heap_region_index_containing(obj);
  ShenandoahHeapRegion* const region = heap->get_region(region_idx);
  const size_t size = obj->size();

  // Age census for objects in the young generation
  if (GENERATION == YOUNG || (GENERATION == GLOBAL && region->is_young())) {
    assert(heap->mode()->is_generational(), "Only if generational");
    if (ShenandoahGenerationalAdaptiveTenuring && !ShenandoahGenerationalCensusAtEvac) {
      assert(region->is_young(), "Only for young objects");
      uint age = ShenandoahHeap::get_object_age(obj);
      ShenandoahAgeCensus* const census = ShenandoahGenerationalHeap::heap()->age_census();
      CENSUS_NOISE(census->add(age, region->age(), region->youth(), size, worker_id);)
      NO_CENSUS_NOISE(census->add(age, region->age(), size, worker_id);)
    }
  }

  if (!region->is_humongous_start()) {
    assert(!region->is_humongous(), "Cannot have continuations here");
    assert(region->is_affiliated(), "Do not count live data within Free Regular Region %zu", region_idx);
    ShenandoahLiveData cur = live_data[region_idx];
    size_t new_val = size + cur;
    if (new_val >= SHENANDOAH_LIVEDATA_MAX) {
      // overflow, flush to region data
      region->increase_live_data_gc_words(new_val);
      live_data[region_idx] = 0;
    } else {
      // still good, remember in locals
      live_data[region_idx] = (ShenandoahLiveData) new_val;
    }
  } else {
    shenandoah_assert_in_correct_region(nullptr, obj);
    size_t num_regions = ShenandoahHeapRegion::required_regions(size * HeapWordSize);

    assert(region->is_affiliated(), "Do not count live data within FREE Humongous Start Region %zu", region_idx);
    for (size_t i = region_idx; i < region_idx + num_regions; i++) {
      ShenandoahHeapRegion* chain_reg = heap->get_region(i);
      assert(chain_reg->is_humongous(), "Expecting a humongous region");
      assert(chain_reg->is_affiliated(), "Do not count live data within FREE Humongous Continuation Region %zu", i);
      chain_reg->increase_live_data_gc_words(chain_reg->used() >> LogHeapWordSize);
    }
  }
}

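// Start chunked processing of an object array. Small arrays are scanned inline; larger ones
// are covered by power-of-two sized chunks that are pushed back onto the queue as separate
// tasks, so multiple workers can steal and scan slices of the same array. A task (chunk, pow)
// is responsible for array indices [(chunk - 1) << pow, chunk << pow); do_chunked_array() may
// split it further before scanning.
//
// Worked example (for illustration only, assuming ObjArrayMarkingStride == 128 and a
// sufficiently large ShenandoahMarkTask::chunk_size()): for len == 1000 we get bits == 10,
// since 1 << 10 == 1024 is the smallest power of two covering the array. The loop below then
// pushes (chunk = 1, pow = 9) covering [0, 512), (3, 8) covering [512, 768), and (7, 7)
// covering [768, 896), and finally scans the irregular tail [896, 1000) inline.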
template <class T>
inline void ShenandoahMark::do_chunked_array_start(ShenandoahObjToScanQueue* q, T* cl, oop obj, bool weak) {
  assert(obj->is_refArray(), "expect object array");
  objArrayOop array = objArrayOop(obj);
  int len = array->length();

  // Mark objArray klass metadata
  if (Devirtualizer::do_metadata(cl)) {
    Devirtualizer::do_klass(cl, array->klass());
  }

  if (len <= (int) ObjArrayMarkingStride*2) {
    // A few slices only, process directly
    array->oop_iterate_range(cl, 0, len);
  } else {
    int bits = log2i_graceful(len);
    // Compensate for non-power-of-two arrays, cover the array in excess:
    if (len != (1 << bits)) bits++;

    // Only allow full chunks on the queue. This frees do_chunked_array() from checking from/to
    // boundaries against array->length(), which would touch the array header on every chunk.
    //
    // To do this, we cut the prefix in full-sized chunks, and submit them on the queue.
    // If the array does not divide evenly into chunks, there will be an irregular tail,
    // which we process separately below.

    int last_idx = 0;

    int chunk = 1;
    int pow = bits;

    // Handle overflow
    if (pow >= 31) {
      assert(pow == 31, "sanity");
      pow--;
      chunk = 2;
      last_idx = (1 << pow);
      bool pushed = q->push(ShenandoahMarkTask(array, true, weak, 1, pow));
      assert(pushed, "overflow queue should always succeed pushing");
    }

    // Split out tasks, as suggested in ShenandoahMarkTask docs. Record the last
    // successful right boundary to figure out the irregular tail.
    while ((1 << pow) > (int)ObjArrayMarkingStride &&
           (chunk*2 < ShenandoahMarkTask::chunk_size())) {
      pow--;
      int left_chunk = chunk*2 - 1;
      int right_chunk = chunk*2;
      int left_chunk_end = left_chunk * (1 << pow);
      if (left_chunk_end < len) {
        bool pushed = q->push(ShenandoahMarkTask(array, true, weak, left_chunk, pow));
        assert(pushed, "overflow queue should always succeed pushing");
        chunk = right_chunk;
        last_idx = left_chunk_end;
      } else {
        chunk = left_chunk;
      }
    }

    // Process the irregular tail, if present
    int from = last_idx;
    if (from < len) {
      array->oop_iterate_range(cl, from, len);
    }
  }
}

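// Process one chunk of a large object array. The task may first be split further: as long as
// the chunk is still larger than ObjArrayMarkingStride and the chunk id still fits, the left
// half is pushed back onto the queue as a new task and the right half is kept. The remaining
// slice is then scanned directly.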
template <class T>
inline void ShenandoahMark::do_chunked_array(ShenandoahObjToScanQueue* q, T* cl, oop obj, int chunk, int pow, bool weak) {
  assert(obj->is_refArray(), "expect object array");
  objArrayOop array = objArrayOop(obj);

  assert(ObjArrayMarkingStride > 0, "sanity");

  // Split out tasks, as suggested in ShenandoahMarkTask docs. Avoid pushing tasks that
  // are known to start beyond the array.
  while ((1 << pow) > (int)ObjArrayMarkingStride && (chunk*2 < ShenandoahMarkTask::chunk_size())) {
    pow--;
    chunk *= 2;
    bool pushed = q->push(ShenandoahMarkTask(array, true, weak, chunk - 1, pow));
    assert(pushed, "overflow queue should always succeed pushing");
  }

  int chunk_size = 1 << pow;

  int from = (chunk - 1) * chunk_size;
  int to = chunk * chunk_size;

#ifdef ASSERT
  int len = array->length();
  assert(0 <= from && from < len, "from is sane: %d/%d", from, len);
  assert(0 < to && to <= len, "to is sane: %d/%d", to, len);
#endif

  array->oop_iterate_range(cl, from, to);
}

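// Closure applied to SATB buffers: every reference recorded by the pre-write barrier is
// marked through into the appropriate queue for GENERATION, so that objects reachable at the
// start of the marking snapshot are not lost when mutators overwrite references concurrently.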
template <ShenandoahGenerationType GENERATION>
class ShenandoahSATBBufferClosure : public SATBBufferClosure {
private:
  ShenandoahObjToScanQueue* _queue;
  ShenandoahObjToScanQueue* _old_queue;
  ShenandoahHeap* _heap;
  ShenandoahMarkingContext* const _mark_context;
public:
  ShenandoahSATBBufferClosure(ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old_q) :
    _queue(q),
    _old_queue(old_q),
    _heap(ShenandoahHeap::heap()),
    _mark_context(_heap->marking_context())
  {
  }

  void do_buffer(void** buffer, size_t size) {
    assert(size == 0 || !_heap->has_forwarded_objects() || _heap->is_concurrent_old_mark_in_progress(), "Forwarded objects are not expected here");
    for (size_t i = 0; i < size; ++i) {
      oop* p = (oop*) &buffer[i];
      ShenandoahMark::mark_through_ref<oop, GENERATION>(p, _queue, _old_queue, _mark_context, false);
    }
  }
};

template<ShenandoahGenerationType GENERATION>
bool ShenandoahMark::in_generation(ShenandoahHeap* const heap, oop obj) {
  // Each in-line expansion of in_generation() resolves GENERATION at compile time.
  if (GENERATION == YOUNG) {
    return heap->is_in_young(obj);
  }

  if (GENERATION == OLD) {
    return heap->is_in_old(obj);
  }

  assert((GENERATION == GLOBAL || GENERATION == NON_GEN), "Unexpected generation type");
  assert(heap->is_in(obj), "Object must be in heap");
  return true;
}

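// Mark through a single reference: load the oop at p and, if it belongs to the generation
// being marked, mark it and push it onto the local queue. References from old memory to
// young objects additionally dirty the corresponding remembered-set card, and references to
// objects outside the marked generation are redirected to the old queue when old marking is
// bootstrapping or running concurrently.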
template<class T, ShenandoahGenerationType GENERATION>
inline void ShenandoahMark::mark_through_ref(T* p, ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old_q, ShenandoahMarkingContext* const mark_context, bool weak) {
  // Note: This is a very hot code path, so the code should be conditional on the GENERATION
  // template parameter where possible, in order to generate the most efficient code.

  T o = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(o)) {
    oop obj = CompressedOops::decode_not_null(o);

    ShenandoahGenerationalHeap* heap = ShenandoahGenerationalHeap::heap();
    shenandoah_assert_not_forwarded(p, obj);
    shenandoah_assert_not_in_cset_except(p, obj, heap->cancelled_gc());
    if (in_generation<GENERATION>(heap, obj)) {
      mark_ref(q, mark_context, weak, obj);
      shenandoah_assert_marked(p, obj);
      if (GENERATION == YOUNG && heap->is_in_old(p)) {
        // Mark the card as dirty because remembered set scanning still finds an interesting pointer here.
        heap->old_generation()->mark_card_as_dirty((HeapWord*)p);
      } else if (GENERATION == GLOBAL && heap->is_in_old(p) && heap->is_in_young(obj)) {
        // Mark the card as dirty because GLOBAL marking found an interesting pointer here.
        heap->old_generation()->mark_card_as_dirty((HeapWord*)p);
      }
    } else if (old_q != nullptr) {
      // Young mark, bootstrapping old_q or concurrent with old_q marking.
      mark_ref(old_q, mark_context, weak, obj);
      shenandoah_assert_marked(p, obj);
    } else if (GENERATION == OLD) {
      // Old mark, found a young pointer.
      if (heap->is_in(p)) {
        assert(heap->is_in_young(obj), "Expected young object.");
        heap->old_generation()->mark_card_as_dirty(p);
      }
    }
  }
}

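// Specializations for non-generational marking: there is no old queue and no remembered set
// to maintain, so these bypass the generational checks entirely.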
template<>
inline void ShenandoahMark::mark_through_ref<oop, ShenandoahGenerationType::NON_GEN>(oop* p, ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old_q, ShenandoahMarkingContext* const mark_context, bool weak) {
  mark_non_generational_ref(p, q, mark_context, weak);
}

template<>
inline void ShenandoahMark::mark_through_ref<narrowOop, ShenandoahGenerationType::NON_GEN>(narrowOop* p, ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old_q, ShenandoahMarkingContext* const mark_context, bool weak) {
  mark_non_generational_ref(p, q, mark_context, weak);
}

template<class T>
inline void ShenandoahMark::mark_non_generational_ref(T* p, ShenandoahObjToScanQueue* q,
                                                      ShenandoahMarkingContext* const mark_context, bool weak) {
  oop o = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(o)) {
    oop obj = CompressedOops::decode_not_null(o);

    shenandoah_assert_not_forwarded(p, obj);
    shenandoah_assert_not_in_cset_except(p, obj, ShenandoahHeap::heap()->cancelled_gc());

    mark_ref(q, mark_context, weak, obj);

    shenandoah_assert_marked(p, obj);
  }
}

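// Try to mark obj in the marking context and, if this thread won the race, push a task for
// it onto the queue. Weak marking only sets the weak bit; strong marking may upgrade a
// previous weak mark, in which case the pushed task skips liveness counting so the object is
// not counted twice.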
inline void ShenandoahMark::mark_ref(ShenandoahObjToScanQueue* q,
                                     ShenandoahMarkingContext* const mark_context,
                                     bool weak, oop obj) {
  bool skip_live = false;
  bool marked;
  if (weak) {
    marked = mark_context->mark_weak(obj);
  } else {
    marked = mark_context->mark_strong(obj, /* was_upgraded = */ skip_live);
  }
  if (marked) {
    bool pushed = q->push(ShenandoahMarkTask(obj, skip_live, weak));
    assert(pushed, "overflow queue should always succeed pushing");
  }
}

ShenandoahObjToScanQueueSet* ShenandoahMark::task_queues() const {
  return _task_queues;
}

ShenandoahObjToScanQueue* ShenandoahMark::get_queue(uint index) const {
  return _task_queues->queue(index);
}

ShenandoahObjToScanQueue* ShenandoahMark::get_old_queue(uint index) const {
  if (_old_gen_task_queues != nullptr) {
    return _old_gen_task_queues->queue(index);
  }
  return nullptr;
}

#endif // SHARE_GC_SHENANDOAH_SHENANDOAHMARK_INLINE_HPP