// ---------------------------------------------------------------------------
// Original version of this section, before the refArray / flat-array changes
// in the updated copy further below.
// ---------------------------------------------------------------------------

  markWord m = obj->mark();
  if (m.is_forwarded()) {
    obj = obj->forwardee(m);
  } else {
    obj = do_copy_to_survivor_space(region_attr, obj, m);
  }
  RawAccess<IS_NOT_NULL>::oop_store(p, obj);

  write_ref_field_post(p, obj);
}

MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::do_partial_array(PartialArrayState* state, bool stolen) {
  // Access state before release by claim().
  objArrayOop to_array = objArrayOop(state->destination());
  PartialArraySplitter::Claim claim =
    _partial_array_splitter.claim(state, _task_queue, stolen);
  G1HeapRegionAttr dest_attr = _g1h->region_attr(to_array);
  G1SkipCardEnqueueSetter x(&_scanner, dest_attr.is_new_survivor());
  // Process claimed task.
  to_array->oop_iterate_range(&_scanner,
                              checked_cast<int>(claim._start),
                              checked_cast<int>(claim._end));
}

MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::start_partial_objarray(oop from_obj,
                                                  oop to_obj) {
  assert(from_obj->is_forwarded(), "precondition");
  assert(from_obj->forwardee() == to_obj, "precondition");
  assert(to_obj->is_objArray(), "precondition");

  objArrayOop to_array = objArrayOop(to_obj);
  size_t array_length = to_array->length();
  size_t initial_chunk_size =
    // The source array is unused when processing states.
    _partial_array_splitter.start(_task_queue, nullptr, to_array, array_length);

  assert(_scanner.skip_card_enqueue_set(), "must be");
  // Process the initial chunk. No need to process the type in the
  // klass, as it will already be handled by processing the built-in
  // module.
  to_array->oop_iterate_range(&_scanner, 0, checked_cast<int>(initial_chunk_size));
}

MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::dispatch_task(ScannerTask task, bool stolen) {
  verify_task(task);
  if (task.is_narrow_oop_ptr()) {
    do_oop_evac(task.to_narrow_oop_ptr());
  } else if (task.is_oop_ptr()) {
    do_oop_evac(task.to_oop_ptr());
  } else {
    do_partial_array(task.to_partial_array_state(), stolen);
  }
}

// Process tasks until overflow queue is empty and local queue
// contains no more than threshold entries. NOINLINE to prevent
// inlining into steal_and_trim_queue.
ATTRIBUTE_FLATTEN NOINLINE
void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
  ScannerTask task;
  // ...

void G1ParScanThreadState::undo_allocation(G1HeapRegionAttr dest_attr,
                                           HeapWord* obj_ptr,
                                           size_t word_sz,
                                           uint node_index) {
  _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
}

void G1ParScanThreadState::update_bot_after_copying(oop obj, size_t word_sz) {
  HeapWord* obj_start = cast_from_oop<HeapWord*>(obj);
  G1HeapRegion* region = _g1h->heap_region_containing(obj_start);
  region->update_bot_for_block(obj_start, obj_start + word_sz);
}

ALWAYSINLINE
void G1ParScanThreadState::do_iterate_object(oop const obj,
                                             oop const old,
                                             Klass* const klass,
                                             G1HeapRegionAttr const region_attr,
                                             G1HeapRegionAttr const dest_attr,
                                             uint age) {
  // Most objects are not arrays, so do one array check rather than
  // checking for each array category for each object.
  if (klass->is_array_klass()) {
    assert(!klass->is_stack_chunk_instance_klass(), "must be");

    if (klass->is_objArray_klass()) {
      start_partial_objarray(old, obj);
    } else {
      // Nothing needs to be done for typeArrays. Body doesn't contain
      // any oops to scan, and the type in the klass will already be handled
      // by processing the built-in module.
      assert(klass->is_typeArray_klass(), "invariant");
    }
    return;
  }

  ContinuationGCSupport::transform_stack_chunk(obj);

  // Check for deduplicating young Strings.
  if (G1StringDedup::is_candidate_from_evacuation(klass,
                                                  region_attr,
                                                  dest_attr,
                                                  age)) {
    // Record old; request adds a new weak reference, which reference
    // processing expects to refer to a from-space object.
    _string_dedup_requests.add(old);
  }

  assert(_scanner.skip_card_enqueue_set(), "must be");
  obj->oop_iterate_backwards(&_scanner, klass);
}
// ---------------------------------------------------------------------------
// The same section, updated for refArray / flat-array handling: reference
// arrays are scanned as refArrays, and flat arrays fall through to the
// generic object iteration in do_iterate_object() instead of the array path.
// ---------------------------------------------------------------------------
  markWord m = obj->mark();
  if (m.is_forwarded()) {
    obj = obj->forwardee(m);
  } else {
    obj = do_copy_to_survivor_space(region_attr, obj, m);
  }
  RawAccess<IS_NOT_NULL>::oop_store(p, obj);

  write_ref_field_post(p, obj);
}

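// Scan one claimed chunk of a partially-scanned object array. The
// PartialArrayState records the to-space destination array; claim() hands
// this worker a [start, end) slice of the elements to iterate.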
MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::do_partial_array(PartialArrayState* state, bool stolen) {
  // Access state before release by claim().
  objArrayOop to_array = objArrayOop(state->destination());
  PartialArraySplitter::Claim claim =
    _partial_array_splitter.claim(state, _task_queue, stolen);
  G1HeapRegionAttr dest_attr = _g1h->region_attr(to_array);
  G1SkipCardEnqueueSetter x(&_scanner, dest_attr.is_new_survivor());
  // Process claimed task.
  assert(to_array->is_refArray(), "Must be");
  refArrayOop(to_array)->oop_iterate_range(&_scanner,
                                           checked_cast<int>(claim._start),
                                           checked_cast<int>(claim._end));
}

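// Begin chunked scanning of a newly copied object array: publish a
// PartialArrayState for the remainder (so other workers can claim or steal
// chunks via do_partial_array()) and scan only the initial chunk here.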
MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::start_partial_objarray(oop from_obj,
                                                  oop to_obj) {
  assert(from_obj->is_forwarded(), "precondition");
  assert(from_obj->forwardee() == to_obj, "precondition");
  assert(to_obj->is_objArray(), "precondition");

  objArrayOop to_array = objArrayOop(to_obj);
  size_t array_length = to_array->length();
  size_t initial_chunk_size =
    // The source array is unused when processing states.
    _partial_array_splitter.start(_task_queue, nullptr, to_array, array_length);

  assert(_scanner.skip_card_enqueue_set(), "must be");
  // Process the initial chunk. No need to process the type in the
  // klass, as it will already be handled by processing the built-in
  // module.
  assert(to_array->is_refArray(), "Must be");
  refArrayOop(to_array)->oop_iterate_range(&_scanner, 0, checked_cast<int>(initial_chunk_size));
}

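// Dispatch a queued task to its handler: narrow-oop and full-width oop
// references are evacuated directly; everything else is a partial object
// array chunk.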
MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::dispatch_task(ScannerTask task, bool stolen) {
  verify_task(task);
  if (task.is_narrow_oop_ptr()) {
    do_oop_evac(task.to_narrow_oop_ptr());
  } else if (task.is_oop_ptr()) {
    do_oop_evac(task.to_oop_ptr());
  } else {
    do_partial_array(task.to_partial_array_state(), stolen);
  }
}

// Process tasks until overflow queue is empty and local queue
// contains no more than threshold entries. NOINLINE to prevent
// inlining into steal_and_trim_queue.
ATTRIBUTE_FLATTEN NOINLINE
void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
  ScannerTask task;
  // ...

void G1ParScanThreadState::undo_allocation(G1HeapRegionAttr dest_attr,
                                           HeapWord* obj_ptr,
                                           size_t word_sz,
                                           uint node_index) {
  _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
}

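// Update the destination region's block offset table (BOT) to cover the
// newly copied object, so card scanning can find the block start quickly.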
void G1ParScanThreadState::update_bot_after_copying(oop obj, size_t word_sz) {
  HeapWord* obj_start = cast_from_oop<HeapWord*>(obj);
  G1HeapRegion* region = _g1h->heap_region_containing(obj_start);
  region->update_bot_for_block(obj_start, obj_start + word_sz);
}

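// Scan the contents of a just-copied object. Most arrays are special-cased
// up front: reference arrays go through the partial-array machinery and
// type arrays need no scanning, while flat arrays fall through to the
// generic oop_iterate_backwards() path at the bottom.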
ALWAYSINLINE
void G1ParScanThreadState::do_iterate_object(oop const obj,
                                             oop const old,
                                             Klass* const klass,
                                             G1HeapRegionAttr const region_attr,
                                             G1HeapRegionAttr const dest_attr,
                                             uint age) {
  // Most objects are not arrays, so do one array check rather than
  // checking for each array category for each object.
  if (klass->is_array_klass() && !klass->is_flatArray_klass()) {
    assert(!klass->is_stack_chunk_instance_klass(), "must be");

    if (klass->is_refArray_klass()) {
      start_partial_objarray(old, obj);
    } else {
      // Nothing needs to be done for typeArrays. Body doesn't contain
      // any oops to scan, and the type in the klass will already be handled
      // by processing the built-in module.
      assert(klass->is_typeArray_klass() || klass->is_objArray_klass(), "invariant");
    }
    return;
  }

  ContinuationGCSupport::transform_stack_chunk(obj);

  // Check for deduplicating young Strings.
  if (G1StringDedup::is_candidate_from_evacuation(klass,
                                                  region_attr,
                                                  dest_attr,
                                                  age)) {
    // Record old; request adds a new weak reference, which reference
    // processing expects to refer to a from-space object.
    _string_dedup_requests.add(old);
  }

  assert(_scanner.skip_card_enqueue_set(), "must be");
  obj->oop_iterate_backwards(&_scanner, klass);
}
