220 markWord m = obj->mark();
221 if (m.is_forwarded()) {
222 obj = obj->forwardee(m);
223 } else {
224 obj = do_copy_to_survivor_space(region_attr, obj, m);
225 }
226 RawAccess<IS_NOT_NULL>::oop_store(p, obj);
227
228 write_ref_field_post(p, obj);
229 }
230
231 MAYBE_INLINE_EVACUATION
232 void G1ParScanThreadState::do_partial_array(PartialArrayState* state, bool stolen) {
233 // Access state before release by claim().
234 objArrayOop to_array = objArrayOop(state->destination());
235 PartialArraySplitter::Claim claim =
236 _partial_array_splitter.claim(state, _task_queue, stolen);
237 G1HeapRegionAttr dest_attr = _g1h->region_attr(to_array);
238 G1SkipCardMarkSetter x(&_scanner, dest_attr.is_new_survivor());
239 // Process claimed task.
240 to_array->oop_iterate_elements_range(&_scanner,
241 checked_cast<int>(claim._start),
242 checked_cast<int>(claim._end));
243 }
244
245 MAYBE_INLINE_EVACUATION
246 void G1ParScanThreadState::start_partial_objarray(oop from_obj,
247 oop to_obj) {
248 assert(from_obj->is_forwarded(), "precondition");
249 assert(from_obj->forwardee() == to_obj, "precondition");
250 assert(to_obj->is_objArray(), "precondition");
251
252 objArrayOop to_array = objArrayOop(to_obj);
253 size_t array_length = to_array->length();
254 size_t initial_chunk_size =
255 // The source array is unused when processing states.
256 _partial_array_splitter.start(_task_queue, nullptr, to_array, array_length);
257
258 assert(_scanner.skip_card_mark_set(), "must be");
259 // Process the initial chunk. No need to process the type in the
260 // klass, as it will already be handled by processing the built-in
261 // module.
262 to_array->oop_iterate_elements_range(&_scanner, 0, checked_cast<int>(initial_chunk_size));
263 }
264
265 MAYBE_INLINE_EVACUATION
266 void G1ParScanThreadState::dispatch_task(ScannerTask task, bool stolen) {
267 verify_task(task);
268 if (task.is_narrow_oop_ptr()) {
269 do_oop_evac(task.to_narrow_oop_ptr());
270 } else if (task.is_oop_ptr()) {
271 do_oop_evac(task.to_oop_ptr());
272 } else {
273 do_partial_array(task.to_partial_array_state(), stolen);
274 }
275 }
276
277 // Process tasks until overflow queue is empty and local queue
278 // contains no more than threshold entries. NOINLINE to prevent
279 // inlining into steal_and_trim_queue.
280 ATTRIBUTE_FLATTEN NOINLINE
281 void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
282 ScannerTask task;
413 size_t word_sz,
414 uint node_index) {
415 _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
416 }
417
418 void G1ParScanThreadState::update_bot_after_copying(oop obj, size_t word_sz) {
419 HeapWord* obj_start = cast_from_oop<HeapWord*>(obj);
420 G1HeapRegion* region = _g1h->heap_region_containing(obj_start);
421 region->update_bot_for_block(obj_start, obj_start + word_sz);
422 }
423
ALWAYSINLINE
void G1ParScanThreadState::do_iterate_object(oop const obj,
                                             oop const old,
                                             Klass* const klass,
                                             G1HeapRegionAttr const region_attr,
                                             G1HeapRegionAttr const dest_attr,
                                             uint age) {
  // Scan the contents of the just-copied object "obj"; "old" is the
  // from-space original (only used for recording dedup candidates and as
  // the forwarded source when starting partial-array scanning).
  // Most objects are not arrays, so do one array check rather than
  // checking for each array category for each object.
  if (klass->is_array_klass()) {
    assert(!klass->is_stack_chunk_instance_klass(), "must be");

    if (klass->is_objArray_klass()) {
      // Object arrays are scanned incrementally: set up partial-array
      // state and process the initial chunk.
      start_partial_objarray(old, obj);
    } else {
      // Nothing needs to be done for typeArrays. Body doesn't contain
      // any oops to scan, and the type in the klass will already be handled
      // by processing the built-in module.
      assert(klass->is_typeArray_klass(), "invariant");
    }
    return;
  }

  // Non-array object. Must happen before field iteration below.
  ContinuationGCSupport::transform_stack_chunk(obj);

  // Check for deduplicating young Strings.
  if (G1StringDedup::is_candidate_from_evacuation(klass,
                                                  region_attr,
                                                  dest_attr,
                                                  age)) {
    // Record old; request adds a new weak reference, which reference
    // processing expects to refer to a from-space object.
    _string_dedup_requests.add(old);
  }

  // Caller is expected to have configured the scanner's card-mark skipping.
  assert(_scanner.skip_card_mark_set(), "must be");
  obj->oop_iterate_backwards(&_scanner, klass);
}
462
|
220 markWord m = obj->mark();
221 if (m.is_forwarded()) {
222 obj = obj->forwardee(m);
223 } else {
224 obj = do_copy_to_survivor_space(region_attr, obj, m);
225 }
226 RawAccess<IS_NOT_NULL>::oop_store(p, obj);
227
228 write_ref_field_post(p, obj);
229 }
230
231 MAYBE_INLINE_EVACUATION
232 void G1ParScanThreadState::do_partial_array(PartialArrayState* state, bool stolen) {
233 // Access state before release by claim().
234 objArrayOop to_array = objArrayOop(state->destination());
235 PartialArraySplitter::Claim claim =
236 _partial_array_splitter.claim(state, _task_queue, stolen);
237 G1HeapRegionAttr dest_attr = _g1h->region_attr(to_array);
238 G1SkipCardMarkSetter x(&_scanner, dest_attr.is_new_survivor());
239 // Process claimed task.
240 assert(to_array->is_refArray(), "Must be");
241 refArrayOop(to_array)->oop_iterate_elements_range(&_scanner,
242 checked_cast<int>(claim._start),
243 checked_cast<int>(claim._end));
244 }
245
246 MAYBE_INLINE_EVACUATION
247 void G1ParScanThreadState::start_partial_objarray(oop from_obj,
248 oop to_obj) {
249 assert(from_obj->is_forwarded(), "precondition");
250 assert(from_obj->forwardee() == to_obj, "precondition");
251 assert(to_obj->is_objArray(), "precondition");
252
253 objArrayOop to_array = objArrayOop(to_obj);
254 size_t array_length = to_array->length();
255 size_t initial_chunk_size =
256 // The source array is unused when processing states.
257 _partial_array_splitter.start(_task_queue, nullptr, to_array, array_length);
258
259 assert(_scanner.skip_card_mark_set(), "must be");
260 // Process the initial chunk. No need to process the type in the
261 // klass, as it will already be handled by processing the built-in
262 // module.
263 assert(to_array->is_refArray(), "Must be");
264 refArrayOop(to_array)->oop_iterate_elements_range(&_scanner, 0, checked_cast<int>(initial_chunk_size));
265 }
266
267 MAYBE_INLINE_EVACUATION
268 void G1ParScanThreadState::dispatch_task(ScannerTask task, bool stolen) {
269 verify_task(task);
270 if (task.is_narrow_oop_ptr()) {
271 do_oop_evac(task.to_narrow_oop_ptr());
272 } else if (task.is_oop_ptr()) {
273 do_oop_evac(task.to_oop_ptr());
274 } else {
275 do_partial_array(task.to_partial_array_state(), stolen);
276 }
277 }
278
279 // Process tasks until overflow queue is empty and local queue
280 // contains no more than threshold entries. NOINLINE to prevent
281 // inlining into steal_and_trim_queue.
282 ATTRIBUTE_FLATTEN NOINLINE
283 void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
284 ScannerTask task;
415 size_t word_sz,
416 uint node_index) {
417 _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
418 }
419
420 void G1ParScanThreadState::update_bot_after_copying(oop obj, size_t word_sz) {
421 HeapWord* obj_start = cast_from_oop<HeapWord*>(obj);
422 G1HeapRegion* region = _g1h->heap_region_containing(obj_start);
423 region->update_bot_for_block(obj_start, obj_start + word_sz);
424 }
425
ALWAYSINLINE
void G1ParScanThreadState::do_iterate_object(oop const obj,
                                             oop const old,
                                             Klass* const klass,
                                             G1HeapRegionAttr const region_attr,
                                             G1HeapRegionAttr const dest_attr,
                                             uint age) {
  // Scan the contents of the just-copied object "obj"; "old" is the
  // from-space original (only used for recording dedup candidates and as
  // the forwarded source when starting partial-array scanning).
  // Most objects are not arrays, so do one array check rather than
  // checking for each array category for each object.
  // Flat arrays are excluded here and are iterated by the generic
  // oop_iterate_backwards below.
  if (klass->is_array_klass() && !klass->is_flatArray_klass()) {
    assert(!klass->is_stack_chunk_instance_klass(), "must be");

    if (klass->is_refArray_klass()) {
      // Reference arrays are scanned incrementally: set up partial-array
      // state and process the initial chunk.
      start_partial_objarray(old, obj);
    } else {
      // Nothing needs to be done for typeArrays. Body doesn't contain
      // any oops to scan, and the type in the klass will already be handled
      // by processing the built-in module.
      assert(klass->is_typeArray_klass() || klass->is_objArray_klass(), "invariant");
    }
    return;
  }

  // Non-array path (and flat arrays). Must happen before field iteration below.
  ContinuationGCSupport::transform_stack_chunk(obj);

  // Check for deduplicating young Strings.
  if (G1StringDedup::is_candidate_from_evacuation(klass,
                                                  region_attr,
                                                  dest_attr,
                                                  age)) {
    // Record old; request adds a new weak reference, which reference
    // processing expects to refer to a from-space object.
    _string_dedup_requests.add(old);
  }

  // Caller is expected to have configured the scanner's card-mark skipping.
  assert(_scanner.skip_card_mark_set(), "must be");
  obj->oop_iterate_backwards(&_scanner, klass);
}
464
|