221 markWord m = obj->mark();
222 if (m.is_forwarded()) {
223 obj = obj->forwardee(m);
224 } else {
225 obj = do_copy_to_survivor_space(region_attr, obj, m);
226 }
227 RawAccess<IS_NOT_NULL>::oop_store(p, obj);
228
229 write_ref_field_post(p, obj);
230 }
231
// Process one claimed chunk of a partially scanned object array.
// `state` describes the (already-copied) destination array and the
// remaining work; `stolen` indicates the task was taken from another
// worker's queue rather than popped from our own.
MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::do_partial_array(PartialArrayState* state, bool stolen) {
  // Access state before release by claim().  claim() may release the
  // state object, so the destination array must be read first.
  objArrayOop to_array = objArrayOop(state->destination());
  PartialArraySplitter::Claim claim =
    _partial_array_splitter.claim(state, _task_queue, stolen);
  G1HeapRegionAttr dest_attr = _g1h->region_attr(to_array);
  // RAII guard: while in scope, configure the scanner to skip card
  // marking when the destination is a newly allocated survivor region.
  G1SkipCardMarkSetter x(&_scanner, dest_attr.is_new_survivor());
  // Process claimed task: scan only the claimed index range
  // [claim._start, claim._end) of the destination array.
  to_array->oop_iterate_range(&_scanner,
                              checked_cast<int>(claim._start),
                              checked_cast<int>(claim._end));
}
245
// Begin chunked scanning of a just-evacuated object array: register the
// remainder with the partial-array splitter (so other workers can help)
// and scan the initial chunk here.  `from_obj` is the from-space
// original; `to_obj` is its copy.
MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::start_partial_objarray(oop from_obj,
                                                  oop to_obj) {
  assert(from_obj->is_forwarded(), "precondition");
  assert(from_obj->forwardee() == to_obj, "precondition");
  assert(to_obj->is_objArray(), "precondition");

  objArrayOop to_array = objArrayOop(to_obj);
  size_t array_length = to_array->length();
  size_t initial_chunk_size =
    // The source array is unused when processing states.
    _partial_array_splitter.start(_task_queue, nullptr, to_array, array_length);

  // Caller must already have card-mark skipping configured on the scanner.
  assert(_scanner.skip_card_mark_set(), "must be");
  // Process the initial chunk. No need to process the type in the
  // klass, as it will already be handled by processing the built-in
  // module.
  to_array->oop_iterate_range(&_scanner, 0, checked_cast<int>(initial_chunk_size));
}
265
266 MAYBE_INLINE_EVACUATION
267 void G1ParScanThreadState::dispatch_task(ScannerTask task, bool stolen) {
268 verify_task(task);
269 if (task.is_narrow_oop_ptr()) {
270 do_oop_evac(task.to_narrow_oop_ptr());
271 } else if (task.is_oop_ptr()) {
272 do_oop_evac(task.to_oop_ptr());
273 } else {
274 do_partial_array(task.to_partial_array_state(), stolen);
275 }
276 }
277
278 // Process tasks until overflow queue is empty and local queue
279 // contains no more than threshold entries. NOINLINE to prevent
280 // inlining into steal_and_trim_queue.
281 ATTRIBUTE_FLATTEN NOINLINE
282 void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
283 ScannerTask task;
414 size_t word_sz,
415 uint node_index) {
416 _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
417 }
418
419 void G1ParScanThreadState::update_bot_after_copying(oop obj, size_t word_sz) {
420 HeapWord* obj_start = cast_from_oop<HeapWord*>(obj);
421 G1HeapRegion* region = _g1h->heap_region_containing(obj_start);
422 region->update_bot_for_block(obj_start, obj_start + word_sz);
423 }
424
// Scan the freshly copied object `obj` for references, queueing any
// discovered oops for evacuation.  `old` is the from-space original,
// `klass` is the object's class, `region_attr`/`dest_attr` describe
// the source and destination regions, and `age` is the object's age
// after copying (used for string-dedup candidacy).
ALWAYSINLINE
void G1ParScanThreadState::do_iterate_object(oop const obj,
                                             oop const old,
                                             Klass* const klass,
                                             G1HeapRegionAttr const region_attr,
                                             G1HeapRegionAttr const dest_attr,
                                             uint age) {
  // Most objects are not arrays, so do one array check rather than
  // checking for each array category for each object.
  if (klass->is_array_klass()) {
    assert(!klass->is_stack_chunk_instance_klass(), "must be");

    if (klass->is_objArray_klass()) {
      // Object arrays are scanned in chunks via the partial-array
      // mechanism so large arrays can be split across workers.
      start_partial_objarray(old, obj);
    } else {
      // Nothing needs to be done for typeArrays. Body doesn't contain
      // any oops to scan, and the type in the klass will already be handled
      // by processing the built-in module.
      assert(klass->is_typeArray_klass(), "invariant");
    }
    return;
  }

  // Non-array path: let continuation support fix up stack chunks first.
  ContinuationGCSupport::transform_stack_chunk(obj);

  // Check for deduplicating young Strings.
  if (G1StringDedup::is_candidate_from_evacuation(klass,
                                                  region_attr,
                                                  dest_attr,
                                                  age)) {
    // Record old; request adds a new weak reference, which reference
    // processing expects to refer to a from-space object.
    _string_dedup_requests.add(old);
  }

  assert(_scanner.skip_card_mark_set(), "must be");
  // Backwards iteration of the instance fields (klass passed to avoid
  // re-loading it from the object header).
  obj->oop_iterate_backwards(&_scanner, klass);
}
463
|
221 markWord m = obj->mark();
222 if (m.is_forwarded()) {
223 obj = obj->forwardee(m);
224 } else {
225 obj = do_copy_to_survivor_space(region_attr, obj, m);
226 }
227 RawAccess<IS_NOT_NULL>::oop_store(p, obj);
228
229 write_ref_field_post(p, obj);
230 }
231
// Process one claimed chunk of a partially scanned reference array.
// `state` describes the (already-copied) destination array and the
// remaining work; `stolen` indicates the task was taken from another
// worker's queue rather than popped from our own.
MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::do_partial_array(PartialArrayState* state, bool stolen) {
  // Access state before release by claim().  claim() may release the
  // state object, so the destination array must be read first.
  objArrayOop to_array = objArrayOop(state->destination());
  PartialArraySplitter::Claim claim =
    _partial_array_splitter.claim(state, _task_queue, stolen);
  G1HeapRegionAttr dest_attr = _g1h->region_attr(to_array);
  // RAII guard: while in scope, configure the scanner to skip card
  // marking when the destination is a newly allocated survivor region.
  G1SkipCardMarkSetter x(&_scanner, dest_attr.is_new_survivor());
  // Process claimed task: only reference arrays are chunked this way,
  // so the state's destination must be a refArray.
  assert(to_array->is_refArray(), "Must be");
  refArrayOop(to_array)->oop_iterate_range(&_scanner,
                                           checked_cast<int>(claim._start),
                                           checked_cast<int>(claim._end));
}
246
// Begin chunked scanning of a just-evacuated reference array: register
// the remainder with the partial-array splitter (so other workers can
// help) and scan the initial chunk here.  `from_obj` is the from-space
// original; `to_obj` is its copy.
MAYBE_INLINE_EVACUATION
void G1ParScanThreadState::start_partial_objarray(oop from_obj,
                                                  oop to_obj) {
  assert(from_obj->is_forwarded(), "precondition");
  assert(from_obj->forwardee() == to_obj, "precondition");
  assert(to_obj->is_objArray(), "precondition");

  objArrayOop to_array = objArrayOop(to_obj);
  size_t array_length = to_array->length();
  size_t initial_chunk_size =
    // The source array is unused when processing states.
    _partial_array_splitter.start(_task_queue, nullptr, to_array, array_length);

  // Caller must already have card-mark skipping configured on the scanner.
  assert(_scanner.skip_card_mark_set(), "must be");
  // Process the initial chunk. No need to process the type in the
  // klass, as it will already be handled by processing the built-in
  // module.
  // Only reference arrays take this chunked path (see do_iterate_object).
  assert(to_array->is_refArray(), "Must be");
  refArrayOop(to_array)->oop_iterate_range(&_scanner, 0, checked_cast<int>(initial_chunk_size));
}
267
268 MAYBE_INLINE_EVACUATION
269 void G1ParScanThreadState::dispatch_task(ScannerTask task, bool stolen) {
270 verify_task(task);
271 if (task.is_narrow_oop_ptr()) {
272 do_oop_evac(task.to_narrow_oop_ptr());
273 } else if (task.is_oop_ptr()) {
274 do_oop_evac(task.to_oop_ptr());
275 } else {
276 do_partial_array(task.to_partial_array_state(), stolen);
277 }
278 }
279
280 // Process tasks until overflow queue is empty and local queue
281 // contains no more than threshold entries. NOINLINE to prevent
282 // inlining into steal_and_trim_queue.
283 ATTRIBUTE_FLATTEN NOINLINE
284 void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
285 ScannerTask task;
416 size_t word_sz,
417 uint node_index) {
418 _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
419 }
420
421 void G1ParScanThreadState::update_bot_after_copying(oop obj, size_t word_sz) {
422 HeapWord* obj_start = cast_from_oop<HeapWord*>(obj);
423 G1HeapRegion* region = _g1h->heap_region_containing(obj_start);
424 region->update_bot_for_block(obj_start, obj_start + word_sz);
425 }
426
// Scan the freshly copied object `obj` for references, queueing any
// discovered oops for evacuation.  `old` is the from-space original,
// `klass` is the object's class, `region_attr`/`dest_attr` describe
// the source and destination regions, and `age` is the object's age
// after copying (used for string-dedup candidacy).
ALWAYSINLINE
void G1ParScanThreadState::do_iterate_object(oop const obj,
                                             oop const old,
                                             Klass* const klass,
                                             G1HeapRegionAttr const region_attr,
                                             G1HeapRegionAttr const dest_attr,
                                             uint age) {
  // Most objects are not arrays, so do one array check rather than
  // checking for each array category for each object.
  // Flat (inline-type) arrays are deliberately excluded here so they
  // fall through to the generic oop_iterate_backwards path below
  // (presumably because their flattened elements may embed oops —
  // TODO confirm against flatArray layout).
  if (klass->is_array_klass() && !klass->is_flatArray_klass()) {
    assert(!klass->is_stack_chunk_instance_klass(), "must be");

    if (klass->is_refArray_klass()) {
      // Reference arrays are scanned in chunks via the partial-array
      // mechanism so large arrays can be split across workers.
      start_partial_objarray(old, obj);
    } else {
      // Nothing needs to be done for typeArrays. Body doesn't contain
      // any oops to scan, and the type in the klass will already be handled
      // by processing the built-in module.
      assert(klass->is_typeArray_klass() || klass->is_objArray_klass(), "invariant");
    }
    return;
  }

  // Non-array (or flat array) path: let continuation support fix up
  // stack chunks first.
  ContinuationGCSupport::transform_stack_chunk(obj);

  // Check for deduplicating young Strings.
  if (G1StringDedup::is_candidate_from_evacuation(klass,
                                                  region_attr,
                                                  dest_attr,
                                                  age)) {
    // Record old; request adds a new weak reference, which reference
    // processing expects to refer to a from-space object.
    _string_dedup_requests.add(old);
  }

  assert(_scanner.skip_card_mark_set(), "must be");
  // Backwards iteration of the instance fields (klass passed to avoid
  // re-loading it from the object header).
  obj->oop_iterate_backwards(&_scanner, klass);
}
465
|