src/hotspot/share/gc/g1/g1ParScanThreadState.cpp

429 NOINLINE
430 void G1ParScanThreadState::undo_allocation(G1HeapRegionAttr dest_attr,
431                                            HeapWord* obj_ptr,
432                                            size_t word_sz,
433                                            uint node_index) {
434   _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
435 }
436 
437 // Private inline function, for direct internal use and providing the
438 // implementation of the public not-inline function.
439 MAYBE_INLINE_EVACUATION
440 oop G1ParScanThreadState::do_copy_to_survivor_space(G1HeapRegionAttr const region_attr,
441                                                     oop const old,
442                                                     markWord const old_mark) {
443   assert(region_attr.is_in_cset(),
444          "Unexpected region attr type: %s", region_attr.get_type_str());
445 
446   // Get the klass once.  We'll need it again later, and this avoids
447   // re-decoding when it's compressed.
448   Klass* klass = old->klass();
449   const size_t word_sz = old->size_given_klass(klass);
450 
451   uint age = 0;
452   G1HeapRegionAttr dest_attr = next_region_attr(region_attr, old_mark, age);
453   HeapRegion* const from_region = _g1h->heap_region_containing(old);
454   uint node_index = from_region->node_index();
455 
456   HeapWord* obj_ptr = _plab_allocator->plab_allocate(dest_attr, word_sz, node_index);
457 
458   // PLAB allocations should succeed most of the time, so we'll
459   // normally check against NULL once and that's it.
460   if (obj_ptr == NULL) {
461     obj_ptr = allocate_copy_slow(&dest_attr, old, word_sz, age, node_index);
462     if (obj_ptr == NULL) {
463       // This will either forward-to-self, or detect that someone else has
464       // installed a forwarding pointer.
465       return handle_evacuation_failure_par(old, old_mark, word_sz);
466     }
467   }
468 
469   assert(obj_ptr != NULL, "when we get here, allocation should have succeeded");
470   assert(_g1h->is_in_reserved(obj_ptr), "Allocated memory should be in the heap");
471 
472   // Should this evacuation fail?
473   if (inject_evacuation_failure()) {
474   // Doing this after all the allocation attempts also exercises the
475   // undo_allocation() method.
476     undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
477     return handle_evacuation_failure_par(old, old_mark, word_sz);
478   }
479 
480   // We're going to allocate linearly, so might as well prefetch ahead.
481   Prefetch::write(obj_ptr, PrefetchCopyIntervalInBytes);
482   Copy::aligned_disjoint_words(cast_from_oop<HeapWord*>(old), obj_ptr, word_sz);
483 
484   const oop obj = cast_to_oop(obj_ptr);
485   const oop forward_ptr = old->forward_to_atomic(obj, old_mark, memory_order_relaxed);
486   if (forward_ptr == NULL) {
487 
488     {
489       const uint young_index = from_region->young_index_in_cset();
490       assert((from_region->is_young() && young_index >  0) ||
491              (!from_region->is_young() && young_index == 0), "invariant" );
492       _surviving_young_words[young_index] += word_sz;
493     }
494 
495     if (dest_attr.is_young()) {
496       if (age < markWord::max_age) {
497         age++;
498         obj->incr_age();
499       }
500       _age_table.add(age, word_sz);
501     }
502 
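
The listing above is the first of two views of this hunk; the second view follows below and differs only at lines 449 and 482. One step worth spelling out in both views is the forwarding attempt at lines 485-486: forward_to_atomic() tries to install the freshly made copy as the old object's forwardee and returns NULL only if this thread wins that race; otherwise it returns the copy some other worker installed first, and this thread must discard its own copy (that cleanup path lies outside the lines shown here). A minimal standalone sketch of this claim-by-CAS pattern, using hypothetical names (Obj, Header, and forward_to_atomic below are illustrative, not HotSpot's types):

#include <atomic>
#include <cassert>

struct Obj {};                              // hypothetical object type

struct Header {
  std::atomic<Obj*> forwardee{nullptr};     // forwarding-pointer slot
};

// Returns nullptr if this thread installed 'my_copy' as the forwardee,
// otherwise the copy that another thread installed first.
Obj* forward_to_atomic(Header& h, Obj* my_copy) {
  Obj* expected = nullptr;
  if (h.forwardee.compare_exchange_strong(expected, my_copy,
                                          std::memory_order_relaxed)) {
    return nullptr;                         // we won the race
  }
  return expected;                          // someone else won; use their copy
}

int main() {
  Header h;
  Obj a, b;
  assert(forward_to_atomic(h, &a) == nullptr);   // first claim succeeds
  assert(forward_to_atomic(h, &b) == &a);        // second sees the winner's copy
  return 0;
}

The single relaxed order in the sketch mirrors the memory_order_relaxed argument in the listing above.
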

429 NOINLINE
430 void G1ParScanThreadState::undo_allocation(G1HeapRegionAttr dest_attr,
431                                            HeapWord* obj_ptr,
432                                            size_t word_sz,
433                                            uint node_index) {
434   _plab_allocator->undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
435 }
436 
437 // Private inline function, for direct internal use and providing the
438 // implementation of the public not-inline function.
439 MAYBE_INLINE_EVACUATION
440 oop G1ParScanThreadState::do_copy_to_survivor_space(G1HeapRegionAttr const region_attr,
441                                                     oop const old,
442                                                     markWord const old_mark) {
443   assert(region_attr.is_in_cset(),
444          "Unexpected region attr type: %s", region_attr.get_type_str());
445 
446   // Get the klass once.  We'll need it again later, and this avoids
447   // re-decoding when it's compressed.
448   Klass* klass = old->klass();
449   const size_t word_sz = old->compact_size_given_klass(klass);
450 
451   uint age = 0;
452   G1HeapRegionAttr dest_attr = next_region_attr(region_attr, old_mark, age);
453   HeapRegion* const from_region = _g1h->heap_region_containing(old);
454   uint node_index = from_region->node_index();
455 
456   HeapWord* obj_ptr = _plab_allocator->plab_allocate(dest_attr, word_sz, node_index);
457 
458   // PLAB allocations should succeed most of the time, so we'll
459   // normally check against NULL once and that's it.
460   if (obj_ptr == NULL) {
461     obj_ptr = allocate_copy_slow(&dest_attr, old, word_sz, age, node_index);
462     if (obj_ptr == NULL) {
463       // This will either forward-to-self, or detect that someone else has
464       // installed a forwarding pointer.
465       return handle_evacuation_failure_par(old, old_mark, word_sz);
466     }
467   }
468 
469   assert(obj_ptr != NULL, "when we get here, allocation should have succeeded");
470   assert(_g1h->is_in_reserved(obj_ptr), "Allocated memory should be in the heap");
471 
472   // Should this evacuation fail?
473   if (inject_evacuation_failure()) {
474   // Doing this after all the allocation attempts also exercises the
475   // undo_allocation() method.
476     undo_allocation(dest_attr, obj_ptr, word_sz, node_index);
477     return handle_evacuation_failure_par(old, old_mark, word_sz);
478   }
479 
480   // We're going to allocate linearly, so might as well prefetch ahead.
481   Prefetch::write(obj_ptr, PrefetchCopyIntervalInBytes);
482   old->copy_disjoint_compact(obj_ptr, word_sz);
483 
484   const oop obj = cast_to_oop(obj_ptr);
485   const oop forward_ptr = old->forward_to_atomic(obj, old_mark, memory_order_relaxed);
486   if (forward_ptr == NULL) {
487 
488     {
489       const uint young_index = from_region->young_index_in_cset();
490       assert((from_region->is_young() && young_index >  0) ||
491              (!from_region->is_young() && young_index == 0), "invariant" );
492       _surviving_young_words[young_index] += word_sz;
493     }
494 
495     if (dest_attr.is_young()) {
496       if (age < markWord::max_age) {
497         age++;
498         obj->incr_age();
499       }
500       _age_table.add(age, word_sz);
501     }
502 
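
Compared with the first listing, this second view changes exactly two things: the destination size is taken from compact_size_given_klass() instead of size_given_klass() (line 449), and the flat word copy Copy::aligned_disjoint_words() is replaced by old->copy_disjoint_compact(obj_ptr, word_sz) (line 482). Note that the resulting word_sz is also what gets allocated from the PLAB and what feeds _surviving_young_words and the age table. The exact semantics of the two new calls are not shown on this page; the sketch below only illustrates the general shape they suggest, namely a copy whose destination can be smaller than the source because the header is stored more compactly. All names and the two-word-to-one-word header assumption are illustrative, not HotSpot's:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

using HeapWord = std::uintptr_t;            // one heap word

// Hypothetical layouts: two-word header in the source object,
// one-word header in the compacted destination.
constexpr std::size_t kSrcHeaderWords  = 2;
constexpr std::size_t kDestHeaderWords = 1;

// Destination size from the source size -- the analogue of switching
// from size_given_klass() to compact_size_given_klass().
std::size_t compact_size(std::size_t src_word_sz) {
  return src_word_sz - (kSrcHeaderWords - kDestHeaderWords);
}

// Header-aware copy -- the analogue of replacing the flat
// Copy::aligned_disjoint_words() with copy_disjoint_compact().
void copy_disjoint_compact(const HeapWord* src, HeapWord* dst,
                           std::size_t dst_word_sz) {
  // Header first (truncated to the compact form), then the payload,
  // which starts at a different offset on each side.
  std::memcpy(dst, src, kDestHeaderWords * sizeof(HeapWord));
  std::memcpy(dst + kDestHeaderWords, src + kSrcHeaderWords,
              (dst_word_sz - kDestHeaderWords) * sizeof(HeapWord));
}

int main() {
  HeapWord src[6] = {0xAA, 0xBB, 1, 2, 3, 4};   // 2 header words + 4 payload words
  const std::size_t dst_sz = compact_size(6);   // 5 words in the compact layout
  HeapWord dst[5] = {};
  copy_disjoint_compact(src, dst, dst_sz);
  assert(dst[0] == 0xAA && dst[1] == 1 && dst[4] == 4);
  return 0;
}
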