
src/hotspot/share/gc/serial/defNewGeneration.cpp

Old (before the change):

720   _promotion_failed_info.register_copy_failure(old->size());
721 
722   ContinuationGCSupport::transform_stack_chunk(old);
723 
724   // forward to self
725   old->forward_to_self();
726 
727   _promo_failure_scan_stack.push(old);
728 
729   if (!_promo_failure_drain_in_progress) {
730     // prevent recursion in copy_to_survivor_space()
731     _promo_failure_drain_in_progress = true;
732     drain_promo_failure_scan_stack();
733     _promo_failure_drain_in_progress = false;
734   }
735 }
736 
737 oop DefNewGeneration::copy_to_survivor_space(oop old) {
738   assert(is_in_reserved(old) && !old->is_forwarded(),
739          "shouldn't be scavenging this oop");
740   size_t s = old->size();
741   oop obj = nullptr;
742 
743   // Try allocating obj in to-space (unless too old)
744   if (old->age() < tenuring_threshold()) {
745     obj = cast_to_oop(to()->allocate(s));
746   }
747 
748   bool new_obj_is_tenured = false;
749   // Otherwise try allocating obj tenured
750   if (obj == nullptr) {
751     obj = _old_gen->allocate_for_promotion(old, s);
752     if (obj == nullptr) {
753       handle_promotion_failure(old);
754       return old;
755     }
756 
757     new_obj_is_tenured = true;
758   }
759 
760   // Prefetch beyond obj
761   const intx interval = PrefetchCopyIntervalInBytes;
762   Prefetch::write(obj, interval);
763 
764   // Copy obj
765   Copy::aligned_disjoint_words(cast_from_oop<HeapWord*>(old), cast_from_oop<HeapWord*>(obj), s);
766 
767   ContinuationGCSupport::transform_stack_chunk(obj);
768 
769   if (!new_obj_is_tenured) {
770     // Increment age if obj still in new generation
771     obj->incr_age();
772     age_table()->add(obj, s);
773   }
774 
775   // Done, insert forward pointer to obj in this header
776   old->forward_to(obj);
777 
778   if (SerialStringDedup::is_candidate_from_evacuation(obj, new_obj_is_tenured)) {
779     // Record old; request adds a new weak reference, which reference
780     // processing expects to refer to a from-space object.
781     _string_dedup_requests.add(old);
782   }
783   return obj;
784 }
785 
786 void DefNewGeneration::drain_promo_failure_scan_stack() {
787   PromoteFailureClosure cl{this};
788   while (!_promo_failure_scan_stack.is_empty()) {
789      oop obj = _promo_failure_scan_stack.pop();
790      obj->oop_iterate(&cl);
791   }
792 }
793 
794 void DefNewGeneration::contribute_scratch(void*& scratch, size_t& num_words) {

New (after the change):

720   _promotion_failed_info.register_copy_failure(old->size());
721 
722   ContinuationGCSupport::transform_stack_chunk(old);
723 
724   // forward to self
725   old->forward_to_self();
726 
727   _promo_failure_scan_stack.push(old);
728 
729   if (!_promo_failure_drain_in_progress) {
730     // prevent recursion in copy_to_survivor_space()
731     _promo_failure_drain_in_progress = true;
732     drain_promo_failure_scan_stack();
733     _promo_failure_drain_in_progress = false;
734   }
735 }
736 
737 oop DefNewGeneration::copy_to_survivor_space(oop old) {
738   assert(is_in_reserved(old) && !old->is_forwarded(),
739          "shouldn't be scavenging this oop");
740   size_t old_size = old->size();
741   size_t s = old->copy_size(old_size, old->mark()); // words needed for the copy; may exceed old_size (e.g. to append the identity hash)
742 
743   oop obj = nullptr;
744 
745   // Try allocating obj in to-space (unless too old)
746   if (old->age() < tenuring_threshold()) {
747     obj = cast_to_oop(to()->allocate(s));
748   }
749 
750   bool new_obj_is_tenured = false;
751   // Otherwise try allocating obj tenured
752   if (obj == nullptr) {
753     obj = _old_gen->allocate_for_promotion(old, s);
754     if (obj == nullptr) {
755       handle_promotion_failure(old);
756       return old;
757     }
758 
759     new_obj_is_tenured = true;
760   }
761 
762   // Prefetch beyond obj
763   const intx interval = PrefetchCopyIntervalInBytes;
764   Prefetch::write(obj, interval);
765 
766   // Copy obj
767   Copy::aligned_disjoint_words(cast_from_oop<HeapWord*>(old), cast_from_oop<HeapWord*>(obj), old_size);
768 
769   ContinuationGCSupport::transform_stack_chunk(obj);
770 
771   if (!new_obj_is_tenured) {
772     // Increment age if obj still in new generation
773     obj->incr_age();
774     age_table()->add(obj, s);
775   }
776 
777   obj->initialize_hash_if_necessary(old); // install old's identity hash in the copy, if required
778 
779   // Done, insert forward pointer to obj in this header
780   old->forward_to(obj);
781 
782   if (SerialStringDedup::is_candidate_from_evacuation(obj, new_obj_is_tenured)) {
783     // Record old; request adds a new weak reference, which reference
784     // processing expects to refer to a from-space object.
785     _string_dedup_requests.add(old);
786   }
787   return obj;
788 }
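
The notable change in copy_to_survivor_space() above is that the size used to allocate the copy (s, from copy_size()) can now differ from the number of words actually copied (old_size), with initialize_hash_if_necessary() taking care of the copy's identity hash afterwards. Below is a minimal standalone sketch of that allocate-larger / copy-original / initialize-extra pattern; ToyObject, copy_size_words() and the placeholder hash value are invented for illustration, and this is not HotSpot code.

#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>

// Toy stand-in for an object and its header state; purely illustrative.
struct ToyObject {
  size_t payload_words;    // current size of the object, in words
  bool   needs_hash_slot;  // whether the copy must carry an appended hash word
};

// Hypothetical analogue of copy_size(): the copy is one word larger
// when a hash slot has to be appended.
static size_t copy_size_words(const ToyObject& o) {
  return o.payload_words + (o.needs_hash_slot ? 1 : 0);
}

int main() {
  ToyObject old_obj{4, true};
  size_t old_size = old_obj.payload_words;    // words to copy
  size_t s        = copy_size_words(old_obj); // words to allocate

  uintptr_t* from = static_cast<uintptr_t*>(calloc(old_size, sizeof(uintptr_t)));
  uintptr_t* to   = static_cast<uintptr_t*>(malloc(s * sizeof(uintptr_t)));

  // Copy only the original old_size words, as the patched code does...
  memcpy(to, from, old_size * sizeof(uintptr_t));

  // ...then initialize the extra slot separately, by analogy with
  // initialize_hash_if_necessary().
  if (old_obj.needs_hash_slot) {
    to[old_size] = 0x1234u;                   // placeholder hash value
  }

  free(from);
  free(to);
  return 0;
}
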
789 
790 void DefNewGeneration::drain_promo_failure_scan_stack() {
791   PromoteFailureClosure cl{this};
792   while (!_promo_failure_scan_stack.is_empty()) {
793      oop obj = _promo_failure_scan_stack.pop();
794      obj->oop_iterate(&cl);
795   }
796 }
797 
798 void DefNewGeneration::contribute_scratch(void*& scratch, size_t& num_words) {
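
One more aside on the promotion-failure path shown above: handle_promotion_failure() avoids unbounded recursion by pushing failed objects onto _promo_failure_scan_stack and letting only the outermost call drain it, guarded by _promo_failure_drain_in_progress. A generic sketch of that flag-plus-explicit-work-stack pattern follows; Drainer, process() and handle_failure() are invented names, not HotSpot code.

#include <cstdio>
#include <stack>

// Generic sketch of the pattern: nested "failures" only push work onto an
// explicit stack; the outermost call drains it, guarded by a flag, so the
// processing never nests more than one level deep.
struct Drainer {
  std::stack<int> work;
  bool drain_in_progress = false;

  // Processing one item may trigger further "failures".
  void process(int item) {
    if (item > 0) {
      handle_failure(item - 1);  // nested call: just pushes, does not drain
    } else {
      std::printf("done\n");
    }
  }

  void handle_failure(int item) {
    work.push(item);
    if (!drain_in_progress) {    // only the outermost call drains the stack
      drain_in_progress = true;
      while (!work.empty()) {
        int next = work.top();
        work.pop();
        process(next);
      }
      drain_in_progress = false;
    }
  }
};

int main() {
  Drainer d;
  d.handle_failure(3);           // handles 3, 2, 1, 0 without deep recursion
  return 0;
}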