
src/hotspot/share/runtime/deoptimization.cpp


 373     if (TraceDeoptimization) {
 374       print_objects(deoptee_thread, objects, realloc_failures);
 375     }
 376   }
 377   if (save_oop_result) {
 378     // Restore result.
 379     deoptee.set_saved_oop_result(&map, return_value());
 380   }
 381   return realloc_failures;
 382 }
 383 
 384 static void restore_eliminated_locks(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures,
 385                                      frame& deoptee, int exec_mode, bool& deoptimized_objects) {
 386   JavaThread* deoptee_thread = chunk->at(0)->thread();
 387   assert(!EscapeBarrier::objs_are_deoptimized(deoptee_thread, deoptee.id()), "must relock just once");
 388   assert(thread == Thread::current(), "should be");
 389   HandleMark hm(thread);
 390 #ifndef PRODUCT
 391   bool first = true;
 392 #endif // !PRODUCT
 393   for (int i = 0; i < chunk->length(); i++) {


 394     compiledVFrame* cvf = chunk->at(i);
 395     assert (cvf->scope() != nullptr,"expect only compiled java frames");
 396     GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 397     if (monitors->is_nonempty()) {
 398       bool relocked = Deoptimization::relock_objects(thread, monitors, deoptee_thread, deoptee,
 399                                                      exec_mode, realloc_failures);
 400       deoptimized_objects = deoptimized_objects || relocked;







 401 #ifndef PRODUCT
 402       if (PrintDeoptimizationDetails) {
 403         ResourceMark rm;
 404         stringStream st;
 405         for (int j = 0; j < monitors->length(); j++) {
 406           MonitorInfo* mi = monitors->at(j);
 407           if (mi->eliminated()) {
 408             if (first) {
 409               first = false;
 410               st.print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 411             }
 412             if (exec_mode == Deoptimization::Unpack_none) {
 413               ObjectMonitor* monitor = deoptee_thread->current_waiting_monitor();
 414               if (monitor != nullptr && monitor->object() == mi->owner()) {
 415                 st.print_cr("     object <" INTPTR_FORMAT "> DEFERRED relocking after wait", p2i(mi->owner()));
 416                 continue;
 417               }
 418             }
 419             if (mi->owner_is_scalar_replaced()) {
 420               Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
 421               st.print_cr("     failed reallocation for klass %s", k->external_name());
 422             } else {
 423               st.print_cr("     object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
 424             }
 425           }
 426         }
 427         tty->print_raw(st.freeze());
 428       }
 429 #endif // !PRODUCT
 430     }
 431   }





 432 }
 433 
 434 // Deoptimize objects, that is reallocate and relock them, just before they escape through JVMTI.
 435 // The given vframes cover one physical frame.
 436 bool Deoptimization::deoptimize_objects_internal(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk,
 437                                                  bool& realloc_failures) {
 438   frame deoptee = chunk->at(0)->fr();
 439   JavaThread* deoptee_thread = chunk->at(0)->thread();
 440   CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
 441   RegisterMap map(chunk->at(0)->register_map());
 442   bool deoptimized_objects = false;
 443 
 444   bool const jvmci_enabled = JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false);
 445 
 446   // Reallocate the non-escaping objects and restore their fields.
 447   if (jvmci_enabled COMPILER2_PRESENT(|| (DoEscapeAnalysis && EliminateAllocations)
 448                                       || EliminateAutoBox || EnableVectorAggressiveReboxing)) {
 449     realloc_failures = rematerialize_objects(thread, Unpack_none, cm, deoptee, map, chunk, deoptimized_objects);
 450   }
 451 

1636         markWord mark = obj->mark();
1637         if (exec_mode == Unpack_none) {
1638           if (LockingMode == LM_LEGACY && mark.has_locker() && fr.sp() > (intptr_t*)mark.locker()) {
1639             // With exec_mode == Unpack_none obj may be thread local and locked in
1640             // a callee frame. Make the lock in the callee a recursive lock and restore the displaced header.
1641             markWord dmw = mark.displaced_mark_helper();
1642             mark.locker()->set_displaced_header(markWord::encode((BasicLock*) nullptr));
1643             obj->set_mark(dmw);
1644           }
1645           if (mark.has_monitor()) {
1646             // defer relocking if the deoptee thread is currently waiting for obj
1647             ObjectMonitor* waiting_monitor = deoptee_thread->current_waiting_monitor();
1648             if (waiting_monitor != nullptr && waiting_monitor->object() == obj()) {
1649               assert(fr.is_deoptimized_frame(), "frame must be scheduled for deoptimization");
1650               mon_info->lock()->set_displaced_header(markWord::unused_mark());
1651               JvmtiDeferredUpdates::inc_relock_count_after_wait(deoptee_thread);
1652               continue;
1653             }
1654           }
1655         }
1656         if (LockingMode == LM_LIGHTWEIGHT && exec_mode == Unpack_none) {
1657           // We have lost information about the correct state of the lock stack.
1658           // Inflate the locks instead. Enter then inflate to avoid races with
1659           // deflation.
1660           ObjectSynchronizer::enter(obj, nullptr, deoptee_thread);
1661           assert(mon_info->owner()->is_locked(), "object must be locked now");
1662           ObjectMonitor* mon = ObjectSynchronizer::inflate(deoptee_thread, obj(), ObjectSynchronizer::inflate_cause_vm_internal);
1663           assert(mon->owner() == deoptee_thread, "must be");
1664         } else {
1665           BasicLock* lock = mon_info->lock();
1666           ObjectSynchronizer::enter(obj, lock, deoptee_thread);
1667           assert(mon_info->owner()->is_locked(), "object must be locked now");
1668         }
1669       }
1670     }
1671   }
1672   return relocked_objects;
1673 }
1674 #endif // COMPILER2_OR_JVMCI
1675 
1676 vframeArray* Deoptimization::create_vframeArray(JavaThread* thread, frame fr, RegisterMap *reg_map, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures) {
1677   Events::log_deopt_message(thread, "DEOPT PACKING pc=" INTPTR_FORMAT " sp=" INTPTR_FORMAT, p2i(fr.pc()), p2i(fr.sp()));
1678 
1679   // Register map for next frame (used for stack crawl).  We capture
1680   // the state of the deopt'ing frame's caller.  Thus if we need to
1681   // stuff a C2I adapter we can properly fill in the callee-save
1682   // register locations.
1683   frame caller = fr.sender(reg_map);
1684   int frame_size = caller.sp() - fr.sp();
1685 
1686   frame sender = caller;

1719     tty->print_raw(st.freeze());
1720     tty->cr();
1721   }
1722 
1723   return array;
1724 }
1725 
1726 #if COMPILER2_OR_JVMCI
1727 void Deoptimization::pop_frames_failed_reallocs(JavaThread* thread, vframeArray* array) {
1728   // Reallocation of some scalar replaced objects failed. Record
1729   // that we need to pop all the interpreter frames for the
1730   // deoptimized compiled frame.
1731   assert(thread->frames_to_pop_failed_realloc() == 0, "missed frames to pop?");
1732   thread->set_frames_to_pop_failed_realloc(array->frames());
1733   // Unlock all monitors here otherwise the interpreter will see a
1734   // mix of locked and unlocked monitors (because of failed
1735   // reallocations of synchronized objects) and be confused.
1736   for (int i = 0; i < array->frames(); i++) {
1737     MonitorChunk* monitors = array->element(i)->monitors();
1738     if (monitors != nullptr) {
1739       for (int j = 0; j < monitors->number_of_monitors(); j++) {

1740         BasicObjectLock* src = monitors->at(j);
1741         if (src->obj() != nullptr) {
1742           ObjectSynchronizer::exit(src->obj(), src->lock(), thread);
1743         }
1744       }
1745       array->element(i)->free_monitors(thread);
1746 #ifdef ASSERT
1747       array->element(i)->set_removed_monitors();
1748 #endif
1749     }
1750   }
1751 }
1752 #endif
1753 
1754 void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
1755   assert(fr.can_be_deoptimized(), "checking frame type");
1756 
1757   gather_statistics(reason, Action_none, Bytecodes::_illegal);
1758 
1759   if (LogCompilation && xtty != nullptr) {

 373     if (TraceDeoptimization) {
 374       print_objects(deoptee_thread, objects, realloc_failures);
 375     }
 376   }
 377   if (save_oop_result) {
 378     // Restore result.
 379     deoptee.set_saved_oop_result(&map, return_value());
 380   }
 381   return realloc_failures;
 382 }
 383 
 384 static void restore_eliminated_locks(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures,
 385                                      frame& deoptee, int exec_mode, bool& deoptimized_objects) {
 386   JavaThread* deoptee_thread = chunk->at(0)->thread();
 387   assert(!EscapeBarrier::objs_are_deoptimized(deoptee_thread, deoptee.id()), "must relock just once");
 388   assert(thread == Thread::current(), "should be");
 389   HandleMark hm(thread);
 390 #ifndef PRODUCT
 391   bool first = true;
 392 #endif // !PRODUCT
 393   DEBUG_ONLY(GrowableArray<oop> lock_order{0};)
 394   // Start locking from outermost/oldest frame
 395   for (int i = (chunk->length() - 1); i >= 0; i--) {
 396     compiledVFrame* cvf = chunk->at(i);
 397     assert (cvf->scope() != nullptr,"expect only compiled java frames");
 398     GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 399     if (monitors->is_nonempty()) {
 400       bool relocked = Deoptimization::relock_objects(thread, monitors, deoptee_thread, deoptee,
 401                                                      exec_mode, realloc_failures);
 402       deoptimized_objects = deoptimized_objects || relocked;
 403 #ifdef ASSERT
 404       if (LockingMode == LM_LIGHTWEIGHT && !realloc_failures) {
 405         for (MonitorInfo* mi : *monitors) {
 406           lock_order.push(mi->owner());
 407         }
 408       }
 409 #endif // ASSERT
 410 #ifndef PRODUCT
 411       if (PrintDeoptimizationDetails) {
 412         ResourceMark rm;
 413         stringStream st;
 414         for (int j = 0; j < monitors->length(); j++) {
 415           MonitorInfo* mi = monitors->at(j);
 416           if (mi->eliminated()) {
 417             if (first) {
 418               first = false;
 419               st.print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 420             }
 421             if (exec_mode == Deoptimization::Unpack_none) {
 422               ObjectMonitor* monitor = deoptee_thread->current_waiting_monitor();
 423               if (monitor != nullptr && monitor->object() == mi->owner()) {
 424                 st.print_cr("     object <" INTPTR_FORMAT "> DEFERRED relocking after wait", p2i(mi->owner()));
 425                 continue;
 426               }
 427             }
 428             if (mi->owner_is_scalar_replaced()) {
 429               Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
 430               st.print_cr("     failed reallocation for klass %s", k->external_name());
 431             } else {
 432               st.print_cr("     object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
 433             }
 434           }
 435         }
 436         tty->print_raw(st.freeze());
 437       }
 438 #endif // !PRODUCT
 439     }
 440   }
 441 #ifdef ASSERT
 442   if (LockingMode == LM_LIGHTWEIGHT && !realloc_failures) {
 443     deoptee_thread->lock_stack().verify_consistent_lock_order(lock_order, exec_mode != Deoptimization::Unpack_none);
 444   }
 445 #endif // ASSERT
 446 }
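In the new version of restore_eliminated_locks above, relocking now walks the inlined frames from the outermost (oldest) frame inward, and in debug builds under LM_LIGHTWEIGHT (when reallocation succeeded) it records each monitor owner in lock_order and asks the deoptee thread's lock stack to verify that order. The standalone C++ sketch below is a hypothetical model of such an order check, not HotSpot's LockStack code; Obj, consistent_lock_order, and the tolerance for entries missing from the lock stack are assumptions made purely for illustration.

// Hypothetical standalone model (not HotSpot code) of a lock-order consistency
// check: the lock stack must list objects in the same relative order as the
// expected order collected from the outermost frame inward.
#include <cassert>
#include <cstddef>
#include <vector>

struct Obj {};  // stand-in for an object reference

static bool consistent_lock_order(const std::vector<Obj*>& expected,
                                  const std::vector<Obj*>& lock_stack) {
  // Every object on the lock stack must appear in `expected` in the same
  // relative order; entries may be missing (e.g. eliminated or deferred locks).
  std::size_t pos = 0;
  for (Obj* o : lock_stack) {
    while (pos < expected.size() && expected[pos] != o) {
      ++pos;
    }
    if (pos == expected.size()) {
      return false;  // object missing or encountered out of order
    }
    ++pos;
  }
  return true;
}

int main() {
  Obj a, b, c;
  std::vector<Obj*> expected   = {&a, &b, &c};  // outermost -> innermost
  std::vector<Obj*> lock_stack = {&a, &c};      // b not on the stack: still ok
  assert(consistent_lock_order(expected, lock_stack));
  std::vector<Obj*> bad_stack  = {&c, &a};      // wrong relative order
  assert(!consistent_lock_order(expected, bad_stack));
  return 0;
}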
 447 
 448 // Deoptimize objects, that is reallocate and relock them, just before they escape through JVMTI.
 449 // The given vframes cover one physical frame.
 450 bool Deoptimization::deoptimize_objects_internal(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk,
 451                                                  bool& realloc_failures) {
 452   frame deoptee = chunk->at(0)->fr();
 453   JavaThread* deoptee_thread = chunk->at(0)->thread();
 454   CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
 455   RegisterMap map(chunk->at(0)->register_map());
 456   bool deoptimized_objects = false;
 457 
 458   bool const jvmci_enabled = JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false);
 459 
 460   // Reallocate the non-escaping objects and restore their fields.
 461   if (jvmci_enabled COMPILER2_PRESENT(|| (DoEscapeAnalysis && EliminateAllocations)
 462                                       || EliminateAutoBox || EnableVectorAggressiveReboxing)) {
 463     realloc_failures = rematerialize_objects(thread, Unpack_none, cm, deoptee, map, chunk, deoptimized_objects);
 464   }
 465 

1650         markWord mark = obj->mark();
1651         if (exec_mode == Unpack_none) {
1652           if (LockingMode == LM_LEGACY && mark.has_locker() && fr.sp() > (intptr_t*)mark.locker()) {
1653             // With exec_mode == Unpack_none obj may be thread local and locked in
1654             // a callee frame. Make the lock in the callee a recursive lock and restore the displaced header.
1655             markWord dmw = mark.displaced_mark_helper();
1656             mark.locker()->set_displaced_header(markWord::encode((BasicLock*) nullptr));
1657             obj->set_mark(dmw);
1658           }
1659           if (mark.has_monitor()) {
1660             // defer relocking if the deoptee thread is currently waiting for obj
1661             ObjectMonitor* waiting_monitor = deoptee_thread->current_waiting_monitor();
1662             if (waiting_monitor != nullptr && waiting_monitor->object() == obj()) {
1663               assert(fr.is_deoptimized_frame(), "frame must be scheduled for deoptimization");
1664               mon_info->lock()->set_displaced_header(markWord::unused_mark());
1665               JvmtiDeferredUpdates::inc_relock_count_after_wait(deoptee_thread);
1666               continue;
1667             }
1668           }
1669         }
1670         if (LockingMode == LM_LIGHTWEIGHT) {
1671           // We have lost information about the correct state of the lock stack.
1672           // Inflate the locks instead. Enter then inflate to avoid races with
1673           // deflation.
1674           ObjectSynchronizer::enter_for(obj, nullptr, deoptee_thread);
1675           assert(mon_info->owner()->is_locked(), "object must be locked now");
1676           ObjectMonitor* mon = ObjectSynchronizer::inflate_for(deoptee_thread, obj(), ObjectSynchronizer::inflate_cause_vm_internal);
1677           assert(mon->owner() == deoptee_thread, "must be");
1678         } else {
1679           BasicLock* lock = mon_info->lock();
1680           ObjectSynchronizer::enter_for(obj, lock, deoptee_thread);
1681           assert(mon_info->owner()->is_locked(), "object must be locked now");
1682         }
1683       }
1684     }
1685   }
1686   return relocked_objects;
1687 }
1688 #endif // COMPILER2_OR_JVMCI
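The LM_LIGHTWEIGHT branch in relock_objects above enters the monitor on behalf of the deoptee thread before inflating it, because, as its comment notes, the lock-stack state has been lost and an unowned monitor could be deflated concurrently. The toy model below is a hypothetical, self-contained illustration of that ordering argument only; ToyMonitor and try_deflate are invented stand-ins, not HotSpot types, and the real deflation protocol is considerably more involved.

// Hypothetical model (not HotSpot code): deflation only reclaims monitors that
// have no owner, so taking ownership first closes the race window.
#include <cassert>
#include <optional>

struct ToyMonitor {
  std::optional<int> owner;   // owning thread id, empty if unowned
  bool deflated = false;
};

// A concurrent deflater may reclaim any monitor that currently has no owner.
static void try_deflate(ToyMonitor& m) {
  if (!m.owner.has_value()) {
    m.deflated = true;
  }
}

int main() {
  const int deoptee = 42;

  // Enter first: the monitor is owned before a racing deflater can reclaim it.
  ToyMonitor m;
  m.owner = deoptee;          // acquire on behalf of the deoptee thread
  try_deflate(m);             // deflater sees an owner and backs off
  assert(!m.deflated);        // safe to keep using the inflated monitor

  // Leaving the monitor unowned first is the window the comment warns about.
  ToyMonitor n;
  try_deflate(n);             // deflater wins the race while n is unowned
  assert(n.deflated);
  return 0;
}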
1689 
1690 vframeArray* Deoptimization::create_vframeArray(JavaThread* thread, frame fr, RegisterMap *reg_map, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures) {
1691   Events::log_deopt_message(thread, "DEOPT PACKING pc=" INTPTR_FORMAT " sp=" INTPTR_FORMAT, p2i(fr.pc()), p2i(fr.sp()));
1692 
1693   // Register map for next frame (used for stack crawl).  We capture
1694   // the state of the deopt'ing frame's caller.  Thus if we need to
1695   // stuff a C2I adapter we can properly fill in the callee-save
1696   // register locations.
1697   frame caller = fr.sender(reg_map);
1698   int frame_size = caller.sp() - fr.sp();
1699 
1700   frame sender = caller;

1733     tty->print_raw(st.freeze());
1734     tty->cr();
1735   }
1736 
1737   return array;
1738 }
1739 
1740 #if COMPILER2_OR_JVMCI
1741 void Deoptimization::pop_frames_failed_reallocs(JavaThread* thread, vframeArray* array) {
1742   // Reallocation of some scalar replaced objects failed. Record
1743   // that we need to pop all the interpreter frames for the
1744   // deoptimized compiled frame.
1745   assert(thread->frames_to_pop_failed_realloc() == 0, "missed frames to pop?");
1746   thread->set_frames_to_pop_failed_realloc(array->frames());
1747   // Unlock all monitors here otherwise the interpreter will see a
1748   // mix of locked and unlocked monitors (because of failed
1749   // reallocations of synchronized objects) and be confused.
1750   for (int i = 0; i < array->frames(); i++) {
1751     MonitorChunk* monitors = array->element(i)->monitors();
1752     if (monitors != nullptr) {
1753       // Unlock in reverse order starting from most nested monitor.
1754       for (int j = (monitors->number_of_monitors() - 1); j >= 0; j--) {
1755         BasicObjectLock* src = monitors->at(j);
1756         if (src->obj() != nullptr) {
1757           ObjectSynchronizer::exit(src->obj(), src->lock(), thread);
1758         }
1759       }
1760       array->element(i)->free_monitors(thread);
1761 #ifdef ASSERT
1762       array->element(i)->set_removed_monitors();
1763 #endif
1764     }
1765   }
1766 }
1767 #endif
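The new loop in pop_frames_failed_reallocs releases monitors starting from the most nested one, mirroring the order in which restore_eliminated_locks now acquires them from the outermost frame inward. The sketch below is a minimal, hypothetical illustration of that LIFO release discipline using std::mutex; it is not HotSpot code and the number of monitors is arbitrary.

// Hypothetical standalone illustration (not HotSpot code): acquire monitors
// from outermost to innermost, then release them in reverse, innermost first.
#include <cstddef>
#include <mutex>
#include <vector>

int main() {
  std::vector<std::mutex> monitors(3);

  // Acquire from outermost to innermost.
  for (std::size_t i = 0; i < monitors.size(); ++i) {
    monitors[i].lock();
  }

  // Release starting from the most nested monitor.
  for (std::size_t i = monitors.size(); i-- > 0; ) {
    monitors[i].unlock();
  }
  return 0;
}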
1768 
1769 void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
1770   assert(fr.can_be_deoptimized(), "checking frame type");
1771 
1772   gather_statistics(reason, Action_none, Bytecodes::_illegal);
1773 
1774   if (LogCompilation && xtty != nullptr) {