
  // Patch the compiled method so that when execution returns to it we will
  // deopt the execution state and return to the interpreter.
  fr.deoptimize(thread);
}

void Deoptimization::deoptimize(JavaThread* thread, frame fr, DeoptReason reason) {
  // Deoptimize only if the frame comes from compiled code.
  // Do not deoptimize a frame that has already been patched
  // during the execution of the loops below.
  if (!fr.is_compiled_frame() || fr.is_deoptimized_frame()) {
    return;
  }
  ResourceMark rm;
  deoptimize_single_frame(thread, fr, reason);
}

#if INCLUDE_JVMCI
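// JVMCI only: the compiled code in nm has no handler for the exception raised
// at the current pc. Invalidate nm, gather trap statistics for the innermost
// method at that pc, and return a continuation into the deopt blob.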
address Deoptimization::deoptimize_for_missing_exception_handler(nmethod* nm) {
  // There is no exception handler for this pc => deoptimize.
  nm->make_not_entrant(nmethod::ChangeReason::missing_exception_handler);

  // Use Deoptimization::deoptimize for all of its side effects:
  // gathering trap statistics, logging, ...
  // It also patches the return pc, but we do not care about that
  // since we return a continuation to the deopt_blob below.
  JavaThread* thread = JavaThread::current();
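  // Set up a register map for the stack walk below; judging by the option
  // names, it skips register-location updates, includes frame processing,
  // and does not walk into continuations.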
  RegisterMap reg_map(thread,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame runtime_frame = thread->last_frame();
  frame caller_frame = runtime_frame.sender(&reg_map);
  assert(caller_frame.cb()->as_nmethod_or_null() == nm, "expect top frame compiled method");
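  // Resolve the compiled virtual frame at the trap pc; the innermost (possibly
  // inlined) scope supplies the method whose MethodData is looked up below.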
  vframe* vf = vframe::new_vframe(&caller_frame, &reg_map, thread);
  compiledVFrame* cvf = compiledVFrame::cast(vf);
  ScopeDesc* imm_scope = cvf->scope();
  MethodData* imm_mdo = get_method_data(thread, methodHandle(thread, imm_scope->method()), true);
  if (imm_mdo != nullptr) {
    // Lock to read ProfileData, and ensure lock is not broken by a safepoint
    MutexLocker ml(imm_mdo->extra_data_lock(), Mutex::_no_safepoint_check_flag);

// ... (intervening code elided; the excerpt resumes in the uncommon trap handling) ...

        // This will allow the compiler to see the limit overflow, and
        // take corrective action, if possible.
        // (This condition is an unlikely backstop only, because the
        // PerBytecodeTrapLimit is more likely to take effect first,
        // if it is applicable.)
        make_not_entrant = true;
      }

      // Here's more hysteresis: If there has been a recompile at
      // this trap point already, run the method in the interpreter
      // for a while to exercise it more thoroughly.
      if (make_not_entrant && maybe_prior_recompile && maybe_prior_trap) {
        reprofile = true;
      }
    }

    // Take requested actions on the method:

    // Recompile
    if (make_not_entrant) {
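      // make_not_entrant() returns false when it does not change the nmethod's
      // state, e.g. because another thread already made it not entrant; in that
      // case the bookkeeping below is skipped.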
      if (!nm->make_not_entrant(nmethod::ChangeReason::uncommon_trap)) {
        return; // the call did not change nmethod's state
      }

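      // The recompiled bit recorded here is presumably what maybe_prior_recompile
      // (used in the hysteresis check above) observes on a later trap at this bci.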
      if (pdata != nullptr) {
        // Record the recompilation event, if any.
        int tstate0 = pdata->trap_state();
        int tstate1 = trap_state_set_recompiled(tstate0, true);
        if (tstate1 != tstate0)
          pdata->set_trap_state(tstate1);
      }

      // For code aging we count traps separately here, using make_not_entrant()
      // as a guard against simultaneous deopts in multiple threads.
      if (reason == Reason_tenured && trap_mdo != nullptr) {
        trap_mdo->inc_tenure_traps();
      }
    }

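    // Count this recompile against the overflow-recompile budget in the MDO;
    // presumably this counter is later compared to a recompilation cutoff
    // (e.g. PerBytecodeRecompilationCutoff) so a site that keeps trapping
    // eventually stops being recompiled.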
    if (inc_recompile_count) {
      trap_mdo->inc_overflow_recompile_count();