63 #include "prims/methodHandles.hpp"
64 #include "prims/vectorSupport.hpp"
65 #include "runtime/atomic.hpp"
66 #include "runtime/basicLock.inline.hpp"
67 #include "runtime/continuation.hpp"
68 #include "runtime/continuationEntry.inline.hpp"
69 #include "runtime/deoptimization.hpp"
70 #include "runtime/escapeBarrier.hpp"
71 #include "runtime/fieldDescriptor.hpp"
72 #include "runtime/fieldDescriptor.inline.hpp"
73 #include "runtime/frame.inline.hpp"
74 #include "runtime/handles.inline.hpp"
75 #include "runtime/interfaceSupport.inline.hpp"
76 #include "runtime/javaThread.hpp"
77 #include "runtime/jniHandles.inline.hpp"
78 #include "runtime/keepStackGCProcessed.hpp"
79 #include "runtime/lightweightSynchronizer.hpp"
80 #include "runtime/lockStack.inline.hpp"
81 #include "runtime/objectMonitor.inline.hpp"
82 #include "runtime/osThread.hpp"
83 #include "runtime/perfData.inline.hpp"
84 #include "runtime/safepointVerifiers.hpp"
85 #include "runtime/sharedRuntime.hpp"
86 #include "runtime/signature.hpp"
87 #include "runtime/stackFrameStream.inline.hpp"
88 #include "runtime/stackValue.hpp"
89 #include "runtime/stackWatermarkSet.hpp"
90 #include "runtime/stubRoutines.hpp"
91 #include "runtime/synchronizer.inline.hpp"
92 #include "runtime/threadSMR.hpp"
93 #include "runtime/threadWXSetters.inline.hpp"
94 #include "runtime/vframe.hpp"
95 #include "runtime/vframeArray.hpp"
96 #include "runtime/vframe_hp.hpp"
97 #include "runtime/vmOperations.hpp"
98 #include "services/management.hpp"
99 #include "utilities/checkedCast.hpp"
100 #include "utilities/events.hpp"
101 #include "utilities/growableArray.hpp"
102 #include "utilities/macros.hpp"
103 #include "utilities/preserveException.hpp"
104 #include "utilities/xmlstream.hpp"
105 #if INCLUDE_JFR
106 #include "jfr/jfr.inline.hpp"
107 #include "jfr/jfrEvents.hpp"
108 #include "jfr/metadata/jfrSerializer.hpp"
109 #endif
110
111 uint64_t DeoptimizationScope::_committed_deopt_gen = 0;
112 uint64_t DeoptimizationScope::_active_deopt_gen = 1;
113 bool DeoptimizationScope::_committing_in_progress = false;
114
115 DeoptimizationScope::DeoptimizationScope() : _required_gen(0) {
116 DEBUG_ONLY(_deopted = false;)
117
118 MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
267 return checked_cast<int>(result);
268 }
269
270 void Deoptimization::UnrollBlock::print() {
271 ResourceMark rm;
272 stringStream st;
273 st.print_cr("UnrollBlock");
274 st.print_cr(" size_of_deoptimized_frame = %d", _size_of_deoptimized_frame);
275 st.print( " frame_sizes: ");
276 for (int index = 0; index < number_of_frames(); index++) {
277 st.print("%zd ", frame_sizes()[index]);
278 }
279 st.cr();
280 tty->print_raw(st.freeze());
281 }
282
283 // In order to make fetch_unroll_info work properly with escape
284 // analysis, the method was changed from JRT_LEAF to JRT_BLOCK_ENTRY.
285 // The actual reallocation of previously eliminated objects occurs in realloc_objects,
286 // which is called from the method fetch_unroll_info_helper below.
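// Roughly: a JRT_LEAF routine must not safepoint or allocate in the Java heap, while
// reallocating scalar-replaced objects can trigger GC, so the helper below needs the
// JRT_BLOCK_ENTRY form, which permits a VM transition (and a safepoint) for that reallocation.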
287 JRT_BLOCK_ENTRY_PROF(Deoptimization::UnrollBlock*, Deoptimization, fetch_unroll_info, Deoptimization::fetch_unroll_info(JavaThread* current, int exec_mode))
288 // fetch_unroll_info() is called at the beginning of the deoptimization
289 // handler. Note this fact before we start generating temporary frames
290 // that can confuse an asynchronous stack walker. This counter is
291 // decremented at the end of unpack_frames().
292 current->inc_in_deopt_handler();
293
294 if (exec_mode == Unpack_exception) {
295 // When we get here, a callee has thrown an exception into a deoptimized
296 // frame. That throw might have deferred stack watermark checking until
297 // after unwinding. So we deal with such deferred requests here.
298 StackWatermarkSet::after_unwind(current);
299 }
300
301 return fetch_unroll_info_helper(current, exec_mode);
302 JRT_END
303
304 #if COMPILER2_OR_JVMCI
305 // print information about reallocated objects
306 static void print_objects(JavaThread* deoptee_thread,
307 GrowableArray<ScopeValue*>* objects, bool realloc_failures) {
859 case Bytecodes::_athrow:
860 case Bytecodes::_areturn:
861 case Bytecodes::_dreturn:
862 case Bytecodes::_freturn:
863 case Bytecodes::_ireturn:
864 case Bytecodes::_lreturn:
865 case Bytecodes::_jsr:
866 case Bytecodes::_ret:
867 case Bytecodes::_return:
868 case Bytecodes::_lookupswitch:
869 case Bytecodes::_tableswitch:
870 return false;
871 default:
872 return true;
873 }
874 }
875 #endif
876 #endif
877
878 // Return BasicType of value being returned
879 JRT_LEAF_PROF_NO_THREAD(BasicType, Deoptimization, unpack_frames, Deoptimization::unpack_frames(JavaThread* thread, int exec_mode))
880 assert(thread == JavaThread::current(), "pre-condition");
881
882 // We are already active in the special DeoptResourceMark; any ResourceObj's we
883 // allocate will be freed at the end of the routine.
884
885 // JRT_LEAF methods don't normally allocate handles and there is a
886 // NoHandleMark to enforce that. It is actually safe to use Handles
887 // in a JRT_LEAF method, and sometimes desirable, but to do so we
888 // must use ResetNoHandleMark to bypass the NoHandleMark, and
889 // then use a HandleMark to ensure any Handles we do create are
890 // cleaned up in this scope.
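// Concretely: without the ResetNoHandleMark below, creating a Handle inside this JRT_LEAF
// routine would trip the NoHandleMark assertion in debug builds; the HandleMark then bounds
// the lifetime of any handles this scope does create.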
891 ResetNoHandleMark rnhm;
892 HandleMark hm(thread);
893
894 frame stub_frame = thread->last_frame();
895
896 Continuation::notify_deopt(thread, stub_frame.sp());
897
898 // Since the frame to unpack is the top frame of this thread, the vframe_array_head
899 // must point to the vframeArray for the unpack frame.
1773 if (monitors != nullptr) {
1774 // Unlock in reverse order starting from most nested monitor.
1775 for (int j = (monitors->number_of_monitors() - 1); j >= 0; j--) {
1776 BasicObjectLock* src = monitors->at(j);
1777 if (src->obj() != nullptr) {
1778 ObjectSynchronizer::exit(src->obj(), src->lock(), thread);
1779 }
1780 }
1781 array->element(i)->free_monitors();
1782 #ifdef ASSERT
1783 array->element(i)->set_removed_monitors();
1784 #endif
1785 }
1786 }
1787 }
1788 #endif
1789
1790 void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
1791 assert(fr.can_be_deoptimized(), "checking frame type");
1792
1793 nmethod* nm = fr.cb()->as_nmethod_or_null();
1794 assert(nm != nullptr, "only compiled methods can deopt");
1795 DeoptAction action = (nm->is_not_entrant() ? Action_make_not_entrant : Action_none);
1796 ScopeDesc* cur_sd = nm->scope_desc_at(fr.pc());
1797 Bytecodes::Code bc = (cur_sd->bci() == -1 ? Bytecodes::_nop // deopt on method entry
1798 : cur_sd->method()->java_code_at(cur_sd->bci()));
1799 gather_statistics(nm, reason, action, bc);
1800
1801 if (LogCompilation && xtty != nullptr) {
1802 ttyLocker ttyl;
1803 xtty->begin_head("deoptimized thread='%zu' reason='%s' pc='" INTPTR_FORMAT "'", (uintx)thread->osthread()->thread_id(), trap_reason_name(reason), p2i(fr.pc()));
1804 nm->log_identity(xtty);
1805 xtty->end_head();
1806 for (ScopeDesc* sd = nm->scope_desc_at(fr.pc()); ; sd = sd->sender()) {
1807 xtty->begin_elem("jvms bci='%d'", sd->bci());
1808 xtty->method(sd->method());
1809 xtty->end_elem();
1810 if (sd->is_top()) break;
1811 }
1812 xtty->tail("deoptimized");
1813 }
1814
1815 Continuation::notify_deopt(thread, fr.sp());
1816
1817 // Patch the compiled method so that when execution returns to it we will
1818 // deopt the execution state and return to the interpreter.
1819 fr.deoptimize(thread);
1820 }
1821
2017 event.set_compileId(nm->compile_id());
2018 event.set_compiler(nm->compiler_type());
2019 event.set_method(method);
2020 event.set_lineNumber(method->line_number_from_bci(trap_bci));
2021 event.set_bci(trap_bci);
2022 event.set_instruction(instruction);
2023 event.set_reason(reason);
2024 event.set_action(action);
2025 event.commit();
2026 }
2027 }
2028
2029 #endif // INCLUDE_JFR
2030
2031 static void log_deopt(nmethod* nm, Method* tm, intptr_t pc, frame& fr, int trap_bci,
2032 const char* reason_name, const char* reason_action) {
2033 LogTarget(Debug, deoptimization) lt;
2034 if (lt.is_enabled()) {
2035 LogStream ls(lt);
2036 bool is_osr = nm->is_osr_method();
2037 ls.print("cid=%4d %s%s level=%d",
2038 nm->compile_id(), (is_osr ? "osr" : " "), (nm->preloaded() ? "preload" : ""), nm->comp_level());
2039 ls.print(" %s", tm->name_and_sig_as_C_string());
2040 ls.print(" trap_bci=%d ", trap_bci);
2041 if (is_osr) {
2042 ls.print("osr_bci=%d ", nm->osr_entry_bci());
2043 }
2044 ls.print("%s ", reason_name);
2045 ls.print("%s ", reason_action);
2046 ls.print_cr("pc=" INTPTR_FORMAT " relative_pc=" INTPTR_FORMAT,
2047 pc, fr.pc() - nm->code_begin());
2048 }
2049 }
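// For illustration (all values invented), the resulting -Xlog:deoptimization=debug line has the shape:
//   cid= 123 osr level=4 java.lang.String.hashCode()I trap_bci=12 osr_bci=8 unstable_if reinterpret pc=0x00007f0123456789 relative_pc=0x00000000000001a4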
2050
2051 JRT_ENTRY_PROF(void, Deoptimization, uncommon_trap_inner, Deoptimization::uncommon_trap_inner(JavaThread* current, jint trap_request)) {
2052 HandleMark hm(current);
2053
2054 // uncommon_trap() is called at the beginning of the uncommon trap
2055 // handler. Note this fact before we start generating temporary frames
2056 // that can confuse an asynchronous stack walker. This counter is
2057 // decremented at the end of unpack_frames().
2058
2059 current->inc_in_deopt_handler();
2060
2061 #if INCLUDE_JVMCI
2062 // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
2063 RegisterMap reg_map(current,
2064 RegisterMap::UpdateMap::include,
2065 RegisterMap::ProcessFrames::include,
2066 RegisterMap::WalkContinuation::skip);
2067 #else
2068 RegisterMap reg_map(current,
2069 RegisterMap::UpdateMap::skip,
2070 RegisterMap::ProcessFrames::include,
2071 RegisterMap::WalkContinuation::skip);
2107 #if INCLUDE_JVMCI
2108 jlong speculation = current->pending_failed_speculation();
2109 if (nm->is_compiled_by_jvmci()) {
2110 nm->update_speculation(current);
2111 } else {
2112 assert(speculation == 0, "There should not be a speculation for methods compiled by non-JVMCI compilers");
2113 }
2114
2115 if (trap_bci == SynchronizationEntryBCI) {
2116 trap_bci = 0;
2117 current->set_pending_monitorenter(true);
2118 }
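// (Interpretation of the two lines above: a trap at SynchronizationEntryBCI was taken on entry
//  to a synchronized method, before bci 0 executed, so the bci is normalized to 0 and the
//  runtime is told that a monitorenter is still pending.)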
2119
2120 if (reason == Deoptimization::Reason_transfer_to_interpreter) {
2121 current->set_pending_transfer_to_interpreter(true);
2122 }
2123 #endif
2124
2125 Bytecodes::Code trap_bc = trap_method->java_code_at(trap_bci);
2126 // Record this event in the histogram.
2127 gather_statistics(nm, reason, action, trap_bc);
2128
2129 // Ensure that we can record deopt. history:
2130 bool create_if_missing = ProfileTraps;
2131
2132 methodHandle profiled_method;
2133 #if INCLUDE_JVMCI
2134 if (nm->is_compiled_by_jvmci()) {
2135 profiled_method = methodHandle(current, nm->method());
2136 } else {
2137 profiled_method = trap_method;
2138 }
2139 #else
2140 profiled_method = trap_method;
2141 #endif
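// Either way, profiled_method is the method whose MethodData will record this trap: the
// nmethod's root method for JVMCI compilations, otherwise the (possibly inlined) trap_method.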
2142
2143 const char* nm_kind = nm->compile_kind();
2144 MethodData* trap_mdo =
2145 get_method_data(current, profiled_method, create_if_missing);
2146
2147 { // Log Deoptimization event for JFR, UL and event system
2148 Method* tm = trap_method();
2149 const char* reason_name = trap_reason_name(reason);
2150 const char* reason_action = trap_action_name(action);
2151 intptr_t pc = p2i(fr.pc());
2152
2153 JFR_ONLY(post_deoptimization_event(nm, tm, trap_bci, trap_bc, reason, action);)
2154 log_deopt(nm, tm, pc, fr, trap_bci, reason_name, reason_action);
2155 Events::log_deopt_message(current, "Uncommon trap: reason=%s action=%s pc=" INTPTR_FORMAT " method=%s @ %d %s %s",
2156 reason_name, reason_action, pc,
2157 tm->name_and_sig_as_C_string(), trap_bci, nm->compiler_name(), nm_kind);
2158 }
2159
2160 // Print a bunch of diagnostics, if requested.
2161 if (TraceDeoptimization || LogCompilation || is_receiver_constraint_failure) {
2162 ResourceMark rm;
2163
2164 // Lock to read ProfileData, and ensure lock is not broken by a safepoint
2165 // We must take it now, because we cannot acquire this lock later while
2166 // holding the tty lock (lock ordering by rank).
2167 MutexLocker ml(trap_mdo->extra_data_lock(), Mutex::_no_safepoint_check_flag);
2168
2169 ttyLocker ttyl;
2170
2171 char buf[100];
2172 if (xtty != nullptr) {
2173 xtty->begin_head("uncommon_trap thread='%zu' %s",
2174 os::current_thread_id(),
2175 format_trap_request(buf, sizeof(buf), trap_request));
2176 #if INCLUDE_JVMCI
2177 if (speculation != 0) {
2208 ProfileData* pdata = trap_mdo->bci_to_data(trap_bci);
2209 int dos = (pdata == nullptr)? 0: pdata->trap_state();
2210 if (dos != 0) {
2211 xtty->print(" state='%s'", format_trap_state(buf, sizeof(buf), dos));
2212 if (trap_state_is_recompiled(dos)) {
2213 int recnt2 = trap_mdo->overflow_recompile_count();
2214 if (recnt2 != 0)
2215 xtty->print(" recompiles2='%d'", recnt2);
2216 }
2217 }
2218 }
2219 if (xtty != nullptr) {
2220 xtty->stamp();
2221 xtty->end_head();
2222 }
2223 if (TraceDeoptimization) { // make noise on the tty
2224 stringStream st;
2225 st.print("UNCOMMON TRAP method=%s", trap_scope->method()->name_and_sig_as_C_string());
2226 st.print(" bci=%d pc=" INTPTR_FORMAT ", relative_pc=" INTPTR_FORMAT JVMCI_ONLY(", debug_id=%d"),
2227 trap_scope->bci(), p2i(fr.pc()), fr.pc() - nm->code_begin() JVMCI_ONLY(COMMA debug_id));
2228 st.print(" compiler=%s compile_id=%d kind=%s", nm->compiler_name(), nm->compile_id(), nm_kind);
2229 #if INCLUDE_JVMCI
2230 if (nm->is_compiled_by_jvmci()) {
2231 const char* installed_code_name = nm->jvmci_name();
2232 if (installed_code_name != nullptr) {
2233 st.print(" (JVMCI: installed code name=%s) ", installed_code_name);
2234 }
2235 }
2236 #endif
2237 st.print(" (@" INTPTR_FORMAT ") thread=%zu reason=%s action=%s unloaded_class_index=%d" JVMCI_ONLY(" debug_id=%d"),
2238 p2i(fr.pc()),
2239 os::current_thread_id(),
2240 trap_reason_name(reason),
2241 trap_action_name(action),
2242 unloaded_class_index
2243 #if INCLUDE_JVMCI
2244 , debug_id
2245 #endif
2246 );
2247 if (class_name != nullptr) {
2248 st.print(unresolved ? " unresolved class: " : " symbol: ");
2626 bool ignore_maybe_prior_recompile;
2627 assert(!reason_is_speculate(reason), "reason speculate only used by compiler");
2628 // JVMCI uses the total counts to determine whether deoptimizations are happening too frequently, so do not adjust the total counts.
2629 bool update_total_counts = true JVMCI_ONLY( && !UseJVMCICompiler);
2630
2631 // Lock to read ProfileData, and ensure lock is not broken by a safepoint
2632 MutexLocker ml(trap_mdo->extra_data_lock(), Mutex::_no_safepoint_check_flag);
2633
2634 query_update_method_data(trap_mdo, trap_bci,
2635 (DeoptReason)reason,
2636 update_total_counts,
2637 #if INCLUDE_JVMCI
2638 false,
2639 #endif
2640 nullptr,
2641 ignore_this_trap_count,
2642 ignore_maybe_prior_trap,
2643 ignore_maybe_prior_recompile);
2644 }
2645
2646 PROF_ENTRY(Deoptimization::UnrollBlock*, Deoptimization, uncommon_trap, Deoptimization::uncommon_trap(JavaThread* current, jint trap_request, jint exec_mode))
2647 // Enable WXWrite: current function is called from methods compiled by C2 directly
2648 MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXWrite, current));
2649
2650 // Still in Java state at this point; no safepoint has been taken yet.
2651 {
2652 // This enters VM and may safepoint
2653 uncommon_trap_inner(current, trap_request);
2654 }
2655 HandleMark hm(current);
2656 return fetch_unroll_info_helper(current, exec_mode);
2657 PROF_END
2658
2659 // Local derived constants.
2660 // Further breakdown of DataLayout::trap_state, as promised by DataLayout.
2661 const int DS_REASON_MASK = ((uint)DataLayout::trap_mask) >> 1;
2662 const int DS_RECOMPILE_BIT = DataLayout::trap_mask - DS_REASON_MASK;
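// Worked example (assuming, purely for illustration, an 8-bit trap_mask of 0xFF):
//   DS_REASON_MASK   = 0xFF >> 1   = 0x7F   // low bits hold the recorded DeoptReason
//   DS_RECOMPILE_BIT = 0xFF - 0x7F = 0x80   // top bit records "already recompiled once"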
2663
2664 //---------------------------trap_state_reason---------------------------------
2665 Deoptimization::DeoptReason
2666 Deoptimization::trap_state_reason(int trap_state) {
2667 // This assert provides the link between the width of DataLayout::trap_bits
2668 // and the encoding of "recorded" reasons. It ensures there are enough
2669 // bits to store all needed reasons in the per-BCI MDO profile.
2670 assert(DS_REASON_MASK >= Reason_RECORDED_LIMIT, "enough bits");
2671 int recompile_bit = (trap_state & DS_RECOMPILE_BIT);
2672 trap_state -= recompile_bit;
2673 if (trap_state == DS_REASON_MASK) {
2674 return Reason_many;
2675 } else {
2676 assert((int)Reason_none == 0, "state=0 => Reason_none");
2677 return (DeoptReason)trap_state;
2827 size_t len;
2828 if (unloaded_class_index < 0) {
2829 len = jio_snprintf(buf, buflen, "reason='%s' action='%s'" JVMCI_ONLY(" debug_id='%d'"),
2830 reason, action
2831 #if INCLUDE_JVMCI
2832 ,debug_id
2833 #endif
2834 );
2835 } else {
2836 len = jio_snprintf(buf, buflen, "reason='%s' action='%s' index='%d'" JVMCI_ONLY(" debug_id='%d'"),
2837 reason, action, unloaded_class_index
2838 #if INCLUDE_JVMCI
2839 ,debug_id
2840 #endif
2841 );
2842 }
2843 return buf;
2844 }
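// For illustration (values invented), this yields strings such as
//   reason='unstable_if' action='reinterpret' debug_id='0'
// or, when an unloaded class was involved,
//   reason='unloaded' action='reinterpret' index='17' debug_id='0'
// (the debug_id attribute is only present in JVMCI builds).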
2845
2846 juint Deoptimization::_deoptimization_hist
2847 [1 + 4 + 5] // 1 grand total + 4 online (JIT) tiers + 5 archived (AOT) tiers
2848 [Deoptimization::Reason_LIMIT]
2849 [1 + Deoptimization::Action_LIMIT]
2850 [Deoptimization::BC_CASE_LIMIT]
2851 = {0};
2852
2853 enum {
2854 LSB_BITS = 8,
2855 LSB_MASK = right_n_bits(LSB_BITS)
2856 };
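// Each 32-bit counter packs a bytecode tag into its low LSB_BITS and the event count into the
// remaining high bits. For example (illustrative value), a counter of 0x1B2 decodes as
//   bytecode = 0x1B2 & LSB_MASK  = 0xB2 (getstatic)
//   count    = 0x1B2 >> LSB_BITS = 1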
2857
2858 static void update(juint* cases, Bytecodes::Code bc) {
2859 juint* bc_counter_addr = nullptr;
2860 juint bc_counter = 0;
2861 // Look for an unused counter, or an exact match to this BC.
2862 if (bc != Bytecodes::_illegal) {
2863 for (int bc_case = 0; bc_case < Deoptimization::BC_CASE_LIMIT; bc_case++) {
2864 juint* counter_addr = &cases[bc_case];
2865 juint counter = *counter_addr;
2866 if ((counter == 0 && bc_counter_addr == nullptr)
2867 || (Bytecodes::Code)(counter & LSB_MASK) == bc) {
2868 // this counter is either free or is already devoted to this BC
2869 bc_counter_addr = counter_addr;
2870 bc_counter = counter | bc;
2871 }
2872 }
2873 }
2874 if (bc_counter_addr == nullptr) {
2875 // Overflow, or no given bytecode.
2876 bc_counter_addr = &cases[Deoptimization::BC_CASE_LIMIT-1];
2877 bc_counter = (*bc_counter_addr & ~LSB_MASK); // clear LSB
2878 }
2879 *bc_counter_addr = bc_counter + (1 << LSB_BITS);
2880 }
2881
2882
2883 void Deoptimization::gather_statistics(nmethod* nm, DeoptReason reason, DeoptAction action,
2884 Bytecodes::Code bc) {
2885 assert(reason >= 0 && reason < Reason_LIMIT, "oob");
2886 assert(action >= 0 && action < Action_LIMIT, "oob");
2887 _deoptimization_hist[0][Reason_none][0][0] += 1; // total
2888 _deoptimization_hist[0][reason][0][0] += 1; // per-reason total
2889
2890 update(_deoptimization_hist[0][reason][1+action], bc);
2891
2892 uint lvl = nm->comp_level() + (nm->is_aot() ? 4 : 0) + (nm->preloaded() ? 1 : 0);
2893 _deoptimization_hist[lvl][Reason_none][0][0] += 1; // total
2894 _deoptimization_hist[lvl][reason][0][0] += 1; // per-reason total
2895 update(_deoptimization_hist[lvl][reason][1+action], bc);
2896 }
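// Row selection above: row 0 is the grand total, rows 1-4 are the JIT tiers, and AOT code is
// shifted up by 4 (rows 5-9), with preloaded AOT code occupying one further row. This matches
// the [1 + 4 + 5] first dimension of _deoptimization_hist and the titles printed by
// print_statistics_on(outputStream*) below.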
2897
2898 jint Deoptimization::total_deoptimization_count() {
2899 return _deoptimization_hist[0][Reason_none][0][0];
2900 }
2901
2902 // Get the deopt count for a specific reason and a specific action. If either
2903 // one of 'reason' or 'action' is null, the method returns the sum of all
2904 // deoptimizations with the specific 'action' or 'reason' respectively.
2905 // If both arguments are null, the method returns the total deopt count.
2906 jint Deoptimization::deoptimization_count(const char *reason_str, const char *action_str) {
2907 if (reason_str == nullptr && action_str == nullptr) {
2908 return total_deoptimization_count();
2909 }
2910 juint counter = 0;
2911 for (int reason = 0; reason < Reason_LIMIT; reason++) {
2912 if (reason_str == nullptr || !strcmp(reason_str, trap_reason_name(reason))) {
2913 for (int action = 0; action < Action_LIMIT; action++) {
2914 if (action_str == nullptr || !strcmp(action_str, trap_action_name(action))) {
2915 juint* cases = _deoptimization_hist[0][reason][1+action];
2916 for (int bc_case = 0; bc_case < BC_CASE_LIMIT; bc_case++) {
2917 counter += cases[bc_case] >> LSB_BITS;
2918 }
2919 }
2920 }
2921 }
2922 }
2923 return counter;
2924 }
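// Usage sketch (hypothetical call sites): deoptimization_count("unstable_if", nullptr) sums the
// unstable_if traps across all actions, while deoptimization_count(nullptr, "reinterpret") sums
// every reason that was handled with the reinterpret action.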
2925
2926 void Deoptimization::print_statistics() {
2927 ttyLocker ttyl;
2928 if (xtty != nullptr) xtty->head("statistics type='deoptimization'");
2929 tty->print_cr("Deoptimization traps recorded:");
2930 print_statistics_on(tty);
2931 if (xtty != nullptr) xtty->tail("statistics");
2932 }
2933
2934 void Deoptimization::print_statistics_on(const char* title, int lvl, outputStream* st) {
2935 juint total = _deoptimization_hist[lvl][Reason_none][0][0];
2936 juint account = total;
2937 #define PRINT_STAT_LINE(name, r) \
2938 st->print_cr(" %d (%4.1f%%) %s", (int)(r), ((r) == total ? 100.0 : (((r) * 100.0) / total)), name);
2939 if (total > 0) {
2940 st->print(" %s: ", title);
2941 PRINT_STAT_LINE("total", total);
2942 // For each non-zero entry in the histogram, print the reason,
2943 // the action, and (if specifically known) the type of bytecode.
2944 for (int reason = 0; reason < Reason_LIMIT; reason++) {
2945 for (int action = 0; action < Action_LIMIT; action++) {
2946 juint* cases = Deoptimization::_deoptimization_hist[lvl][reason][1+action];
2947 for (int bc_case = 0; bc_case < BC_CASE_LIMIT; bc_case++) {
2948 juint counter = cases[bc_case];
2949 if (counter != 0) {
2950 Bytecodes::Code bc = (Bytecodes::Code)(counter & LSB_MASK);
2951 const char* bc_name = "other";
2952 if (bc_case == (BC_CASE_LIMIT-1) && bc == Bytecodes::_nop) {
2953 // overflow slot: update() cleared the bytecode tag, so keep "other"
2954 } else if (Bytecodes::is_defined(bc)) {
2955 bc_name = Bytecodes::name(bc);
2956 }
2957 juint r = counter >> LSB_BITS;
2958 st->print_cr(" %-34s %16s %16s: " UINT32_FORMAT_W(5) " (%4.1f%%)",
2959 trap_reason_name(reason), trap_action_name(action), bc_name,
2960 r, (r * 100.0) / total);
2961 account -= r;
2962 }
2963 }
2964 }
2965 }
2966 if (account != 0) {
2967 PRINT_STAT_LINE("unaccounted", account);
2968 }
2969 #undef PRINT_STAT_LINE
2970 }
2971 }
2972
2973 void Deoptimization::print_statistics_on(outputStream* st) {
2974 // print_statistics_on("Total", 0, st);
2975 print_statistics_on("Tier1", 1, st);
2976 print_statistics_on("Tier2", 2, st);
2977 print_statistics_on("Tier3", 3, st);
2978 print_statistics_on("Tier4", 4, st);
2979
2980 print_statistics_on("AOT Code Tier1", 5, st);
2981 print_statistics_on("AOT Code Tier2", 6, st);
2982 print_statistics_on("AOT Code Tier4", 8, st);
2983 print_statistics_on("AOT Code Tier5 (preloaded)", 9, st);
2984 }
2985
2986 #define DO_COUNTERS(macro) \
2987 macro(Deoptimization, fetch_unroll_info) \
2988 macro(Deoptimization, unpack_frames) \
2989 macro(Deoptimization, uncommon_trap_inner) \
2990 macro(Deoptimization, uncommon_trap)
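// DO_COUNTERS is an X-macro: applying it to INIT_COUNTER below creates a tick counter and an
// event counter for each runtime entry point listed above, and applying it to PRINT_COUNTER
// reports them. Adding a new profiled entry point only requires adding one macro(...) line here.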
2991
2992 #define INIT_COUNTER(sub, name) \
2993 NEWPERFTICKCOUNTERS(_perf_##sub##_##name##_timer, SUN_RT, #sub "::" #name) \
2994 NEWPERFEVENTCOUNTER(_perf_##sub##_##name##_count, SUN_RT, #sub "::" #name "_count");
2995
2996 void Deoptimization::init_counters() {
2997 if (ProfileRuntimeCalls && UsePerfData) {
2998 EXCEPTION_MARK;
2999
3000 DO_COUNTERS(INIT_COUNTER)
3001
3002 if (HAS_PENDING_EXCEPTION) {
3003 vm_exit_during_initialization("jvm_perf_init failed unexpectedly");
3004 }
3005 }
3006 }
3007 #undef INIT_COUNTER
3008
3009 #define PRINT_COUNTER(sub, name) { \
3010 jlong count = _perf_##sub##_##name##_count->get_value(); \
3011 if (count > 0) { \
3012 st->print_cr(" %-50s = " JLONG_FORMAT_W(6) "us (elapsed) " JLONG_FORMAT_W(6) "us (thread) (" JLONG_FORMAT_W(5) " events)", #sub "::" #name, \
3013 _perf_##sub##_##name##_timer->elapsed_counter_value_us(), \
3014 _perf_##sub##_##name##_timer->thread_counter_value_us(), \
3015 count); \
3016 }}
3017
3018 void Deoptimization::print_counters_on(outputStream* st) {
3019 if (ProfileRuntimeCalls && UsePerfData) {
3020 DO_COUNTERS(PRINT_COUNTER)
3021 } else {
3022 st->print_cr(" Deoptimization: no info (%s is disabled)", (UsePerfData ? "ProfileRuntimeCalls" : "UsePerfData"));
3023 }
3024 }
3025
3026 #undef PRINT_COUNTER
3027 #undef DO_COUNTERS
3028
3029 #else // COMPILER2_OR_JVMCI
3030
3031
3032 // Stubs for a C1-only system.
3033 bool Deoptimization::trap_state_is_recompiled(int trap_state) {
3034 return false;
3035 }
3036
3037 const char* Deoptimization::trap_reason_name(int reason) {
3038 return "unknown";
3039 }
3040
3041 jint Deoptimization::total_deoptimization_count() {
3042 return 0;
3043 }
3044
3045 jint Deoptimization::deoptimization_count(const char *reason_str, const char *action_str) {
3046 return 0;
3047 }
3048
3049 void Deoptimization::print_statistics() {
3050 // no output
3051 }
3052
3053 void
3054 Deoptimization::update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason) {
3055 // no update
3056 }
3057
3058 int Deoptimization::trap_state_has_reason(int trap_state, int reason) {
3059 return 0;
3060 }
3061
3062 void Deoptimization::gather_statistics(nmethod* nm, DeoptReason reason, DeoptAction action,
3063 Bytecodes::Code bc) {
3064 // no update
3065 }
3066
3067 const char* Deoptimization::format_trap_state(char* buf, size_t buflen,
3068 int trap_state) {
3069 jio_snprintf(buf, buflen, "#%d", trap_state);
3070 return buf;
3071 }
3072
3073 void Deoptimization::init_counters() {
3074 // nothing to do
3075 }
3076
3077 void Deoptimization::print_counters_on(outputStream* st) {
3078 // no output
3079 }
3080
3081 void Deoptimization::print_statistics_on(outputStream* st) {
3082 // no output
3083 }
3084
3085 #endif // COMPILER2_OR_JVMCI