48 #include "memory/universe.hpp"
49 #include "metaprogramming/primitiveConversions.hpp"
50 #include "oops/klass.hpp"
51 #include "oops/method.inline.hpp"
52 #include "oops/objArrayKlass.hpp"
53 #include "oops/oop.inline.hpp"
54 #include "prims/forte.hpp"
55 #include "prims/jvmtiExport.hpp"
56 #include "prims/jvmtiThreadState.hpp"
57 #include "prims/methodHandles.hpp"
58 #include "prims/nativeLookup.hpp"
59 #include "runtime/arguments.hpp"
60 #include "runtime/atomic.hpp"
61 #include "runtime/frame.inline.hpp"
62 #include "runtime/handles.inline.hpp"
63 #include "runtime/init.hpp"
64 #include "runtime/interfaceSupport.inline.hpp"
65 #include "runtime/java.hpp"
66 #include "runtime/javaCalls.hpp"
67 #include "runtime/jniHandles.inline.hpp"
68 #include "runtime/perfData.hpp"
69 #include "runtime/sharedRuntime.hpp"
70 #include "runtime/stackWatermarkSet.hpp"
71 #include "runtime/stubRoutines.hpp"
72 #include "runtime/synchronizer.hpp"
73 #include "runtime/vframe.inline.hpp"
74 #include "runtime/vframeArray.hpp"
75 #include "runtime/vm_version.hpp"
76 #include "utilities/copy.hpp"
77 #include "utilities/dtrace.hpp"
78 #include "utilities/events.hpp"
79 #include "utilities/resourceHash.hpp"
80 #include "utilities/macros.hpp"
81 #include "utilities/xmlstream.hpp"
82 #ifdef COMPILER1
83 #include "c1/c1_Runtime1.hpp"
84 #endif
85 #if INCLUDE_JFR
86 #include "jfr/jfr.hpp"
87 #endif
88
// Shared stub locations
// (all generated once at VM startup by SharedRuntime::generate_stubs())
RuntimeStub* SharedRuntime::_wrong_method_blob;
RuntimeStub* SharedRuntime::_wrong_method_abstract_blob;
RuntimeStub* SharedRuntime::_ic_miss_blob;
RuntimeStub* SharedRuntime::_resolve_opt_virtual_call_blob;
RuntimeStub* SharedRuntime::_resolve_virtual_call_blob;
RuntimeStub* SharedRuntime::_resolve_static_call_blob;
// Cached raw entry point of _resolve_static_call_blob (see generate_stubs()).
address SharedRuntime::_resolve_static_call_entry;

DeoptimizationBlob* SharedRuntime::_deopt_blob;
// Safepoint polling-page exception handlers: vector-loop polls (C2/JVMCI
// only), ordinary loop polls, and polls at method return.
SafepointBlob* SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
SafepointBlob* SharedRuntime::_polling_page_safepoint_handler_blob;
SafepointBlob* SharedRuntime::_polling_page_return_handler_blob;

#ifdef COMPILER2
UncommonTrapBlob* SharedRuntime::_uncommon_trap_blob;
#endif // COMPILER2

// Continuation doYield stub (installed elsewhere; not set in this file's
// visible code -- see users of _cont_doYield_stub).
nmethod* SharedRuntime::_cont_doYield_stub;
//----------------------------generate_stubs-----------------------------------
// Generate the shared runtime stubs defined above. Called once during VM
// startup; the resolve/wrong-method blobs must exist before any compiled
// call can be (re)resolved.
void SharedRuntime::generate_stubs() {
  // Call-resolution and wrong-method stubs: each wraps the given C entry point.
  _wrong_method_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method), "wrong_method_stub");
  _wrong_method_abstract_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
  _ic_miss_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss), "ic_miss_stub");
  _resolve_opt_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C), "resolve_opt_virtual_call");
  _resolve_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C), "resolve_virtual_call");
  _resolve_static_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C), "resolve_static_call");
  // Cache the static-resolve entry point for direct use.
  _resolve_static_call_entry = _resolve_static_call_blob->entry_point();

  AdapterHandlerLibrary::initialize();

#if COMPILER2_OR_JVMCI
  // Vectors are generated only by C2 and JVMCI.
  bool support_wide = is_wide_vector(MaxVectorSize);
  if (support_wide) {
    _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
  }
#endif // COMPILER2_OR_JVMCI
  // Safepoint polling handlers for loop polls and return polls.
  _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
  _polling_page_return_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);

  generate_deopt_blob();

#ifdef COMPILER2
  generate_uncommon_trap_blob();
#endif // COMPILER2
}
137
138 #include <math.h>
139
140 // Implementation of SharedRuntime
141
#ifndef PRODUCT
// For statistics
// Counters updated by the slow-path runtime entries and dumped by
// print_statistics() at VM exit. Updates are not MT-safe (best effort).
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-site shape counters used by print_call_statistics().
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

// Slow-path allocation / monitor / intrinsic counters.
uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Small fixed-size table recording where inline-cache misses happened.
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
187
188
189 void SharedRuntime::trace_ic_miss(address at) {
190 for (int i = 0; i < _ICmiss_index; i++) {
191 if (_ICmiss_at[i] == at) {
192 _ICmiss_count[i]++;
193 return;
194 }
195 }
196 int index = _ICmiss_index++;
197 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
198 _ICmiss_at[index] = at;
199 _ICmiss_count[index] = 1;
200 }
201
202 void SharedRuntime::print_ic_miss_histogram() {
203 if (ICMissHistogram) {
204 tty->print_cr("IC Miss Histogram:");
205 int tot_misses = 0;
206 for (int i = 0; i < _ICmiss_index; i++) {
207 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
208 tot_misses += _ICmiss_count[i];
209 }
210 tty->print_cr("Total IC misses: %7d", tot_misses);
211 }
212 }
213 #endif // PRODUCT
214
215
216 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
217 return x * y;
218 JRT_END
219
220
221 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
222 if (x == min_jlong && y == CONST64(-1)) {
223 return x;
224 } else {
225 return x / y;
226 }
227 JRT_END
228
229
230 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
231 if (x == min_jlong && y == CONST64(-1)) {
232 return 0;
233 } else {
660 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
661 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
662 JNIHandles::destroy_local(vthread);
663 JRT_END
664 #endif // INCLUDE_JVMTI
665
666 // The interpreter code to call this tracing function is only
667 // called/generated when UL is on for redefine, class and has the right level
668 // and tags. Since obsolete methods are never compiled, we don't have
669 // to modify the compilers to generate calls to this function.
670 //
671 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
672 JavaThread* thread, Method* method))
673 if (method->is_obsolete()) {
674 // We are calling an obsolete method, but this is not necessarily
675 // an error. Our method could have been redefined just after we
676 // fetched the Method* from the constant pool.
677 ResourceMark rm;
678 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
679 }
680 return 0;
681 JRT_END
682
683 // ret_pc points into caller; we are returning caller's exception handler
684 // for given exception
685 // Note that the implementation of this method assumes it's only called when an exception has actually occured
686 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
687 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
688 assert(nm != nullptr, "must exist");
689 ResourceMark rm;
690
691 #if INCLUDE_JVMCI
692 if (nm->is_compiled_by_jvmci()) {
693 // lookup exception handler for this pc
694 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
695 ExceptionHandlerTable table(nm);
696 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
697 if (t != nullptr) {
698 return nm->code_begin() + t->pco();
699 } else {
1299
1300 // determine call info & receiver
1301 // note: a) receiver is null for static calls
1302 // b) an exception is thrown if receiver is null for non-static calls
1303 CallInfo call_info;
1304 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1305 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1306
1307 NoSafepointVerifier nsv;
1308
1309 methodHandle callee_method(current, call_info.selected_method());
1310
1311 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1312 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1313 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1314 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1315 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1316
1317 assert(!caller_nm->is_unloading(), "It should not be unloading");
1318
1319 #ifndef PRODUCT
1320 // tracing/debugging/statistics
1321 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1322 (is_virtual) ? (&_resolve_virtual_ctr) :
1323 (&_resolve_static_ctr);
1324 Atomic::inc(addr);
1325
1326 if (TraceCallFixup) {
1327 ResourceMark rm(current);
1328 tty->print("resolving %s%s (%s) call to",
1329 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1330 Bytecodes::name(invoke_code));
1331 callee_method->print_short_name(tty);
1332 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1333 p2i(caller_frame.pc()), p2i(callee_method->code()));
1334 }
1335 #endif
1336
1337 if (invoke_code == Bytecodes::_invokestatic) {
1338 assert(callee_method->method_holder()->is_initialized() ||
1339 callee_method->method_holder()->is_reentrant_initialization(current),
1340 "invalid class initialization state for invoke_static");
1341 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1342 // In order to keep class initialization check, do not patch call
1343 // site for static call when the class is not fully initialized.
1344 // Proper check is enforced by call site re-resolution on every invocation.
1345 //
1361
1362 // Make sure the callee nmethod does not get deoptimized and removed before
1363 // we are done patching the code.
1364
1365
1366 CompiledICLocker ml(caller_nm);
1367 if (is_virtual && !is_optimized) {
1368 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1369 inline_cache->update(&call_info, receiver->klass());
1370 } else {
1371 // Callsite is a direct call - set it to the destination method
1372 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1373 callsite->set(callee_method);
1374 }
1375
1376 return callee_method;
1377 }
1378
1379 // Inline caches exist only in compiled code
1380 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1381 #ifdef ASSERT
1382 RegisterMap reg_map(current,
1383 RegisterMap::UpdateMap::skip,
1384 RegisterMap::ProcessFrames::include,
1385 RegisterMap::WalkContinuation::skip);
1386 frame stub_frame = current->last_frame();
1387 assert(stub_frame.is_runtime_frame(), "sanity check");
1388 frame caller_frame = stub_frame.sender(®_map);
1389 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1390 #endif /* ASSERT */
1391
1392 methodHandle callee_method;
1393 JRT_BLOCK
1394 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1395 // Return Method* through TLS
1396 current->set_vm_result_2(callee_method());
1397 JRT_BLOCK_END
1398 // return compiled code entry point after potential safepoints
1399 return get_resolved_entry(current, callee_method);
1400 JRT_END
1401
1402
1403 // Handle call site that has been made non-entrant
1404 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1405 // 6243940 We might end up in here if the callee is deoptimized
1406 // as we race to call it. We don't want to take a safepoint if
1407 // the caller was interpreted because the caller frame will look
1408 // interpreted to the stack walkers and arguments are now
1409 // "compiled" so it is much better to make this transition
1410 // invisible to the stack walking code. The i2c path will
1411 // place the callee method in the callee_target. It is stashed
1412 // there because if we try and find the callee by normal means a
1413 // safepoint is possible and have trouble gc'ing the compiled args.
1414 RegisterMap reg_map(current,
1415 RegisterMap::UpdateMap::skip,
1416 RegisterMap::ProcessFrames::include,
1417 RegisterMap::WalkContinuation::skip);
1418 frame stub_frame = current->last_frame();
1419 assert(stub_frame.is_runtime_frame(), "sanity check");
1420 frame caller_frame = stub_frame.sender(®_map);
1421
1422 if (caller_frame.is_interpreted_frame() ||
1423 caller_frame.is_entry_frame() ||
1424 caller_frame.is_upcall_stub_frame()) {
1437 // so bypassing it in c2i adapter is benign.
1438 return callee->get_c2i_no_clinit_check_entry();
1439 } else {
1440 return callee->get_c2i_entry();
1441 }
1442 }
1443
1444 // Must be compiled to compiled path which is safe to stackwalk
1445 methodHandle callee_method;
1446 JRT_BLOCK
1447 // Force resolving of caller (if we called from compiled frame)
1448 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1449 current->set_vm_result_2(callee_method());
1450 JRT_BLOCK_END
1451 // return compiled code entry point after potential safepoints
1452 return get_resolved_entry(current, callee_method);
1453 JRT_END
1454
1455 // Handle abstract method call
1456 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1457 // Verbose error message for AbstractMethodError.
1458 // Get the called method from the invoke bytecode.
1459 vframeStream vfst(current, true);
1460 assert(!vfst.at_end(), "Java frame must exist");
1461 methodHandle caller(current, vfst.method());
1462 Bytecode_invoke invoke(caller, vfst.bci());
1463 DEBUG_ONLY( invoke.verify(); )
1464
1465 // Find the compiled caller frame.
1466 RegisterMap reg_map(current,
1467 RegisterMap::UpdateMap::include,
1468 RegisterMap::ProcessFrames::include,
1469 RegisterMap::WalkContinuation::skip);
1470 frame stubFrame = current->last_frame();
1471 assert(stubFrame.is_runtime_frame(), "must be");
1472 frame callerFrame = stubFrame.sender(®_map);
1473 assert(callerFrame.is_compiled_frame(), "must be");
1474
1475 // Install exception and return forward entry.
1476 address res = StubRoutines::throw_AbstractMethodError_entry();
1483 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1484 }
1485 JRT_BLOCK_END
1486 return res;
1487 JRT_END
1488
1489 // return verified_code_entry if interp_only_mode is not set for the current thread;
1490 // otherwise return c2i entry.
1491 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1492 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1493 // In interp_only_mode we need to go to the interpreted entry
1494 // The c2i won't patch in this mode -- see fixup_callers_callsite
1495 return callee_method->get_c2i_entry();
1496 }
1497 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1498 return callee_method->verified_code_entry();
1499 }
1500
1501 // resolve a static call and patch code
1502 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1503 methodHandle callee_method;
1504 bool enter_special = false;
1505 JRT_BLOCK
1506 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1507 current->set_vm_result_2(callee_method());
1508 JRT_BLOCK_END
1509 // return compiled code entry point after potential safepoints
1510 return get_resolved_entry(current, callee_method);
1511 JRT_END
1512
// resolve virtual call and update inline cache to monomorphic
// Runtime entry for an unresolved compiled virtual call: resolves the
// callee, publishes the Method* to the caller through TLS, and returns
// the code entry point to continue at.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual = true, is_optimized = false: resolve through the inline cache.
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1523
1524
// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache). Patch code to resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual = true, is_optimized = true: statically bindable virtual call.
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1536
1537 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1538 JavaThread* current = THREAD;
1539 ResourceMark rm(current);
1540 CallInfo call_info;
1541 Bytecodes::Code bc;
1542
1543 // receiver is null for static calls. An exception is thrown for null
1544 // receivers for non-static calls
1545 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1546
1547 methodHandle callee_method(current, call_info.selected_method());
1548
1549 #ifndef PRODUCT
1550 Atomic::inc(&_ic_miss_ctr);
1551
1552 // Statistics & Tracing
1553 if (TraceCallFixup) {
1554 ResourceMark rm(current);
1555 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1556 callee_method->print_short_name(tty);
1557 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1558 }
1559
1560 if (ICMissHistogram) {
1561 MutexLocker m(VMStatistic_lock);
1562 RegisterMap reg_map(current,
1563 RegisterMap::UpdateMap::skip,
1564 RegisterMap::ProcessFrames::include,
1565 RegisterMap::WalkContinuation::skip);
1566 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1567 // produce statistics under the lock
1568 trace_ic_miss(f.pc());
1569 }
1570 #endif
1571
1654 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1655 cdc->set_to_clean();
1656 break;
1657 }
1658
1659 case relocInfo::virtual_call_type: {
1660 // compiled, dispatched call (which used to call an interpreted method)
1661 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1662 inline_cache->set_to_clean();
1663 break;
1664 }
1665 default:
1666 break;
1667 }
1668 }
1669 }
1670 }
1671
1672 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1673
1674
1675 #ifndef PRODUCT
1676 Atomic::inc(&_wrong_method_ctr);
1677
1678 if (TraceCallFixup) {
1679 ResourceMark rm(current);
1680 tty->print("handle_wrong_method reresolving call to");
1681 callee_method->print_short_name(tty);
1682 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1683 }
1684 #endif
1685
1686 return callee_method;
1687 }
1688
1689 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1690 // The faulting unsafe accesses should be changed to throw the error
1691 // synchronously instead. Meanwhile the faulting instruction will be
1692 // skipped over (effectively turning it into a no-op) and an
1693 // asynchronous exception will be raised which the thread will
1694 // handle at a later point. If the instruction is a load it will
1695 // return garbage.
1696
1697 // Request an async exception.
1935 // This is only called when CheckJNICalls is true, and only
1936 // for virtual thread termination.
1937 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
1938 assert(CheckJNICalls, "Only call this when checking JNI usage");
1939 if (log_is_enabled(Debug, jni)) {
1940 JavaThread* current = JavaThread::current();
1941 int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
1942 int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
1943 log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
1944 ") exiting with Objects still locked by JNI MonitorEnter.",
1945 vthread_id, carrier_id);
1946 }
1947 JRT_END
1948
1949 #ifndef PRODUCT
1950
// Dump all the counters accumulated above to tty (and wrap them in an
// XML <statistics> element when -XX:+LogCompilation style xtty output is
// active). Zero-valued optional counters are suppressed.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram();

  // Dump the JRT_ENTRY counters
  if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
  if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
  if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
  if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
  if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
  if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);

  // These four are always printed, even when zero.
  tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
  tty->print_cr("%5u wrong method", _wrong_method_ctr);
  tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
  tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
  tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);

  if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
  if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
  if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
  if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
  if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
  if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
  if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
  if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
  if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
  if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
  if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
  if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
  if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
  if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
  if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
  if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
  if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);

  AdapterHandlerLibrary::print_statistics();

  if (xtty != nullptr) xtty->tail("statistics");
}
1993
// Percentage of x relative to y; a denominator below 1 is clamped to 1
// to avoid division by zero (equivalent to dividing by MAX2(y, 1)).
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y > 1) ? y : 1;
  return 100.0 * (double)x / (double)denom;
}
1997
1998 class MethodArityHistogram {
1999 public:
2000 enum { MAX_ARITY = 256 };
2001 private:
2002 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2003 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2004 static uint64_t _total_compiled_calls;
2005 static uint64_t _max_compiled_calls_per_method;
2006 static int _max_arity; // max. arity seen
2007 static int _max_size; // max. arg size seen
2008
2009 static void add_method_to_histogram(nmethod* nm) {
2010 Method* method = (nm == nullptr) ? nullptr : nm->method();
2011 if (method != nullptr) {
2012 ArgumentCount args(method->signature());
2013 int arity = args.size() + (method->is_static() ? 0 : 1);
2058 // Take the Compile_lock to protect against changes in the CodeBlob structures
2059 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2060 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2061 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2062 _max_arity = _max_size = 0;
2063 _total_compiled_calls = 0;
2064 _max_compiled_calls_per_method = 0;
2065 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2066 CodeCache::nmethods_do(add_method_to_histogram);
2067 print_histogram();
2068 }
2069 };
2070
// Storage for MethodArityHistogram's static histogram state
// (zero-initialized; reset again each time the histogram is built).
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2077
2078 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2079 tty->print_cr("Calls from compiled code:");
2080 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2081 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2082 int64_t mono_i = _nof_interface_calls;
2083 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2084 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2085 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2086 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2087 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2088 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2089 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2090 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2091 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2092 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2093 tty->cr();
2094 tty->print_cr("Note 1: counter updates are not MT-safe.");
2095 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2096 tty->print_cr(" %% in nested categories are relative to their category");
2097 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2098 tty->cr();
2311 AdapterFingerPrint::equals>;
2312 static AdapterHandlerTable* _adapter_handler_table;
2313
2314 // Find a entry with the same fingerprint if it exists
2315 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2316 NOT_PRODUCT(_lookups++);
2317 assert_lock_strong(AdapterHandlerLibrary_lock);
2318 AdapterFingerPrint fp(total_args_passed, sig_bt);
2319 AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2320 if (entry != nullptr) {
2321 #ifndef PRODUCT
2322 if (fp.is_compact()) _compact++;
2323 _hits++;
2324 #endif
2325 return *entry;
2326 }
2327 return nullptr;
2328 }
2329
2330 #ifndef PRODUCT
2331 static void print_table_statistics() {
2332 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2333 return sizeof(*key) + sizeof(*a);
2334 };
2335 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2336 ts.print(tty, "AdapterHandlerTable");
2337 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2338 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2339 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2340 _lookups, _equals, _hits, _compact);
2341 }
2342 #endif
2343
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
// Pre-built shared handlers for common signatures, plus the abstract-method
// handler; all populated during AdapterHandlerLibrary initialization.
AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
// Size of the shared code buffer used while generating adapters.
const int AdapterHandlerLibrary_size = 16*K;
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2354
// Accessor for the shared BufferBlob used when generating adapter code.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  return _buffer;
}
2358
2359 static void post_adapter_creation(const AdapterBlob* new_adapter,
2360 const AdapterHandlerEntry* entry) {
2361 if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2362 char blob_id[256];
3008 assert(found, "Should have found handler");
3009 }
3010
// Print this entry's address and fingerprint, followed by whichever of
// its entry points (i2c, c2i, c2i-unverified, c2i-no-clinit-check) have
// been set, all on a single line.
void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
  st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
  if (get_i2c_entry() != nullptr) {
    st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
  }
  if (get_c2i_entry() != nullptr) {
    st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
  }
  if (get_c2i_unverified_entry() != nullptr) {
    st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
  }
  if (get_c2i_no_clinit_check_entry() != nullptr) {
    st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
  }
  st->cr();
}
3027
#ifndef PRODUCT

// Forward to the file-static adapter-table statistics printer.
void AdapterHandlerLibrary::print_statistics() {
  print_table_statistics();
}

#endif /* PRODUCT */
3035
3036 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3037 assert(current == JavaThread::current(), "pre-condition");
3038 StackOverflow* overflow_state = current->stack_overflow_state();
3039 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3040 overflow_state->set_reserved_stack_activation(current->stack_base());
3041 JRT_END
3042
3043 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3044 ResourceMark rm(current);
3045 frame activation;
3046 nmethod* nm = nullptr;
3047 int count = 1;
3048
3049 assert(fr.is_java_frame(), "Must start on Java frame");
3050
3051 RegisterMap map(JavaThread::current(),
3052 RegisterMap::UpdateMap::skip,
3053 RegisterMap::ProcessFrames::skip,
3054 RegisterMap::WalkContinuation::skip); // don't walk continuations
3055 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|
48 #include "memory/universe.hpp"
49 #include "metaprogramming/primitiveConversions.hpp"
50 #include "oops/klass.hpp"
51 #include "oops/method.inline.hpp"
52 #include "oops/objArrayKlass.hpp"
53 #include "oops/oop.inline.hpp"
54 #include "prims/forte.hpp"
55 #include "prims/jvmtiExport.hpp"
56 #include "prims/jvmtiThreadState.hpp"
57 #include "prims/methodHandles.hpp"
58 #include "prims/nativeLookup.hpp"
59 #include "runtime/arguments.hpp"
60 #include "runtime/atomic.hpp"
61 #include "runtime/frame.inline.hpp"
62 #include "runtime/handles.inline.hpp"
63 #include "runtime/init.hpp"
64 #include "runtime/interfaceSupport.inline.hpp"
65 #include "runtime/java.hpp"
66 #include "runtime/javaCalls.hpp"
67 #include "runtime/jniHandles.inline.hpp"
68 #include "runtime/perfData.inline.hpp"
69 #include "runtime/sharedRuntime.hpp"
70 #include "runtime/stackWatermarkSet.hpp"
71 #include "runtime/stubRoutines.hpp"
72 #include "runtime/synchronizer.hpp"
73 #include "runtime/vframe.inline.hpp"
74 #include "runtime/vframeArray.hpp"
75 #include "runtime/vm_version.hpp"
76 #include "services/management.hpp"
77 #include "utilities/copy.hpp"
78 #include "utilities/dtrace.hpp"
79 #include "utilities/events.hpp"
80 #include "utilities/resourceHash.hpp"
81 #include "utilities/macros.hpp"
82 #include "utilities/xmlstream.hpp"
83 #ifdef COMPILER1
84 #include "c1/c1_Runtime1.hpp"
85 #endif
86 #if INCLUDE_JFR
87 #include "jfr/jfr.hpp"
88 #endif
89
// Shared stub locations

// Blobs that trampoline from compiled code into SharedRuntime's C++
// resolution/miss handlers (wired up in generate_stubs()).
RuntimeStub* SharedRuntime::_wrong_method_blob;
RuntimeStub* SharedRuntime::_wrong_method_abstract_blob;
RuntimeStub* SharedRuntime::_ic_miss_blob;
RuntimeStub* SharedRuntime::_resolve_opt_virtual_call_blob;
RuntimeStub* SharedRuntime::_resolve_virtual_call_blob;
RuntimeStub* SharedRuntime::_resolve_static_call_blob;
// Cached entry point of _resolve_static_call_blob (set in generate_stubs()).
address SharedRuntime::_resolve_static_call_entry;

DeoptimizationBlob* SharedRuntime::_deopt_blob;
// Safepoint polling-page handlers; the vectors variant is only generated
// when wide vectors are supported (C2/JVMCI, see generate_stubs()).
SafepointBlob* SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
SafepointBlob* SharedRuntime::_polling_page_safepoint_handler_blob;
SafepointBlob* SharedRuntime::_polling_page_return_handler_blob;

#ifdef COMPILER2
UncommonTrapBlob* SharedRuntime::_uncommon_trap_blob;
#endif // COMPILER2

nmethod* SharedRuntime::_cont_doYield_stub;

// Perf tick counters timing the resolve/miss runtime entries.
// Created in generate_stubs() when UsePerfData is on; otherwise stay null.
PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time    = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time     = nullptr;
PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time            = nullptr;
116 //----------------------------generate_stubs-----------------------------------
117 void SharedRuntime::generate_stubs() {
118 _wrong_method_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method), "wrong_method_stub");
119 _wrong_method_abstract_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
120 _ic_miss_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss), "ic_miss_stub");
121 _resolve_opt_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C), "resolve_opt_virtual_call");
122 _resolve_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C), "resolve_virtual_call");
123 _resolve_static_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C), "resolve_static_call");
124 _resolve_static_call_entry = _resolve_static_call_blob->entry_point();
125
126 AdapterHandlerLibrary::initialize();
127
128 #if COMPILER2_OR_JVMCI
129 // Vectors are generated only by C2 and JVMCI.
130 bool support_wide = is_wide_vector(MaxVectorSize);
131 if (support_wide) {
132 _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
133 }
134 #endif // COMPILER2_OR_JVMCI
135 _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
136 _polling_page_return_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
137
138 generate_deopt_blob();
139
140 #ifdef COMPILER2
141 generate_uncommon_trap_blob();
142 #endif // COMPILER2
143 if (UsePerfData) {
144 EXCEPTION_MARK;
145 NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resovle_opt_virtual_call");
146 NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resovle_virtual_call");
147 NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resovle_static_call");
148 NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
149 NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time , SUN_CI, "ic_miss");
150 if (HAS_PENDING_EXCEPTION) {
151 vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
152 }
153 }
154 }
155
156 void SharedRuntime::print_counters_on(outputStream* st) {
157 st->print_cr("SharedRuntime:");
158 if (UsePerfData) {
159 st->print_cr(" resolve_opt_virtual_call: %5ldms (elapsed) %5ldms (thread) / %5d events",
160 _perf_resolve_opt_virtual_total_time->elapsed_counter_value_ms(),
161 _perf_resolve_opt_virtual_total_time->thread_counter_value_ms(),
162 _resolve_opt_virtual_ctr);
163 st->print_cr(" resolve_virtual_call: %5ldms (elapsed) %5ldms (thread) / %5d events",
164 _perf_resolve_virtual_total_time->elapsed_counter_value_ms(),
165 _perf_resolve_virtual_total_time->thread_counter_value_ms(),
166 _resolve_virtual_ctr);
167 st->print_cr(" resolve_static_call: %5ldms (elapsed) %5ldms (thread) / %5d events",
168 _perf_resolve_static_total_time->elapsed_counter_value_ms(),
169 _perf_resolve_static_total_time->thread_counter_value_ms(),
170 _resolve_static_ctr);
171 st->print_cr(" handle_wrong_method: %5ldms (elapsed) %5ldms (thread) / %5d events",
172 _perf_handle_wrong_method_total_time->elapsed_counter_value_ms(),
173 _perf_handle_wrong_method_total_time->thread_counter_value_ms(),
174 _wrong_method_ctr);
175 st->print_cr(" ic_miss: %5ldms (elapsed) %5ldms (thread) / %5d events",
176 _perf_ic_miss_total_time->elapsed_counter_value_ms(),
177 _perf_ic_miss_total_time->thread_counter_value_ms(),
178 _ic_miss_ctr);
179
180 jlong total_elapsed_time_ms = Management::ticks_to_ms(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
181 _perf_resolve_virtual_total_time->elapsed_counter_value() +
182 _perf_resolve_static_total_time->elapsed_counter_value() +
183 _perf_handle_wrong_method_total_time->elapsed_counter_value() +
184 _perf_ic_miss_total_time->elapsed_counter_value());
185 jlong total_thread_time_ms = Management::ticks_to_ms(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
186 _perf_resolve_virtual_total_time->thread_counter_value() +
187 _perf_resolve_static_total_time->thread_counter_value() +
188 _perf_handle_wrong_method_total_time->thread_counter_value() +
189 _perf_ic_miss_total_time->thread_counter_value());
190 st->print_cr("Total: %5ldms (elapsed) %5ldms (thread)", total_elapsed_time_ms, total_thread_time_ms);
191 } else {
192 st->print_cr(" no data (UsePerfData is turned off)");
193 }
194 }
195
196 #include <math.h>
197
198 // Implementation of SharedRuntime
199
// For statistics

// Call-resolution event counters, updated with Atomic::inc on the
// resolve/miss slow paths (available in all build types).
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;

#ifndef PRODUCT
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-site statistics from compiled code, reported by
// print_call_statistics_on. Note: updates are not MT-safe.
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

// Slow-path entry counters (allocation, monitors, etc.).
uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Bounded table of IC-miss sites and per-site hit counts; populated by
// trace_ic_miss and dumped by print_ic_miss_histogram_on.
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
246
247
248 void SharedRuntime::trace_ic_miss(address at) {
249 for (int i = 0; i < _ICmiss_index; i++) {
250 if (_ICmiss_at[i] == at) {
251 _ICmiss_count[i]++;
252 return;
253 }
254 }
255 int index = _ICmiss_index++;
256 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
257 _ICmiss_at[index] = at;
258 _ICmiss_count[index] = 1;
259 }
260
261 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
262 if (ICMissHistogram) {
263 st->print_cr("IC Miss Histogram:");
264 int tot_misses = 0;
265 for (int i = 0; i < _ICmiss_index; i++) {
266 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
267 tot_misses += _ICmiss_count[i];
268 }
269 st->print_cr("Total IC misses: %7d", tot_misses);
270 }
271 }
272 #endif // !PRODUCT
273
274
// 64-bit multiply. Note the operand order shared with ldiv/lrem below:
// y is passed first, x second; the result is x * y.
JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
  return x * y;
JRT_END
278
279
280 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
281 if (x == min_jlong && y == CONST64(-1)) {
282 return x;
283 } else {
284 return x / y;
285 }
286 JRT_END
287
288
289 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
290 if (x == min_jlong && y == CONST64(-1)) {
291 return 0;
292 } else {
719 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
720 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
721 JNIHandles::destroy_local(vthread);
722 JRT_END
723 #endif // INCLUDE_JVMTI
724
725 // The interpreter code to call this tracing function is only
726 // called/generated when UL is on for redefine, class and has the right level
727 // and tags. Since obsolete methods are never compiled, we don't have
728 // to modify the compilers to generate calls to this function.
729 //
730 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
731 JavaThread* thread, Method* method))
732 if (method->is_obsolete()) {
733 // We are calling an obsolete method, but this is not necessarily
734 // an error. Our method could have been redefined just after we
735 // fetched the Method* from the constant pool.
736 ResourceMark rm;
737 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
738 }
739
740 LogStreamHandle(Trace, interpreter, bytecode) log;
741 if (log.is_enabled()) {
742 ResourceMark rm;
743 log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
744 p2i(thread),
745 (method->is_static() ? "static" : "virtual"),
746 method->name_and_sig_as_C_string(),
747 (method->is_native() ? " native" : ""),
748 (thread->class_being_initialized() != nullptr ? " clinit" : ""),
749 (method->method_holder()->is_initialized() ? "" : " being_initialized"));
750 }
751 return 0;
752 JRT_END
753
754 // ret_pc points into caller; we are returning caller's exception handler
755 // for given exception
// Note that the implementation of this method assumes it's only called when an exception has actually occurred
757 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
758 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
759 assert(nm != nullptr, "must exist");
760 ResourceMark rm;
761
762 #if INCLUDE_JVMCI
763 if (nm->is_compiled_by_jvmci()) {
764 // lookup exception handler for this pc
765 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
766 ExceptionHandlerTable table(nm);
767 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
768 if (t != nullptr) {
769 return nm->code_begin() + t->pco();
770 } else {
1370
1371 // determine call info & receiver
1372 // note: a) receiver is null for static calls
1373 // b) an exception is thrown if receiver is null for non-static calls
1374 CallInfo call_info;
1375 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1376 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1377
1378 NoSafepointVerifier nsv;
1379
1380 methodHandle callee_method(current, call_info.selected_method());
1381
1382 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1383 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1384 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1385 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1386 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1387
1388 assert(!caller_nm->is_unloading(), "It should not be unloading");
1389
1390 // tracing/debugging/statistics
1391 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1392 (is_virtual) ? (&_resolve_virtual_ctr) :
1393 (&_resolve_static_ctr);
1394 Atomic::inc(addr);
1395
1396 #ifndef PRODUCT
1397 if (TraceCallFixup) {
1398 ResourceMark rm(current);
1399 tty->print("resolving %s%s (%s) call to",
1400 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1401 Bytecodes::name(invoke_code));
1402 callee_method->print_short_name(tty);
1403 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1404 p2i(caller_frame.pc()), p2i(callee_method->code()));
1405 }
1406 #endif
1407
1408 if (invoke_code == Bytecodes::_invokestatic) {
1409 assert(callee_method->method_holder()->is_initialized() ||
1410 callee_method->method_holder()->is_reentrant_initialization(current),
1411 "invalid class initialization state for invoke_static");
1412 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1413 // In order to keep class initialization check, do not patch call
1414 // site for static call when the class is not fully initialized.
1415 // Proper check is enforced by call site re-resolution on every invocation.
1416 //
1432
1433 // Make sure the callee nmethod does not get deoptimized and removed before
1434 // we are done patching the code.
1435
1436
1437 CompiledICLocker ml(caller_nm);
1438 if (is_virtual && !is_optimized) {
1439 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1440 inline_cache->update(&call_info, receiver->klass());
1441 } else {
1442 // Callsite is a direct call - set it to the destination method
1443 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1444 callsite->set(callee_method);
1445 }
1446
1447 return callee_method;
1448 }
1449
1450 // Inline caches exist only in compiled code
1451 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1452 PerfTraceTime timer(_perf_ic_miss_total_time);
1453
1454 #ifdef ASSERT
1455 RegisterMap reg_map(current,
1456 RegisterMap::UpdateMap::skip,
1457 RegisterMap::ProcessFrames::include,
1458 RegisterMap::WalkContinuation::skip);
1459 frame stub_frame = current->last_frame();
1460 assert(stub_frame.is_runtime_frame(), "sanity check");
1461 frame caller_frame = stub_frame.sender(®_map);
1462 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1463 #endif /* ASSERT */
1464
1465 methodHandle callee_method;
1466 JRT_BLOCK
1467 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1468 // Return Method* through TLS
1469 current->set_vm_result_2(callee_method());
1470 JRT_BLOCK_END
1471 // return compiled code entry point after potential safepoints
1472 return get_resolved_entry(current, callee_method);
1473 JRT_END
1474
1475
1476 // Handle call site that has been made non-entrant
1477 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1478 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1479
1480 // 6243940 We might end up in here if the callee is deoptimized
1481 // as we race to call it. We don't want to take a safepoint if
1482 // the caller was interpreted because the caller frame will look
1483 // interpreted to the stack walkers and arguments are now
1484 // "compiled" so it is much better to make this transition
1485 // invisible to the stack walking code. The i2c path will
1486 // place the callee method in the callee_target. It is stashed
1487 // there because if we try and find the callee by normal means a
1488 // safepoint is possible and have trouble gc'ing the compiled args.
1489 RegisterMap reg_map(current,
1490 RegisterMap::UpdateMap::skip,
1491 RegisterMap::ProcessFrames::include,
1492 RegisterMap::WalkContinuation::skip);
1493 frame stub_frame = current->last_frame();
1494 assert(stub_frame.is_runtime_frame(), "sanity check");
1495 frame caller_frame = stub_frame.sender(®_map);
1496
1497 if (caller_frame.is_interpreted_frame() ||
1498 caller_frame.is_entry_frame() ||
1499 caller_frame.is_upcall_stub_frame()) {
1512 // so bypassing it in c2i adapter is benign.
1513 return callee->get_c2i_no_clinit_check_entry();
1514 } else {
1515 return callee->get_c2i_entry();
1516 }
1517 }
1518
1519 // Must be compiled to compiled path which is safe to stackwalk
1520 methodHandle callee_method;
1521 JRT_BLOCK
1522 // Force resolving of caller (if we called from compiled frame)
1523 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1524 current->set_vm_result_2(callee_method());
1525 JRT_BLOCK_END
1526 // return compiled code entry point after potential safepoints
1527 return get_resolved_entry(current, callee_method);
1528 JRT_END
1529
1530 // Handle abstract method call
1531 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1532 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1533
1534 // Verbose error message for AbstractMethodError.
1535 // Get the called method from the invoke bytecode.
1536 vframeStream vfst(current, true);
1537 assert(!vfst.at_end(), "Java frame must exist");
1538 methodHandle caller(current, vfst.method());
1539 Bytecode_invoke invoke(caller, vfst.bci());
1540 DEBUG_ONLY( invoke.verify(); )
1541
1542 // Find the compiled caller frame.
1543 RegisterMap reg_map(current,
1544 RegisterMap::UpdateMap::include,
1545 RegisterMap::ProcessFrames::include,
1546 RegisterMap::WalkContinuation::skip);
1547 frame stubFrame = current->last_frame();
1548 assert(stubFrame.is_runtime_frame(), "must be");
1549 frame callerFrame = stubFrame.sender(®_map);
1550 assert(callerFrame.is_compiled_frame(), "must be");
1551
1552 // Install exception and return forward entry.
1553 address res = StubRoutines::throw_AbstractMethodError_entry();
1560 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1561 }
1562 JRT_BLOCK_END
1563 return res;
1564 JRT_END
1565
1566 // return verified_code_entry if interp_only_mode is not set for the current thread;
1567 // otherwise return c2i entry.
1568 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1569 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1570 // In interp_only_mode we need to go to the interpreted entry
1571 // The c2i won't patch in this mode -- see fixup_callers_callsite
1572 return callee_method->get_c2i_entry();
1573 }
1574 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1575 return callee_method->verified_code_entry();
1576 }
1577
1578 // resolve a static call and patch code
1579 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1580 PerfTraceTime timer(_perf_resolve_static_total_time);
1581
1582 methodHandle callee_method;
1583 bool enter_special = false;
1584 JRT_BLOCK
1585 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1586 current->set_vm_result_2(callee_method());
1587 JRT_BLOCK_END
1588 // return compiled code entry point after potential safepoints
1589 return get_resolved_entry(current, callee_method);
1590 JRT_END
1591
1592 // resolve virtual call and update inline cache to monomorphic
1593 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1594 PerfTraceTime timer(_perf_resolve_virtual_total_time);
1595
1596 methodHandle callee_method;
1597 JRT_BLOCK
1598 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1599 current->set_vm_result_2(callee_method());
1600 JRT_BLOCK_END
1601 // return compiled code entry point after potential safepoints
1602 return get_resolved_entry(current, callee_method);
1603 JRT_END
1604
1605
1606 // Resolve a virtual call that can be statically bound (e.g., always
1607 // monomorphic, so it has no inline cache). Patch code to resolved target.
1608 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1609 PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1610
1611 methodHandle callee_method;
1612 JRT_BLOCK
1613 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1614 current->set_vm_result_2(callee_method());
1615 JRT_BLOCK_END
1616 // return compiled code entry point after potential safepoints
1617 return get_resolved_entry(current, callee_method);
1618 JRT_END
1619
1620 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1621 JavaThread* current = THREAD;
1622 ResourceMark rm(current);
1623 CallInfo call_info;
1624 Bytecodes::Code bc;
1625
1626 // receiver is null for static calls. An exception is thrown for null
1627 // receivers for non-static calls
1628 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1629
1630 methodHandle callee_method(current, call_info.selected_method());
1631
1632 Atomic::inc(&_ic_miss_ctr);
1633
1634 #ifndef PRODUCT
1635 // Statistics & Tracing
1636 if (TraceCallFixup) {
1637 ResourceMark rm(current);
1638 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1639 callee_method->print_short_name(tty);
1640 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1641 }
1642
1643 if (ICMissHistogram) {
1644 MutexLocker m(VMStatistic_lock);
1645 RegisterMap reg_map(current,
1646 RegisterMap::UpdateMap::skip,
1647 RegisterMap::ProcessFrames::include,
1648 RegisterMap::WalkContinuation::skip);
1649 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1650 // produce statistics under the lock
1651 trace_ic_miss(f.pc());
1652 }
1653 #endif
1654
1737 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1738 cdc->set_to_clean();
1739 break;
1740 }
1741
1742 case relocInfo::virtual_call_type: {
1743 // compiled, dispatched call (which used to call an interpreted method)
1744 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1745 inline_cache->set_to_clean();
1746 break;
1747 }
1748 default:
1749 break;
1750 }
1751 }
1752 }
1753 }
1754
1755 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1756
1757 Atomic::inc(&_wrong_method_ctr);
1758
1759 #ifndef PRODUCT
1760 if (TraceCallFixup) {
1761 ResourceMark rm(current);
1762 tty->print("handle_wrong_method reresolving call to");
1763 callee_method->print_short_name(tty);
1764 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1765 }
1766 #endif
1767
1768 return callee_method;
1769 }
1770
1771 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1772 // The faulting unsafe accesses should be changed to throw the error
1773 // synchronously instead. Meanwhile the faulting instruction will be
1774 // skipped over (effectively turning it into a no-op) and an
1775 // asynchronous exception will be raised which the thread will
1776 // handle at a later point. If the instruction is a load it will
1777 // return garbage.
1778
1779 // Request an async exception.
2017 // This is only called when CheckJNICalls is true, and only
2018 // for virtual thread termination.
2019 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
2020 assert(CheckJNICalls, "Only call this when checking JNI usage");
2021 if (log_is_enabled(Debug, jni)) {
2022 JavaThread* current = JavaThread::current();
2023 int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2024 int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2025 log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2026 ") exiting with Objects still locked by JNI MonitorEnter.",
2027 vthread_id, carrier_id);
2028 }
2029 JRT_END
2030
2031 #ifndef PRODUCT
2032
2033 void SharedRuntime::print_statistics() {
2034 ttyLocker ttyl;
2035 if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");
2036
2037 SharedRuntime::print_ic_miss_histogram_on(tty);
2038 SharedRuntime::print_counters_on(tty);
2039 AdapterHandlerLibrary::print_statistics_on(tty);
2040
2041 if (xtty != nullptr) xtty->tail("statistics");
2042 }
2043
2044 //void SharedRuntime::print_counters_on(outputStream* st) {
2045 // // Dump the JRT_ENTRY counters
2046 // if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2047 // if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2048 // if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2049 // if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2050 // if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2051 // if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2052 //
2053 // st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2054 // st->print_cr("%5u wrong method", _wrong_method_ctr);
2055 // st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2056 // st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2057 // st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2058 //
2059 // if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2060 // if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2061 // if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2062 // if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2063 // if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2064 // if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2065 // if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2066 // if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2067 // if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2068 // if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2069 // if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2070 // if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2071 // if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2072 // if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2073 // if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2074 // if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
2075 // if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2076 //}
2077
2078 inline double percent(int64_t x, int64_t y) {
2079 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2080 }
2081
2082 class MethodArityHistogram {
2083 public:
2084 enum { MAX_ARITY = 256 };
2085 private:
2086 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2087 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2088 static uint64_t _total_compiled_calls;
2089 static uint64_t _max_compiled_calls_per_method;
2090 static int _max_arity; // max. arity seen
2091 static int _max_size; // max. arg size seen
2092
2093 static void add_method_to_histogram(nmethod* nm) {
2094 Method* method = (nm == nullptr) ? nullptr : nm->method();
2095 if (method != nullptr) {
2096 ArgumentCount args(method->signature());
2097 int arity = args.size() + (method->is_static() ? 0 : 1);
2142 // Take the Compile_lock to protect against changes in the CodeBlob structures
2143 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2144 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2145 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2146 _max_arity = _max_size = 0;
2147 _total_compiled_calls = 0;
2148 _max_compiled_calls_per_method = 0;
2149 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2150 CodeCache::nmethods_do(add_method_to_histogram);
2151 print_histogram();
2152 }
2153 };
2154
// Storage for MethodArityHistogram's accumulators. They are reset and
// populated while holding Compile_lock and CodeCache_lock (see the
// histogram collection code above).
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2161
2162 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2163 tty->print_cr("Calls from compiled code:");
2164 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2165 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2166 int64_t mono_i = _nof_interface_calls;
2167 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2168 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2169 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2170 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2171 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2172 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2173 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2174 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2175 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2176 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2177 tty->cr();
2178 tty->print_cr("Note 1: counter updates are not MT-safe.");
2179 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2180 tty->print_cr(" %% in nested categories are relative to their category");
2181 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2182 tty->cr();
2395 AdapterFingerPrint::equals>;
2396 static AdapterHandlerTable* _adapter_handler_table;
2397
// Find an entry with the same fingerprint if it exists
2399 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2400 NOT_PRODUCT(_lookups++);
2401 assert_lock_strong(AdapterHandlerLibrary_lock);
2402 AdapterFingerPrint fp(total_args_passed, sig_bt);
2403 AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2404 if (entry != nullptr) {
2405 #ifndef PRODUCT
2406 if (fp.is_compact()) _compact++;
2407 _hits++;
2408 #endif
2409 return *entry;
2410 }
2411 return nullptr;
2412 }
2413
2414 #ifndef PRODUCT
2415 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2416 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2417 return sizeof(*key) + sizeof(*a);
2418 };
2419 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2420 ts.print(st, "AdapterHandlerTable");
2421 st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2422 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2423 st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2424 _lookups, _equals, _hits, _compact);
2425 }
2426 #endif // !PRODUCT
2427
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
//
// Pre-computed shared adapter entries; judging by their names these cover the
// most common simple signatures (no args, single int/oop, oop+int, oop+oop)
// plus a handler for abstract methods — confirm against the initialization
// code elsewhere in this file.
AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
// Size of the shared code buffer used for adapter generation (16 KB).
const int AdapterHandlerLibrary_size = 16*K;
// Lazily-created scratch blob returned by buffer_blob() below.
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
// Accessor for the library's shared BufferBlob (_buffer). May return nullptr
// if the buffer has not been created yet.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  return _buffer;
}
2442
2443 static void post_adapter_creation(const AdapterBlob* new_adapter,
2444 const AdapterHandlerEntry* entry) {
2445 if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2446 char blob_id[256];
3092 assert(found, "Should have found handler");
3093 }
3094
3095 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3096 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3097 if (get_i2c_entry() != nullptr) {
3098 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3099 }
3100 if (get_c2i_entry() != nullptr) {
3101 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3102 }
3103 if (get_c2i_unverified_entry() != nullptr) {
3104 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3105 }
3106 if (get_c2i_no_clinit_check_entry() != nullptr) {
3107 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3108 }
3109 st->cr();
3110 }
3111
// Re-arm the current thread's reserved stack zone after it has been used,
// and reset the reserved-stack activation watermark to the stack base.
// NOTE(review): the check_if_disabled flag presumably makes this verify the
// zone was actually disabled — confirm in StackOverflow.
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  StackOverflow* overflow_state = current->stack_overflow_state();
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  // Setting the activation to the stack base marks "no reserved-stack
  // activation in progress" (the whole stack is below the watermark).
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3118
3119 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3120 ResourceMark rm(current);
3121 frame activation;
3122 nmethod* nm = nullptr;
3123 int count = 1;
3124
3125 assert(fr.is_java_frame(), "Must start on Java frame");
3126
3127 RegisterMap map(JavaThread::current(),
3128 RegisterMap::UpdateMap::skip,
3129 RegisterMap::ProcessFrames::skip,
3130 RegisterMap::WalkContinuation::skip); // don't walk continuations
3131 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|