49 #include "metaprogramming/primitiveConversions.hpp"
50 #include "oops/klass.hpp"
51 #include "oops/method.inline.hpp"
52 #include "oops/objArrayKlass.hpp"
53 #include "oops/oop.inline.hpp"
54 #include "prims/forte.hpp"
55 #include "prims/jvmtiExport.hpp"
56 #include "prims/jvmtiThreadState.hpp"
57 #include "prims/methodHandles.hpp"
58 #include "prims/nativeLookup.hpp"
59 #include "runtime/arguments.hpp"
60 #include "runtime/atomic.hpp"
61 #include "runtime/basicLock.inline.hpp"
62 #include "runtime/frame.inline.hpp"
63 #include "runtime/handles.inline.hpp"
64 #include "runtime/init.hpp"
65 #include "runtime/interfaceSupport.inline.hpp"
66 #include "runtime/java.hpp"
67 #include "runtime/javaCalls.hpp"
68 #include "runtime/jniHandles.inline.hpp"
69 #include "runtime/perfData.hpp"
70 #include "runtime/sharedRuntime.hpp"
71 #include "runtime/stackWatermarkSet.hpp"
72 #include "runtime/stubRoutines.hpp"
73 #include "runtime/synchronizer.inline.hpp"
74 #include "runtime/timerTrace.hpp"
75 #include "runtime/vframe.inline.hpp"
76 #include "runtime/vframeArray.hpp"
77 #include "runtime/vm_version.hpp"
78 #include "utilities/copy.hpp"
79 #include "utilities/dtrace.hpp"
80 #include "utilities/events.hpp"
81 #include "utilities/globalDefinitions.hpp"
82 #include "utilities/resourceHash.hpp"
83 #include "utilities/macros.hpp"
84 #include "utilities/xmlstream.hpp"
85 #ifdef COMPILER1
86 #include "c1/c1_Runtime1.hpp"
87 #endif
88 #if INCLUDE_JFR
89 #include "jfr/jfr.hpp"
90 #endif
91
92 // Shared runtime stub routines reside in their own unique blob with a
93 // single entry point
94
95
// Define one static blob field per shared runtime stub; the list of stubs
// comes from SHARED_STUBS_DO (declared in the corresponding header).
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

// Continuation yield stub; defined separately from the SHARED_STUBS_DO set.
nmethod* SharedRuntime::_cont_doYield_stub;

// Human-readable stub blob names, one per entry, in SHARED_STUBS_DO order.
#define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
const char *SharedRuntime::_stub_names[] = {
  SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
};
107
108 //----------------------------generate_stubs-----------------------------------
109 void SharedRuntime::generate_initial_stubs() {
110 // Build this early so it's available for the interpreter.
111 _throw_StackOverflowError_blob =
112 generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
113 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
114 }
115
// Generate the main set of shared runtime stub blobs: call-resolution blobs,
// safepoint polling handlers, and the deoptimization blob.
void SharedRuntime::generate_stubs() {
  // Blobs entered when a compiled call lands on a stale or illegal target.
  _wrong_method_blob =
    generate_resolve_blob(SharedStubId::wrong_method_id,
                          CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
  _wrong_method_abstract_blob =
    generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
                          CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));

  AdapterHandlerLibrary::initialize();

#if COMPILER2_OR_JVMCI
  // Vectors are generated only by C2 and JVMCI.
  bool support_wide = is_wide_vector(MaxVectorSize);
  if (support_wide) {
    // Extra handler that also preserves wide vector registers across the poll.
    _polling_page_vectors_safepoint_handler_blob =
      generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
                            CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
  }
#endif // COMPILER2_OR_JVMCI
  // All three polling blobs funnel into the same runtime entry point.
  _polling_page_safepoint_handler_blob =
    generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
                          CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
  _polling_page_return_handler_blob =
    generate_handler_blob(SharedStubId::polling_page_return_handler_id,
                          CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));

  generate_deopt_blob();
}
172
#if INCLUDE_JFR
//------------------------------generate jfr runtime stubs ------
// Build the two JFR support blobs, timing the work under the startuptime log.
void SharedRuntime::generate_jfr_stubs() {
  ResourceMark rm;
  TraceTime timer("SharedRuntime generate_jfr_stubs", TRACETIME_LOG(Info, startuptime));

  _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
  _jfr_return_lease_blob = generate_jfr_return_lease();
}

#endif // INCLUDE_JFR
185
186 #include <math.h>
187
188 // Implementation of SharedRuntime
189
#ifndef PRODUCT
// For statistics
// Counters for slow-path call resolution and implicit-exception events.
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-site shape counters, printed by print_call_statistics().
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

// Counters for allocation, monitor and copy stub slow paths.
uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Fixed-size table recording distinct IC-miss sites (see trace_ic_miss).
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
235
236
237 void SharedRuntime::trace_ic_miss(address at) {
238 for (int i = 0; i < _ICmiss_index; i++) {
239 if (_ICmiss_at[i] == at) {
240 _ICmiss_count[i]++;
241 return;
242 }
243 }
244 int index = _ICmiss_index++;
245 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
246 _ICmiss_at[index] = at;
247 _ICmiss_count[index] = 1;
248 }
249
250 void SharedRuntime::print_ic_miss_histogram() {
251 if (ICMissHistogram) {
252 tty->print_cr("IC Miss Histogram:");
253 int tot_misses = 0;
254 for (int i = 0; i < _ICmiss_index; i++) {
255 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
256 tot_misses += _ICmiss_count[i];
257 }
258 tty->print_cr("Total IC misses: %7d", tot_misses);
259 }
260 }
261 #endif // PRODUCT
262
263
264 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
265 return x * y;
266 JRT_END
267
268
269 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
270 if (x == min_jlong && y == CONST64(-1)) {
271 return x;
272 } else {
273 return x / y;
274 }
275 JRT_END
276
277
278 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
279 if (x == min_jlong && y == CONST64(-1)) {
280 return 0;
281 } else {
708 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
709 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
710 JNIHandles::destroy_local(vthread);
711 JRT_END
712 #endif // INCLUDE_JVMTI
713
// The interpreter code to call this tracing function is only
// called/generated when UL is on for redefine, class and has the right level
// and tags. Since obsolete methods are never compiled, we don't have
// to modify the compilers to generate calls to this function.
//
JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
    JavaThread* thread, Method* method))
  if (method->is_obsolete()) {
    // We are calling an obsolete method, but this is not necessarily
    // an error. Our method could have been redefined just after we
    // fetched the Method* from the constant pool.
    ResourceMark rm;
    log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
  }
  // The result carries no information; this entry unconditionally reports 0.
  return 0;
JRT_END
730
731 // ret_pc points into caller; we are returning caller's exception handler
732 // for given exception
// Note that the implementation of this method assumes it's only called when an exception has actually occurred
734 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
735 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
736 assert(nm != nullptr, "must exist");
737 ResourceMark rm;
738
739 #if INCLUDE_JVMCI
740 if (nm->is_compiled_by_jvmci()) {
741 // lookup exception handler for this pc
742 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
743 ExceptionHandlerTable table(nm);
744 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
745 if (t != nullptr) {
746 return nm->code_begin() + t->pco();
747 } else {
1347
1348 // determine call info & receiver
1349 // note: a) receiver is null for static calls
1350 // b) an exception is thrown if receiver is null for non-static calls
1351 CallInfo call_info;
1352 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1353 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1354
1355 NoSafepointVerifier nsv;
1356
1357 methodHandle callee_method(current, call_info.selected_method());
1358
1359 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1360 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1361 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1362 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1363 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1364
1365 assert(!caller_nm->is_unloading(), "It should not be unloading");
1366
1367 #ifndef PRODUCT
1368 // tracing/debugging/statistics
1369 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1370 (is_virtual) ? (&_resolve_virtual_ctr) :
1371 (&_resolve_static_ctr);
1372 Atomic::inc(addr);
1373
1374 if (TraceCallFixup) {
1375 ResourceMark rm(current);
1376 tty->print("resolving %s%s (%s) call to",
1377 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1378 Bytecodes::name(invoke_code));
1379 callee_method->print_short_name(tty);
1380 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1381 p2i(caller_frame.pc()), p2i(callee_method->code()));
1382 }
1383 #endif
1384
1385 if (invoke_code == Bytecodes::_invokestatic) {
1386 assert(callee_method->method_holder()->is_initialized() ||
1387 callee_method->method_holder()->is_reentrant_initialization(current),
1388 "invalid class initialization state for invoke_static");
1389 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1390 // In order to keep class initialization check, do not patch call
1391 // site for static call when the class is not fully initialized.
1392 // Proper check is enforced by call site re-resolution on every invocation.
1393 //
1409
1410 // Make sure the callee nmethod does not get deoptimized and removed before
1411 // we are done patching the code.
1412
1413
1414 CompiledICLocker ml(caller_nm);
1415 if (is_virtual && !is_optimized) {
1416 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1417 inline_cache->update(&call_info, receiver->klass());
1418 } else {
1419 // Callsite is a direct call - set it to the destination method
1420 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1421 callsite->set(callee_method);
1422 }
1423
1424 return callee_method;
1425 }
1426
1427 // Inline caches exist only in compiled code
1428 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1429 #ifdef ASSERT
1430 RegisterMap reg_map(current,
1431 RegisterMap::UpdateMap::skip,
1432 RegisterMap::ProcessFrames::include,
1433 RegisterMap::WalkContinuation::skip);
1434 frame stub_frame = current->last_frame();
1435 assert(stub_frame.is_runtime_frame(), "sanity check");
1436 frame caller_frame = stub_frame.sender(®_map);
1437 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1438 #endif /* ASSERT */
1439
1440 methodHandle callee_method;
1441 JRT_BLOCK
1442 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1443 // Return Method* through TLS
1444 current->set_vm_result_2(callee_method());
1445 JRT_BLOCK_END
1446 // return compiled code entry point after potential safepoints
1447 return get_resolved_entry(current, callee_method);
1448 JRT_END
1449
1450
1451 // Handle call site that has been made non-entrant
1452 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1453 // 6243940 We might end up in here if the callee is deoptimized
1454 // as we race to call it. We don't want to take a safepoint if
1455 // the caller was interpreted because the caller frame will look
1456 // interpreted to the stack walkers and arguments are now
1457 // "compiled" so it is much better to make this transition
1458 // invisible to the stack walking code. The i2c path will
1459 // place the callee method in the callee_target. It is stashed
1460 // there because if we try and find the callee by normal means a
1461 // safepoint is possible and have trouble gc'ing the compiled args.
1462 RegisterMap reg_map(current,
1463 RegisterMap::UpdateMap::skip,
1464 RegisterMap::ProcessFrames::include,
1465 RegisterMap::WalkContinuation::skip);
1466 frame stub_frame = current->last_frame();
1467 assert(stub_frame.is_runtime_frame(), "sanity check");
1468 frame caller_frame = stub_frame.sender(®_map);
1469
1470 if (caller_frame.is_interpreted_frame() ||
1471 caller_frame.is_entry_frame() ||
1472 caller_frame.is_upcall_stub_frame()) {
1485 // so bypassing it in c2i adapter is benign.
1486 return callee->get_c2i_no_clinit_check_entry();
1487 } else {
1488 return callee->get_c2i_entry();
1489 }
1490 }
1491
1492 // Must be compiled to compiled path which is safe to stackwalk
1493 methodHandle callee_method;
1494 JRT_BLOCK
1495 // Force resolving of caller (if we called from compiled frame)
1496 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1497 current->set_vm_result_2(callee_method());
1498 JRT_BLOCK_END
1499 // return compiled code entry point after potential safepoints
1500 return get_resolved_entry(current, callee_method);
1501 JRT_END
1502
1503 // Handle abstract method call
1504 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1505 // Verbose error message for AbstractMethodError.
1506 // Get the called method from the invoke bytecode.
1507 vframeStream vfst(current, true);
1508 assert(!vfst.at_end(), "Java frame must exist");
1509 methodHandle caller(current, vfst.method());
1510 Bytecode_invoke invoke(caller, vfst.bci());
1511 DEBUG_ONLY( invoke.verify(); )
1512
1513 // Find the compiled caller frame.
1514 RegisterMap reg_map(current,
1515 RegisterMap::UpdateMap::include,
1516 RegisterMap::ProcessFrames::include,
1517 RegisterMap::WalkContinuation::skip);
1518 frame stubFrame = current->last_frame();
1519 assert(stubFrame.is_runtime_frame(), "must be");
1520 frame callerFrame = stubFrame.sender(®_map);
1521 assert(callerFrame.is_compiled_frame(), "must be");
1522
1523 // Install exception and return forward entry.
1524 address res = SharedRuntime::throw_AbstractMethodError_entry();
1531 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1532 }
1533 JRT_BLOCK_END
1534 return res;
1535 JRT_END
1536
1537 // return verified_code_entry if interp_only_mode is not set for the current thread;
1538 // otherwise return c2i entry.
1539 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1540 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1541 // In interp_only_mode we need to go to the interpreted entry
1542 // The c2i won't patch in this mode -- see fixup_callers_callsite
1543 return callee_method->get_c2i_entry();
1544 }
1545 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1546 return callee_method->verified_code_entry();
1547 }
1548
1549 // resolve a static call and patch code
1550 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1551 methodHandle callee_method;
1552 bool enter_special = false;
1553 JRT_BLOCK
1554 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1555 current->set_vm_result_2(callee_method());
1556 JRT_BLOCK_END
1557 // return compiled code entry point after potential safepoints
1558 return get_resolved_entry(current, callee_method);
1559 JRT_END
1560
// resolve virtual call and update inline cache to monomorphic
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    // Publish the resolved Method* to the caller through TLS.
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1571
1572
// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache). Patch code to resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    // Publish the resolved Method* to the caller through TLS.
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1584
1585 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1586 JavaThread* current = THREAD;
1587 ResourceMark rm(current);
1588 CallInfo call_info;
1589 Bytecodes::Code bc;
1590
1591 // receiver is null for static calls. An exception is thrown for null
1592 // receivers for non-static calls
1593 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1594
1595 methodHandle callee_method(current, call_info.selected_method());
1596
1597 #ifndef PRODUCT
1598 Atomic::inc(&_ic_miss_ctr);
1599
1600 // Statistics & Tracing
1601 if (TraceCallFixup) {
1602 ResourceMark rm(current);
1603 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1604 callee_method->print_short_name(tty);
1605 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1606 }
1607
1608 if (ICMissHistogram) {
1609 MutexLocker m(VMStatistic_lock);
1610 RegisterMap reg_map(current,
1611 RegisterMap::UpdateMap::skip,
1612 RegisterMap::ProcessFrames::include,
1613 RegisterMap::WalkContinuation::skip);
1614 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1615 // produce statistics under the lock
1616 trace_ic_miss(f.pc());
1617 }
1618 #endif
1619
1702 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1703 cdc->set_to_clean();
1704 break;
1705 }
1706
1707 case relocInfo::virtual_call_type: {
1708 // compiled, dispatched call (which used to call an interpreted method)
1709 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1710 inline_cache->set_to_clean();
1711 break;
1712 }
1713 default:
1714 break;
1715 }
1716 }
1717 }
1718 }
1719
1720 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1721
1722
1723 #ifndef PRODUCT
1724 Atomic::inc(&_wrong_method_ctr);
1725
1726 if (TraceCallFixup) {
1727 ResourceMark rm(current);
1728 tty->print("handle_wrong_method reresolving call to");
1729 callee_method->print_short_name(tty);
1730 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1731 }
1732 #endif
1733
1734 return callee_method;
1735 }
1736
1737 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1738 // The faulting unsafe accesses should be changed to throw the error
1739 // synchronously instead. Meanwhile the faulting instruction will be
1740 // skipped over (effectively turning it into a no-op) and an
1741 // asynchronous exception will be raised which the thread will
1742 // handle at a later point. If the instruction is a load it will
1743 // return garbage.
1744
1745 // Request an async exception.
2003 // This is only called when CheckJNICalls is true, and only
2004 // for virtual thread termination.
2005 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
2006 assert(CheckJNICalls, "Only call this when checking JNI usage");
2007 if (log_is_enabled(Debug, jni)) {
2008 JavaThread* current = JavaThread::current();
2009 int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2010 int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2011 log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2012 ") exiting with Objects still locked by JNI MonitorEnter.",
2013 vthread_id, carrier_id);
2014 }
2015 JRT_END
2016
2017 #ifndef PRODUCT
2018
// Dump all shared-runtime counters (non-PRODUCT builds only). Counters that
// are zero are suppressed for the optional slow-path groups.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram();

  // Dump the JRT_ENTRY counters
  if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
  if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
  if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
  if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
  if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
  if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);

  // Call-resolution counters are always printed, even when zero.
  tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
  tty->print_cr("%5u wrong method", _wrong_method_ctr);
  tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
  tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
  tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);

  if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
  if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
  if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
  if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
  if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
  if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
  if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
  if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
  if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
  if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
  if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
  if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
  if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
  if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
  if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
  if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
  if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);

  AdapterHandlerLibrary::print_statistics();

  if (xtty != nullptr) xtty->tail("statistics");
}
2061
2062 inline double percent(int64_t x, int64_t y) {
2063 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2064 }
2065
// Histogram over the arity (and argument word size) of all compiled methods
// currently in the code cache; populated by walking nmethods.
class MethodArityHistogram {
 public:
  enum { MAX_ARITY = 256 };
 private:
  static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
  static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
  static uint64_t _total_compiled_calls;
  static uint64_t _max_compiled_calls_per_method;
  static int _max_arity; // max. arity seen
  static int _max_size; // max. arg size seen

  // Callback for CodeCache::nmethods_do: fold one nmethod into the histogram.
  static void add_method_to_histogram(nmethod* nm) {
    Method* method = (nm == nullptr) ? nullptr : nm->method();
    if (method != nullptr) {
      ArgumentCount args(method->signature());
      // Receiver counts as one extra argument for non-static methods.
      int arity = args.size() + (method->is_static() ? 0 : 1);
    // Take the Compile_lock to protect against changes in the CodeBlob structures
    MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
    // Take the CodeCache_lock to protect against changes in the CodeHeap structure
    MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    _max_arity = _max_size = 0;
    _total_compiled_calls = 0;
    _max_compiled_calls_per_method = 0;
    for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
    CodeCache::nmethods_do(add_method_to_histogram);
    print_histogram();
  }
};
2138
// Storage for MethodArityHistogram's static counters and tables.
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2145
2146 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2147 tty->print_cr("Calls from compiled code:");
2148 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2149 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2150 int64_t mono_i = _nof_interface_calls;
2151 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2152 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2153 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2154 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2155 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2156 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2157 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2158 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2159 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2160 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2161 tty->cr();
2162 tty->print_cr("Note 1: counter updates are not MT-safe.");
2163 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2164 tty->print_cr(" %% in nested categories are relative to their category");
2165 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2166 tty->cr();
2379 AdapterFingerPrint::equals>;
2380 static AdapterHandlerTable* _adapter_handler_table;
2381
// Find an entry with the same fingerprint if it exists
// Returns nullptr when no adapter with this (arg count, signature) shape has
// been registered yet. Caller must hold AdapterHandlerLibrary_lock.
static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
  NOT_PRODUCT(_lookups++);
  assert_lock_strong(AdapterHandlerLibrary_lock);
  AdapterFingerPrint fp(total_args_passed, sig_bt);
  AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
  if (entry != nullptr) {
#ifndef PRODUCT
    // Track hit-rate statistics in debug builds only.
    if (fp.is_compact()) _compact++;
    _hits++;
#endif
    return *entry;
  }
  return nullptr;
}
2397
2398 #ifndef PRODUCT
2399 static void print_table_statistics() {
2400 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2401 return sizeof(*key) + sizeof(*a);
2402 };
2403 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2404 ts.print(tty, "AdapterHandlerTable");
2405 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2406 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2407 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2408 _lookups, _equals, _hits, _compact);
2409 }
2410 #endif
2411
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
// Pre-built handlers for the most common short signatures; the abstract
// handler is used for methods that can never be invoked directly.
AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
// Scratch buffer used while generating adapter code.
const int AdapterHandlerLibrary_size = 16*K;
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;

// Accessor for the shared adapter-generation scratch buffer.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  return _buffer;
}
2426
2427 static void post_adapter_creation(const AdapterBlob* new_adapter,
2428 const AdapterHandlerEntry* entry) {
2429 if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2430 char blob_id[256];
3078 assert(found, "Should have found handler");
3079 }
3080
3081 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3082 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3083 if (get_i2c_entry() != nullptr) {
3084 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3085 }
3086 if (get_c2i_entry() != nullptr) {
3087 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3088 }
3089 if (get_c2i_unverified_entry() != nullptr) {
3090 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3091 }
3092 if (get_c2i_no_clinit_check_entry() != nullptr) {
3093 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3094 }
3095 st->cr();
3096 }
3097
#ifndef PRODUCT

// Forward to the file-local table-statistics printer (non-PRODUCT only).
void AdapterHandlerLibrary::print_statistics() {
  print_table_statistics();
}

#endif /* PRODUCT */
3105
3106 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3107 assert(current == JavaThread::current(), "pre-condition");
3108 StackOverflow* overflow_state = current->stack_overflow_state();
3109 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3110 overflow_state->set_reserved_stack_activation(current->stack_base());
3111 JRT_END
3112
3113 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3114 ResourceMark rm(current);
3115 frame activation;
3116 nmethod* nm = nullptr;
3117 int count = 1;
3118
3119 assert(fr.is_java_frame(), "Must start on Java frame");
3120
3121 RegisterMap map(JavaThread::current(),
3122 RegisterMap::UpdateMap::skip,
3123 RegisterMap::ProcessFrames::skip,
3124 RegisterMap::WalkContinuation::skip); // don't walk continuations
3125 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|
49 #include "metaprogramming/primitiveConversions.hpp"
50 #include "oops/klass.hpp"
51 #include "oops/method.inline.hpp"
52 #include "oops/objArrayKlass.hpp"
53 #include "oops/oop.inline.hpp"
54 #include "prims/forte.hpp"
55 #include "prims/jvmtiExport.hpp"
56 #include "prims/jvmtiThreadState.hpp"
57 #include "prims/methodHandles.hpp"
58 #include "prims/nativeLookup.hpp"
59 #include "runtime/arguments.hpp"
60 #include "runtime/atomic.hpp"
61 #include "runtime/basicLock.inline.hpp"
62 #include "runtime/frame.inline.hpp"
63 #include "runtime/handles.inline.hpp"
64 #include "runtime/init.hpp"
65 #include "runtime/interfaceSupport.inline.hpp"
66 #include "runtime/java.hpp"
67 #include "runtime/javaCalls.hpp"
68 #include "runtime/jniHandles.inline.hpp"
69 #include "runtime/perfData.inline.hpp"
70 #include "runtime/sharedRuntime.hpp"
71 #include "runtime/stackWatermarkSet.hpp"
72 #include "runtime/stubRoutines.hpp"
73 #include "runtime/synchronizer.inline.hpp"
74 #include "runtime/timerTrace.hpp"
75 #include "runtime/vframe.inline.hpp"
76 #include "runtime/vframeArray.hpp"
77 #include "runtime/vm_version.hpp"
78 #include "services/management.hpp"
79 #include "utilities/copy.hpp"
80 #include "utilities/dtrace.hpp"
81 #include "utilities/events.hpp"
82 #include "utilities/globalDefinitions.hpp"
83 #include "utilities/resourceHash.hpp"
84 #include "utilities/macros.hpp"
85 #include "utilities/xmlstream.hpp"
86 #ifdef COMPILER1
87 #include "c1/c1_Runtime1.hpp"
88 #endif
89 #if INCLUDE_JFR
90 #include "jfr/jfr.hpp"
91 #endif
92
93 // Shared runtime stub routines reside in their own unique blob with a
94 // single entry point
95
96
// Define the static blob-pointer fields declared via SHARED_STUBS_DO
// (one field per shared runtime stub).
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

// Continuation doYield stub pointer.
nmethod* SharedRuntime::_cont_doYield_stub;

// Elapsed/thread-time tick counters for the call-resolution and IC-miss
// runtime entry points; created in generate_stubs() when UsePerfData is
// on, otherwise left null.
PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time = nullptr;

// Human-readable blob names, in SHARED_STUBS_DO declaration order.
#define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
const char *SharedRuntime::_stub_names[] = {
  SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
};
114
115 //----------------------------generate_stubs-----------------------------------
// Generate the stubs that must exist before most of the VM is up.
void SharedRuntime::generate_initial_stubs() {
  // Build this early so it's available for the interpreter.
  _throw_StackOverflowError_blob =
    generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
                             CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
}
122
123 void SharedRuntime::generate_stubs() {
124 _wrong_method_blob =
125 generate_resolve_blob(SharedStubId::wrong_method_id,
126 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
127 _wrong_method_abstract_blob =
128 generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
129 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));
158
159 AdapterHandlerLibrary::initialize();
160
161 #if COMPILER2_OR_JVMCI
162 // Vectors are generated only by C2 and JVMCI.
163 bool support_wide = is_wide_vector(MaxVectorSize);
164 if (support_wide) {
165 _polling_page_vectors_safepoint_handler_blob =
166 generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
167 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
168 }
169 #endif // COMPILER2_OR_JVMCI
170 _polling_page_safepoint_handler_blob =
171 generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
172 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
173 _polling_page_return_handler_blob =
174 generate_handler_blob(SharedStubId::polling_page_return_handler_id,
175 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
176
177 generate_deopt_blob();
178
179 if (UsePerfData) {
180 EXCEPTION_MARK;
181 NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resovle_opt_virtual_call");
182 NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resovle_virtual_call");
183 NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resovle_static_call");
184 NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
185 NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time , SUN_CI, "ic_miss");
186 if (HAS_PENDING_EXCEPTION) {
187 vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
188 }
189 }
190 }
191
192 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
193 st->print(" %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
194 if (TraceThreadTime) {
195 st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
196 }
197 st->print(" / %5d events", cnt);
198 st->cr();
199 }
200
// Report the per-entry-point resolution/miss timers plus a grand total.
// Requires UsePerfData (the counters are only allocated in that case).
void SharedRuntime::print_counters_on(outputStream* st) {
  st->print_cr("SharedRuntime:");
  if (UsePerfData) {
    // One line per runtime entry point.
    print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
    print_counter_on(st, "resolve_virtual_call:", _perf_resolve_virtual_total_time, _resolve_virtual_ctr);
    print_counter_on(st, "resolve_static_call:", _perf_resolve_static_total_time, _resolve_static_ctr);
    print_counter_on(st, "handle_wrong_method:", _perf_handle_wrong_method_total_time, _wrong_method_ctr);
    print_counter_on(st, "ic_miss:", _perf_ic_miss_total_time, _ic_miss_ctr);

    // Sum raw ticks first, convert to microseconds once.
    jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_static_total_time->elapsed_counter_value() +
                                                          _perf_handle_wrong_method_total_time->elapsed_counter_value() +
                                                          _perf_ic_miss_total_time->elapsed_counter_value());
    st->print("Total: " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
    if (TraceThreadTime) {
      // Same aggregation for thread-local CPU time.
      jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
                                                           _perf_resolve_virtual_total_time->thread_counter_value() +
                                                           _perf_resolve_static_total_time->thread_counter_value() +
                                                           _perf_handle_wrong_method_total_time->thread_counter_value() +
                                                           _perf_ic_miss_total_time->thread_counter_value());
      st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);

    }
    st->cr();
  } else {
    st->print_cr(" no data (UsePerfData is turned off)");
  }
}
230
231 #if INCLUDE_JFR
232 //------------------------------generate jfr runtime stubs ------
// Generate the JFR runtime stubs (checkpoint write and lease return),
// timing the work under the startuptime log tag.
void SharedRuntime::generate_jfr_stubs() {
  ResourceMark rm;
  const char* timer_msg = "SharedRuntime generate_jfr_stubs";
  TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));

  _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
  _jfr_return_lease_blob = generate_jfr_return_lease();
}
241
242 #endif // INCLUDE_JFR
243
244 #include <math.h>
245
246 // Implementation of SharedRuntime
247
248 // For statistics
// Event counters for the call-resolution / IC-miss entry points.
// Present in all builds; bumped with Atomic::inc from the resolution paths.
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
254
255 #ifndef PRODUCT
256 uint SharedRuntime::_implicit_null_throws = 0;
257 uint SharedRuntime::_implicit_div0_throws = 0;
258
259 int64_t SharedRuntime::_nof_normal_calls = 0;
260 int64_t SharedRuntime::_nof_inlined_calls = 0;
261 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
262 int64_t SharedRuntime::_nof_static_calls = 0;
263 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
264 int64_t SharedRuntime::_nof_interface_calls = 0;
265 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
266
267 uint SharedRuntime::_new_instance_ctr=0;
268 uint SharedRuntime::_new_array_ctr=0;
269 uint SharedRuntime::_multi2_ctr=0;
270 uint SharedRuntime::_multi3_ctr=0;
271 uint SharedRuntime::_multi4_ctr=0;
272 uint SharedRuntime::_multi5_ctr=0;
273 uint SharedRuntime::_mon_enter_stub_ctr=0;
274 uint SharedRuntime::_mon_exit_stub_ctr=0;
275 uint SharedRuntime::_mon_enter_ctr=0;
289 uint SharedRuntime::_unsafe_set_memory_ctr=0;
290
291 int SharedRuntime::_ICmiss_index = 0;
292 int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
293 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
294
295
296 void SharedRuntime::trace_ic_miss(address at) {
297 for (int i = 0; i < _ICmiss_index; i++) {
298 if (_ICmiss_at[i] == at) {
299 _ICmiss_count[i]++;
300 return;
301 }
302 }
303 int index = _ICmiss_index++;
304 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
305 _ICmiss_at[index] = at;
306 _ICmiss_count[index] = 1;
307 }
308
309 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
310 if (ICMissHistogram) {
311 st->print_cr("IC Miss Histogram:");
312 int tot_misses = 0;
313 for (int i = 0; i < _ICmiss_index; i++) {
314 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
315 tot_misses += _ICmiss_count[i];
316 }
317 st->print_cr("Total IC misses: %7d", tot_misses);
318 }
319 }
320 #endif // !PRODUCT
321
322
// 64-bit multiply runtime leaf. Parameters arrive as (y, x); the order is
// irrelevant for multiplication. NOTE(review): presumably used by platforms
// without a direct 64-bit multiply -- confirm against the platform stubs.
JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
  return x * y;
JRT_END
326
327
// 64-bit divide runtime leaf: returns x / y (x is the dividend, y the
// divisor despite the parameter order).
JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
  // min_jlong / -1 is the one overflowing case (the true quotient is not
  // representable); Java defines the result as min_jlong, and the hardware
  // divide could trap on it, so special-case it here.
  if (x == min_jlong && y == CONST64(-1)) {
    return x;
  } else {
    return x / y;
  }
JRT_END
335
336
337 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
338 if (x == min_jlong && y == CONST64(-1)) {
339 return 0;
340 } else {
767 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
768 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
769 JNIHandles::destroy_local(vthread);
770 JRT_END
771 #endif // INCLUDE_JVMTI
772
773 // The interpreter code to call this tracing function is only
774 // called/generated when UL is on for redefine, class and has the right level
775 // and tags. Since obsolete methods are never compiled, we don't have
776 // to modify the compilers to generate calls to this function.
777 //
// Trace a method entry for RedefineClasses / bytecode-level logging.
// Called from interpreter-generated code only (see comment above).
JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
    JavaThread* thread, Method* method))
  if (method->is_obsolete()) {
    // We are calling an obsolete method, but this is not necessarily
    // an error. Our method could have been redefined just after we
    // fetched the Method* from the constant pool.
    ResourceMark rm;
    log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
  }

  // Optional per-entry trace line with kind (static/virtual), signature,
  // and native/clinit/initialization-state markers.
  LogStreamHandle(Trace, interpreter, bytecode) log;
  if (log.is_enabled()) {
    ResourceMark rm;
    log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
              p2i(thread),
              (method->is_static() ? "static" : "virtual"),
              method->name_and_sig_as_C_string(),
              (method->is_native() ? " native" : ""),
              (thread->class_being_initialized() != nullptr ? " clinit" : ""),
              (method->method_holder()->is_initialized() ? "" : " being_initialized"));
  }
  // Returns 0 unconditionally.
  return 0;
JRT_END
801
802 // ret_pc points into caller; we are returning caller's exception handler
803 // for given exception
804 // Note that the implementation of this method assumes it's only called when an exception has actually occured
805 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
806 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
807 assert(nm != nullptr, "must exist");
808 ResourceMark rm;
809
810 #if INCLUDE_JVMCI
811 if (nm->is_compiled_by_jvmci()) {
812 // lookup exception handler for this pc
813 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
814 ExceptionHandlerTable table(nm);
815 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
816 if (t != nullptr) {
817 return nm->code_begin() + t->pco();
818 } else {
1418
1419 // determine call info & receiver
1420 // note: a) receiver is null for static calls
1421 // b) an exception is thrown if receiver is null for non-static calls
1422 CallInfo call_info;
1423 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1424 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1425
1426 NoSafepointVerifier nsv;
1427
1428 methodHandle callee_method(current, call_info.selected_method());
1429
1430 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1431 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1432 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1433 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1434 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1435
1436 assert(!caller_nm->is_unloading(), "It should not be unloading");
1437
1438 // tracing/debugging/statistics
1439 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1440 (is_virtual) ? (&_resolve_virtual_ctr) :
1441 (&_resolve_static_ctr);
1442 Atomic::inc(addr);
1443
1444 #ifndef PRODUCT
1445 if (TraceCallFixup) {
1446 ResourceMark rm(current);
1447 tty->print("resolving %s%s (%s) call to",
1448 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1449 Bytecodes::name(invoke_code));
1450 callee_method->print_short_name(tty);
1451 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1452 p2i(caller_frame.pc()), p2i(callee_method->code()));
1453 }
1454 #endif
1455
1456 if (invoke_code == Bytecodes::_invokestatic) {
1457 assert(callee_method->method_holder()->is_initialized() ||
1458 callee_method->method_holder()->is_reentrant_initialization(current),
1459 "invalid class initialization state for invoke_static");
1460 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1461 // In order to keep class initialization check, do not patch call
1462 // site for static call when the class is not fully initialized.
1463 // Proper check is enforced by call site re-resolution on every invocation.
1464 //
1480
1481 // Make sure the callee nmethod does not get deoptimized and removed before
1482 // we are done patching the code.
1483
1484
1485 CompiledICLocker ml(caller_nm);
1486 if (is_virtual && !is_optimized) {
1487 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1488 inline_cache->update(&call_info, receiver->klass());
1489 } else {
1490 // Callsite is a direct call - set it to the destination method
1491 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1492 callsite->set(callee_method);
1493 }
1494
1495 return callee_method;
1496 }
1497
1498 // Inline caches exist only in compiled code
// Runtime entry for an inline-cache miss in compiled code: re-resolve the
// call, stash the Method* in TLS for the stub, and return the entry point
// to continue at.
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
  PerfTraceTime timer(_perf_ic_miss_total_time);

#ifdef ASSERT
  // Sanity: an IC miss can only come from compiled code, never from an
  // interpreted, entry, or upcall-stub frame.
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stub_frame = current->last_frame();
  assert(stub_frame.is_runtime_frame(), "sanity check");
  frame caller_frame = stub_frame.sender(&reg_map);
  assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
#endif /* ASSERT */

  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
    // Return Method* through TLS
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1522
1523
1524 // Handle call site that has been made non-entrant
1525 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1526 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1527
1528 // 6243940 We might end up in here if the callee is deoptimized
1529 // as we race to call it. We don't want to take a safepoint if
1530 // the caller was interpreted because the caller frame will look
1531 // interpreted to the stack walkers and arguments are now
1532 // "compiled" so it is much better to make this transition
1533 // invisible to the stack walking code. The i2c path will
1534 // place the callee method in the callee_target. It is stashed
1535 // there because if we try and find the callee by normal means a
1536 // safepoint is possible and have trouble gc'ing the compiled args.
1537 RegisterMap reg_map(current,
1538 RegisterMap::UpdateMap::skip,
1539 RegisterMap::ProcessFrames::include,
1540 RegisterMap::WalkContinuation::skip);
1541 frame stub_frame = current->last_frame();
1542 assert(stub_frame.is_runtime_frame(), "sanity check");
1543 frame caller_frame = stub_frame.sender(®_map);
1544
1545 if (caller_frame.is_interpreted_frame() ||
1546 caller_frame.is_entry_frame() ||
1547 caller_frame.is_upcall_stub_frame()) {
1560 // so bypassing it in c2i adapter is benign.
1561 return callee->get_c2i_no_clinit_check_entry();
1562 } else {
1563 return callee->get_c2i_entry();
1564 }
1565 }
1566
1567 // Must be compiled to compiled path which is safe to stackwalk
1568 methodHandle callee_method;
1569 JRT_BLOCK
1570 // Force resolving of caller (if we called from compiled frame)
1571 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1572 current->set_vm_result_2(callee_method());
1573 JRT_BLOCK_END
1574 // return compiled code entry point after potential safepoints
1575 return get_resolved_entry(current, callee_method);
1576 JRT_END
1577
1578 // Handle abstract method call
1579 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1580 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1581
1582 // Verbose error message for AbstractMethodError.
1583 // Get the called method from the invoke bytecode.
1584 vframeStream vfst(current, true);
1585 assert(!vfst.at_end(), "Java frame must exist");
1586 methodHandle caller(current, vfst.method());
1587 Bytecode_invoke invoke(caller, vfst.bci());
1588 DEBUG_ONLY( invoke.verify(); )
1589
1590 // Find the compiled caller frame.
1591 RegisterMap reg_map(current,
1592 RegisterMap::UpdateMap::include,
1593 RegisterMap::ProcessFrames::include,
1594 RegisterMap::WalkContinuation::skip);
1595 frame stubFrame = current->last_frame();
1596 assert(stubFrame.is_runtime_frame(), "must be");
1597 frame callerFrame = stubFrame.sender(®_map);
1598 assert(callerFrame.is_compiled_frame(), "must be");
1599
1600 // Install exception and return forward entry.
1601 address res = SharedRuntime::throw_AbstractMethodError_entry();
1608 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1609 }
1610 JRT_BLOCK_END
1611 return res;
1612 JRT_END
1613
1614 // return verified_code_entry if interp_only_mode is not set for the current thread;
1615 // otherwise return c2i entry.
1616 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1617 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1618 // In interp_only_mode we need to go to the interpreted entry
1619 // The c2i won't patch in this mode -- see fixup_callers_callsite
1620 return callee_method->get_c2i_entry();
1621 }
1622 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1623 return callee_method->verified_code_entry();
1624 }
1625
1626 // resolve a static call and patch code
1627 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1628 PerfTraceTime timer(_perf_resolve_static_total_time);
1629
1630 methodHandle callee_method;
1631 bool enter_special = false;
1632 JRT_BLOCK
1633 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1634 current->set_vm_result_2(callee_method());
1635 JRT_BLOCK_END
1636 // return compiled code entry point after potential safepoints
1637 return get_resolved_entry(current, callee_method);
1638 JRT_END
1639
1640 // resolve virtual call and update inline cache to monomorphic
// Runtime entry to resolve a virtual call site (updating its inline cache
// to monomorphic) and return the compiled entry point to continue at.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual = true, is_optimized = false: dispatched virtual call.
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    // Publish the Method* to the stub through thread-local storage.
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1652
1653
1654 // Resolve a virtual call that can be statically bound (e.g., always
1655 // monomorphic, so it has no inline cache). Patch code to resolved target.
// Runtime entry to resolve an optimized (statically bindable) virtual
// call; the call site is patched directly since no inline cache exists.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual = true, is_optimized = true.
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    // Publish the Method* to the stub through thread-local storage.
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1667
1668 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1669 JavaThread* current = THREAD;
1670 ResourceMark rm(current);
1671 CallInfo call_info;
1672 Bytecodes::Code bc;
1673
1674 // receiver is null for static calls. An exception is thrown for null
1675 // receivers for non-static calls
1676 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1677
1678 methodHandle callee_method(current, call_info.selected_method());
1679
1680 Atomic::inc(&_ic_miss_ctr);
1681
1682 #ifndef PRODUCT
1683 // Statistics & Tracing
1684 if (TraceCallFixup) {
1685 ResourceMark rm(current);
1686 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1687 callee_method->print_short_name(tty);
1688 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1689 }
1690
1691 if (ICMissHistogram) {
1692 MutexLocker m(VMStatistic_lock);
1693 RegisterMap reg_map(current,
1694 RegisterMap::UpdateMap::skip,
1695 RegisterMap::ProcessFrames::include,
1696 RegisterMap::WalkContinuation::skip);
1697 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1698 // produce statistics under the lock
1699 trace_ic_miss(f.pc());
1700 }
1701 #endif
1702
1785 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1786 cdc->set_to_clean();
1787 break;
1788 }
1789
1790 case relocInfo::virtual_call_type: {
1791 // compiled, dispatched call (which used to call an interpreted method)
1792 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1793 inline_cache->set_to_clean();
1794 break;
1795 }
1796 default:
1797 break;
1798 }
1799 }
1800 }
1801 }
1802
1803 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1804
1805 Atomic::inc(&_wrong_method_ctr);
1806
1807 #ifndef PRODUCT
1808 if (TraceCallFixup) {
1809 ResourceMark rm(current);
1810 tty->print("handle_wrong_method reresolving call to");
1811 callee_method->print_short_name(tty);
1812 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1813 }
1814 #endif
1815
1816 return callee_method;
1817 }
1818
1819 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1820 // The faulting unsafe accesses should be changed to throw the error
1821 // synchronously instead. Meanwhile the faulting instruction will be
1822 // skipped over (effectively turning it into a no-op) and an
1823 // asynchronous exception will be raised which the thread will
1824 // handle at a later point. If the instruction is a load it will
1825 // return garbage.
1826
1827 // Request an async exception.
2085 // This is only called when CheckJNICalls is true, and only
2086 // for virtual thread termination.
// Log a diagnostic when a virtual thread exits while still holding JNI
// MonitorEnter'ed objects. Only reached when CheckJNICalls is on.
JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
  assert(CheckJNICalls, "Only call this when checking JNI usage");
  if (log_is_enabled(Debug, jni)) {
    JavaThread* current = JavaThread::current();
    // Report both the virtual thread id and its carrier thread's id so the
    // offender can be identified in the log.
    int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
    int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
    log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
                   ") exiting with Objects still locked by JNI MonitorEnter.",
                   vthread_id, carrier_id);
  }
JRT_END
2098
2099 #ifndef PRODUCT
2100
// Non-product: dump all SharedRuntime statistics to tty, wrapped in an
// XML statistics element when -XX:+LogCompilation (xtty) is active.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;  // serialize tty output against other threads
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram_on(tty);
  SharedRuntime::print_counters_on(tty);
  AdapterHandlerLibrary::print_statistics_on(tty);

  if (xtty != nullptr) xtty->tail("statistics");
}
2111
2112 //void SharedRuntime::print_counters_on(outputStream* st) {
2113 // // Dump the JRT_ENTRY counters
2114 // if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2115 // if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2116 // if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2117 // if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2118 // if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2119 // if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2120 //
2121 // st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2122 // st->print_cr("%5u wrong method", _wrong_method_ctr);
2123 // st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2124 // st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2125 // st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2126 //
2127 // if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2128 // if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2129 // if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2130 // if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2131 // if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2132 // if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2133 // if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2134 // if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2135 // if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2136 // if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2137 // if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2138 // if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2139 // if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2140 // if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2141 // if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2142 // if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
2143 // if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2144 //}
2145
// Percentage helper for the statistics reports: returns x as a percentage
// of y, treating a denominator below 1 as 1 so the result stays finite.
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y < 1) ? (int64_t)1 : y;
  return 100.0 * (double)x / (double)denom;
}
2149
2150 class MethodArityHistogram {
2151 public:
2152 enum { MAX_ARITY = 256 };
2153 private:
2154 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2155 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2156 static uint64_t _total_compiled_calls;
2157 static uint64_t _max_compiled_calls_per_method;
2158 static int _max_arity; // max. arity seen
2159 static int _max_size; // max. arg size seen
2160
2161 static void add_method_to_histogram(nmethod* nm) {
2162 Method* method = (nm == nullptr) ? nullptr : nm->method();
2163 if (method != nullptr) {
2164 ArgumentCount args(method->signature());
2165 int arity = args.size() + (method->is_static() ? 0 : 1);
2210 // Take the Compile_lock to protect against changes in the CodeBlob structures
2211 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2212 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2213 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2214 _max_arity = _max_size = 0;
2215 _total_compiled_calls = 0;
2216 _max_compiled_calls_per_method = 0;
2217 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2218 CodeCache::nmethods_do(add_method_to_histogram);
2219 print_histogram();
2220 }
2221 };
2222
// Out-of-line storage for MethodArityHistogram's static accumulators.
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2229
2230 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2231 tty->print_cr("Calls from compiled code:");
2232 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2233 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2234 int64_t mono_i = _nof_interface_calls;
2235 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2236 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2237 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2238 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2239 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2240 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2241 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2242 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2243 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2244 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2245 tty->cr();
2246 tty->print_cr("Note 1: counter updates are not MT-safe.");
2247 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2248 tty->print_cr(" %% in nested categories are relative to their category");
2249 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2250 tty->cr();
2463 AdapterFingerPrint::equals>;
2464 static AdapterHandlerTable* _adapter_handler_table;
2465
// Find an entry with the same fingerprint, if one exists
// Look up an adapter whose fingerprint matches (total_args_passed, sig_bt)
// in the shared adapter handler table. Returns null on a miss.
// Caller must hold AdapterHandlerLibrary_lock.
static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
  NOT_PRODUCT(_lookups++);  // lookup attempt, counted in non-product builds
  assert_lock_strong(AdapterHandlerLibrary_lock);
  // Build a fingerprint key on the stack for the table probe.
  AdapterFingerPrint fp(total_args_passed, sig_bt);
  AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
  if (entry != nullptr) {
#ifndef PRODUCT
    // Hit statistics: track compact-fingerprint hits separately.
    if (fp.is_compact()) _compact++;
    _hits++;
#endif
    return *entry;
  }
  return nullptr;
}
2481
2482 #ifndef PRODUCT
// Non-product: dump adapter-handler-table statistics (hash-table shape
// plus lookup/hit counters) to 'st'.
void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
  // Per-entry footprint estimate used by the table statistics walk.
  auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
    return sizeof(*key) + sizeof(*a);
  };
  TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
  ts.print(st, "AdapterHandlerTable");
  st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
               _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
  st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
               _lookups, _equals, _hits, _compact);
}
2494 #endif // !PRODUCT
2495
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary

// Shared fallback handler for abstract methods (name-based; presumably these
// never run real adapted code -- confirm against the abstract-method call path).
AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
// Pre-built handlers for very common simple signatures; presumably a fast path
// that avoids a fingerprint-table lookup -- confirm against get_adapter().
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
// Size in bytes of the scratch BufferBlob below.
const int AdapterHandlerLibrary_size = 16*K;
// Scratch code buffer used while generating adapters; nullptr until allocated.
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2506
// Accessor for the adapter-generation scratch buffer; may be nullptr if it
// has not been allocated yet.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  return _buffer;
}
2510
2511 static void post_adapter_creation(const AdapterBlob* new_adapter,
2512 const AdapterHandlerEntry* entry) {
2513 if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2514 char blob_id[256];
3162 assert(found, "Should have found handler");
3163 }
3164
3165 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3166 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3167 if (get_i2c_entry() != nullptr) {
3168 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3169 }
3170 if (get_c2i_entry() != nullptr) {
3171 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3172 }
3173 if (get_c2i_unverified_entry() != nullptr) {
3174 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3175 }
3176 if (get_c2i_no_clinit_check_entry() != nullptr) {
3177 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3178 }
3179 st->cr();
3180 }
3181
// Re-arm the current thread's reserved stack zone (used by @ReservedStackAccess
// critical sections -- see JEP 270) after it has been consumed.
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  StackOverflow* overflow_state = current->stack_overflow_state();
  // check_if_disabled=true: the callee can verify the zone was actually disabled.
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  // Reset the activation watermark to the stack base so the zone is available again.
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3188
3189 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3190 ResourceMark rm(current);
3191 frame activation;
3192 nmethod* nm = nullptr;
3193 int count = 1;
3194
3195 assert(fr.is_java_frame(), "Must start on Java frame");
3196
3197 RegisterMap map(JavaThread::current(),
3198 RegisterMap::UpdateMap::skip,
3199 RegisterMap::ProcessFrames::skip,
3200 RegisterMap::WalkContinuation::skip); // don't walk continuations
3201 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|