51 #include "oops/klass.hpp"
52 #include "oops/method.inline.hpp"
53 #include "oops/objArrayKlass.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "prims/forte.hpp"
56 #include "prims/jvmtiExport.hpp"
57 #include "prims/jvmtiThreadState.hpp"
58 #include "prims/methodHandles.hpp"
59 #include "prims/nativeLookup.hpp"
60 #include "runtime/arguments.hpp"
61 #include "runtime/atomicAccess.hpp"
62 #include "runtime/basicLock.inline.hpp"
63 #include "runtime/frame.inline.hpp"
64 #include "runtime/handles.inline.hpp"
65 #include "runtime/init.hpp"
66 #include "runtime/interfaceSupport.inline.hpp"
67 #include "runtime/java.hpp"
68 #include "runtime/javaCalls.hpp"
69 #include "runtime/jniHandles.inline.hpp"
70 #include "runtime/osThread.hpp"
71 #include "runtime/perfData.hpp"
72 #include "runtime/sharedRuntime.hpp"
73 #include "runtime/stackWatermarkSet.hpp"
74 #include "runtime/stubRoutines.hpp"
75 #include "runtime/synchronizer.inline.hpp"
76 #include "runtime/timerTrace.hpp"
77 #include "runtime/vframe.inline.hpp"
78 #include "runtime/vframeArray.hpp"
79 #include "runtime/vm_version.hpp"
80 #include "utilities/copy.hpp"
81 #include "utilities/dtrace.hpp"
82 #include "utilities/events.hpp"
83 #include "utilities/globalDefinitions.hpp"
84 #include "utilities/hashTable.hpp"
85 #include "utilities/macros.hpp"
86 #include "utilities/xmlstream.hpp"
87 #ifdef COMPILER1
88 #include "c1/c1_Runtime1.hpp"
89 #endif
90 #if INCLUDE_JFR
91 #include "jfr/jfr.inline.hpp"
92 #endif
93
94 // Shared runtime stub routines reside in their own unique blob with a
95 // single entry point
96
97
98 #define SHARED_STUB_FIELD_DEFINE(name, type) \
99 type* SharedRuntime::BLOB_FIELD_NAME(name);
100 SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
101 #undef SHARED_STUB_FIELD_DEFINE
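// For example, assuming BLOB_FIELD_NAME(name) expands to _name_blob (matching
// the _wrong_method_blob and _throw_StackOverflowError_blob fields used below),
// an entry such as (wrong_method, RuntimeStub) in SHARED_STUBS_DO would define:
//   RuntimeStub* SharedRuntime::_wrong_method_blob;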
102
103 nmethod* SharedRuntime::_cont_doYield_stub;
104
105 #if 0
106 // TODO tweak global stub name generation to match this
107 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
108 const char *SharedRuntime::_stub_names[] = {
109 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
110 };
111 #endif
112
113 //----------------------------generate_stubs-----------------------------------
114 void SharedRuntime::generate_initial_stubs() {
115 // Build this early so it's available for the interpreter.
116 _throw_StackOverflowError_blob =
117 generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
118 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
119 }
120
121 void SharedRuntime::generate_stubs() {
122 _wrong_method_blob =
123 generate_resolve_blob(StubId::shared_wrong_method_id,
124 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
154 generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
155 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
156
157 #if COMPILER2_OR_JVMCI
158 // Vectors are generated only by C2 and JVMCI.
159 bool support_wide = is_wide_vector(MaxVectorSize);
160 if (support_wide) {
161 _polling_page_vectors_safepoint_handler_blob =
162 generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
163 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
164 }
165 #endif // COMPILER2_OR_JVMCI
166 _polling_page_safepoint_handler_blob =
167 generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
168 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
169 _polling_page_return_handler_blob =
170 generate_handler_blob(StubId::shared_polling_page_return_handler_id,
171 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
172
173 generate_deopt_blob();
174 }
175
176 void SharedRuntime::init_adapter_library() {
177 AdapterHandlerLibrary::initialize();
178 }
179
180 #if INCLUDE_JFR
181 //------------------------------generate jfr runtime stubs ------
182 void SharedRuntime::generate_jfr_stubs() {
183 ResourceMark rm;
184 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
185 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
186
187 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
188 _jfr_return_lease_blob = generate_jfr_return_lease();
189 }
190
191 #endif // INCLUDE_JFR
192
193 #include <math.h>
194
195 // Implementation of SharedRuntime
196
197 #ifndef PRODUCT
198 // For statistics
199 uint SharedRuntime::_ic_miss_ctr = 0;
200 uint SharedRuntime::_wrong_method_ctr = 0;
201 uint SharedRuntime::_resolve_static_ctr = 0;
202 uint SharedRuntime::_resolve_virtual_ctr = 0;
203 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
204 uint SharedRuntime::_implicit_null_throws = 0;
205 uint SharedRuntime::_implicit_div0_throws = 0;
206
207 int64_t SharedRuntime::_nof_normal_calls = 0;
208 int64_t SharedRuntime::_nof_inlined_calls = 0;
209 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
210 int64_t SharedRuntime::_nof_static_calls = 0;
211 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
212 int64_t SharedRuntime::_nof_interface_calls = 0;
213 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
214
215 uint SharedRuntime::_new_instance_ctr=0;
216 uint SharedRuntime::_new_array_ctr=0;
217 uint SharedRuntime::_multi2_ctr=0;
218 uint SharedRuntime::_multi3_ctr=0;
219 uint SharedRuntime::_multi4_ctr=0;
220 uint SharedRuntime::_multi5_ctr=0;
221 uint SharedRuntime::_mon_enter_stub_ctr=0;
222 uint SharedRuntime::_mon_exit_stub_ctr=0;
223 uint SharedRuntime::_mon_enter_ctr=0;
237 uint SharedRuntime::_unsafe_set_memory_ctr=0;
238
239 int SharedRuntime::_ICmiss_index = 0;
240 int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
241 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
242
243
244 void SharedRuntime::trace_ic_miss(address at) {
245 for (int i = 0; i < _ICmiss_index; i++) {
246 if (_ICmiss_at[i] == at) {
247 _ICmiss_count[i]++;
248 return;
249 }
250 }
251 int index = _ICmiss_index++;
252 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
253 _ICmiss_at[index] = at;
254 _ICmiss_count[index] = 1;
255 }
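// Note: once maxICmiss_count distinct miss sites have been recorded, the index
// is clamped and every later (unseen) site overwrites the final slot with a
// count of 1, so exact counts accumulate only for the first
// maxICmiss_count - 1 sites.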
256
257 void SharedRuntime::print_ic_miss_histogram() {
258 if (ICMissHistogram) {
259 tty->print_cr("IC Miss Histogram:");
260 int tot_misses = 0;
261 for (int i = 0; i < _ICmiss_index; i++) {
262 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
263 tot_misses += _ICmiss_count[i];
264 }
265 tty->print_cr("Total IC misses: %7d", tot_misses);
266 }
267 }
268
269 #ifdef COMPILER2
270 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
271 void SharedRuntime::debug_print_value(jboolean x) {
272 tty->print_cr("boolean %d", x);
273 }
274
275 void SharedRuntime::debug_print_value(jbyte x) {
276 tty->print_cr("byte %d", x);
277 }
278
279 void SharedRuntime::debug_print_value(jshort x) {
280 tty->print_cr("short %d", x);
281 }
282
283 void SharedRuntime::debug_print_value(jchar x) {
284 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
285 }
754 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
755 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
756 JNIHandles::destroy_local(vthread);
757 JRT_END
758 #endif // INCLUDE_JVMTI
759
760 // The interpreter code that calls this tracing function is only
761 // generated when Unified Logging is enabled for the redefine and class
762 // tags at the right level. Since obsolete methods are never compiled, we
763 // don't have to modify the compilers to generate calls to this function.
764 //
765 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
766 JavaThread* thread, Method* method))
767 if (method->is_obsolete()) {
768 // We are calling an obsolete method, but this is not necessarily
769 // an error. Our method could have been redefined just after we
770 // fetched the Method* from the constant pool.
771 ResourceMark rm;
772 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
773 }
774 return 0;
775 JRT_END
776
777 // ret_pc points into caller; we are returning caller's exception handler
778 // for given exception
779 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
780 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
781 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
782 assert(nm != nullptr, "must exist");
783 ResourceMark rm;
784
785 #if INCLUDE_JVMCI
786 if (nm->is_compiled_by_jvmci()) {
787 // lookup exception handler for this pc
788 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
789 ExceptionHandlerTable table(nm);
790 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
791 if (t != nullptr) {
792 return nm->code_begin() + t->pco();
793 } else {
1393
1394 // determine call info & receiver
1395 // note: a) receiver is null for static calls
1396 // b) an exception is thrown if receiver is null for non-static calls
1397 CallInfo call_info;
1398 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1399 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1400
1401 NoSafepointVerifier nsv;
1402
1403 methodHandle callee_method(current, call_info.selected_method());
1404
1405 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1406 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1407 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1408 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1409 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1410
1411 assert(!caller_nm->is_unloading(), "It should not be unloading");
1412
1413 #ifndef PRODUCT
1414 // tracing/debugging/statistics
1415 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1416 (is_virtual) ? (&_resolve_virtual_ctr) :
1417 (&_resolve_static_ctr);
1418 AtomicAccess::inc(addr);
1419
1420 if (TraceCallFixup) {
1421 ResourceMark rm(current);
1422 tty->print("resolving %s%s (%s) call to",
1423 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1424 Bytecodes::name(invoke_code));
1425 callee_method->print_short_name(tty);
1426 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1427 p2i(caller_frame.pc()), p2i(callee_method->code()));
1428 }
1429 #endif
1430
1431 if (invoke_code == Bytecodes::_invokestatic) {
1432 assert(callee_method->method_holder()->is_initialized() ||
1433 callee_method->method_holder()->is_reentrant_initialization(current),
1434 "invalid class initialization state for invoke_static");
1435 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1436 // In order to keep class initialization check, do not patch call
1437 // site for static call when the class is not fully initialized.
1438 // Proper check is enforced by call site re-resolution on every invocation.
1439 //
1455
1456 // Make sure the callee nmethod does not get deoptimized and removed before
1457 // we are done patching the code.
1458
1459
1460 CompiledICLocker ml(caller_nm);
1461 if (is_virtual && !is_optimized) {
1462 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1463 inline_cache->update(&call_info, receiver->klass());
1464 } else {
1465 // Callsite is a direct call - set it to the destination method
1466 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1467 callsite->set(callee_method);
1468 }
1469
1470 return callee_method;
1471 }
1472
1473 // Inline caches exist only in compiled code
1474 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1475 #ifdef ASSERT
1476 RegisterMap reg_map(current,
1477 RegisterMap::UpdateMap::skip,
1478 RegisterMap::ProcessFrames::include,
1479 RegisterMap::WalkContinuation::skip);
1480 frame stub_frame = current->last_frame();
1481 assert(stub_frame.is_runtime_frame(), "sanity check");
1482 frame caller_frame = stub_frame.sender(&reg_map);
1483 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1484 #endif /* ASSERT */
1485
1486 methodHandle callee_method;
1487 JRT_BLOCK
1488 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1489 // Return Method* through TLS
1490 current->set_vm_result_metadata(callee_method());
1491 JRT_BLOCK_END
1492 // return compiled code entry point after potential safepoints
1493 return get_resolved_entry(current, callee_method);
1494 JRT_END
1495
1496
1497 // Handle call site that has been made non-entrant
1498 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1499 // 6243940 We might end up in here if the callee is deoptimized
1500 // as we race to call it. We don't want to take a safepoint if
1501 // the caller was interpreted because the caller frame will look
1502 // interpreted to the stack walkers and arguments are now
1503 // "compiled" so it is much better to make this transition
1504 // invisible to the stack walking code. The i2c path will
1505 // place the callee method in the callee_target. It is stashed
1506 // there because if we try to find the callee by normal means a
1507 // safepoint is possible and we could have trouble gc'ing the compiled args.
1508 RegisterMap reg_map(current,
1509 RegisterMap::UpdateMap::skip,
1510 RegisterMap::ProcessFrames::include,
1511 RegisterMap::WalkContinuation::skip);
1512 frame stub_frame = current->last_frame();
1513 assert(stub_frame.is_runtime_frame(), "sanity check");
1514 frame caller_frame = stub_frame.sender(&reg_map);
1515
1516 if (caller_frame.is_interpreted_frame() ||
1517 caller_frame.is_entry_frame() ||
1518 caller_frame.is_upcall_stub_frame()) {
1531 // so bypassing it in c2i adapter is benign.
1532 return callee->get_c2i_no_clinit_check_entry();
1533 } else {
1534 return callee->get_c2i_entry();
1535 }
1536 }
1537
1538 // Must be compiled to compiled path which is safe to stackwalk
1539 methodHandle callee_method;
1540 JRT_BLOCK
1541 // Force resolving of caller (if we called from compiled frame)
1542 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1543 current->set_vm_result_metadata(callee_method());
1544 JRT_BLOCK_END
1545 // return compiled code entry point after potential safepoints
1546 return get_resolved_entry(current, callee_method);
1547 JRT_END
1548
1549 // Handle abstract method call
1550 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1551 // Verbose error message for AbstractMethodError.
1552 // Get the called method from the invoke bytecode.
1553 vframeStream vfst(current, true);
1554 assert(!vfst.at_end(), "Java frame must exist");
1555 methodHandle caller(current, vfst.method());
1556 Bytecode_invoke invoke(caller, vfst.bci());
1557 DEBUG_ONLY( invoke.verify(); )
1558
1559 // Find the compiled caller frame.
1560 RegisterMap reg_map(current,
1561 RegisterMap::UpdateMap::include,
1562 RegisterMap::ProcessFrames::include,
1563 RegisterMap::WalkContinuation::skip);
1564 frame stubFrame = current->last_frame();
1565 assert(stubFrame.is_runtime_frame(), "must be");
1566 frame callerFrame = stubFrame.sender(&reg_map);
1567 assert(callerFrame.is_compiled_frame(), "must be");
1568
1569 // Install exception and return forward entry.
1570 address res = SharedRuntime::throw_AbstractMethodError_entry();
1577 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1578 }
1579 JRT_BLOCK_END
1580 return res;
1581 JRT_END
1582
1583 // return verified_code_entry if interp_only_mode is not set for the current thread;
1584 // otherwise return c2i entry.
1585 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1586 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1587 // In interp_only_mode we need to go to the interpreted entry
1588 // The c2i won't patch in this mode -- see fixup_callers_callsite
1589 return callee_method->get_c2i_entry();
1590 }
1591 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1592 return callee_method->verified_code_entry();
1593 }
1594
1595 // resolve a static call and patch code
1596 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1597 methodHandle callee_method;
1598 bool enter_special = false;
1599 JRT_BLOCK
1600 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1601 current->set_vm_result_metadata(callee_method());
1602 JRT_BLOCK_END
1603 // return compiled code entry point after potential safepoints
1604 return get_resolved_entry(current, callee_method);
1605 JRT_END
1606
1607 // resolve virtual call and update inline cache to monomorphic
1608 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1609 methodHandle callee_method;
1610 JRT_BLOCK
1611 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1612 current->set_vm_result_metadata(callee_method());
1613 JRT_BLOCK_END
1614 // return compiled code entry point after potential safepoints
1615 return get_resolved_entry(current, callee_method);
1616 JRT_END
1617
1618
1619 // Resolve a virtual call that can be statically bound (e.g., always
1620 // monomorphic, so it has no inline cache). Patch code to resolved target.
1621 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1622 methodHandle callee_method;
1623 JRT_BLOCK
1624 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1625 current->set_vm_result_metadata(callee_method());
1626 JRT_BLOCK_END
1627 // return compiled code entry point after potential safepoints
1628 return get_resolved_entry(current, callee_method);
1629 JRT_END
1630
1631 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1632 JavaThread* current = THREAD;
1633 ResourceMark rm(current);
1634 CallInfo call_info;
1635 Bytecodes::Code bc;
1636
1637 // receiver is null for static calls. An exception is thrown for null
1638 // receivers for non-static calls
1639 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1640
1641 methodHandle callee_method(current, call_info.selected_method());
1642
1643 #ifndef PRODUCT
1644 AtomicAccess::inc(&_ic_miss_ctr);
1645
1646 // Statistics & Tracing
1647 if (TraceCallFixup) {
1648 ResourceMark rm(current);
1649 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1650 callee_method->print_short_name(tty);
1651 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1652 }
1653
1654 if (ICMissHistogram) {
1655 MutexLocker m(VMStatistic_lock);
1656 RegisterMap reg_map(current,
1657 RegisterMap::UpdateMap::skip,
1658 RegisterMap::ProcessFrames::include,
1659 RegisterMap::WalkContinuation::skip);
1660 frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1661 // produce statistics under the lock
1662 trace_ic_miss(f.pc());
1663 }
1664 #endif
1665
1748 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1749 cdc->set_to_clean();
1750 break;
1751 }
1752
1753 case relocInfo::virtual_call_type: {
1754 // compiled, dispatched call (which used to call an interpreted method)
1755 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1756 inline_cache->set_to_clean();
1757 break;
1758 }
1759 default:
1760 break;
1761 }
1762 }
1763 }
1764 }
1765
1766 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1767
1768
1769 #ifndef PRODUCT
1770 AtomicAccess::inc(&_wrong_method_ctr);
1771
1772 if (TraceCallFixup) {
1773 ResourceMark rm(current);
1774 tty->print("handle_wrong_method reresolving call to");
1775 callee_method->print_short_name(tty);
1776 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1777 }
1778 #endif
1779
1780 return callee_method;
1781 }
1782
1783 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1784 // The faulting unsafe accesses should be changed to throw the error
1785 // synchronously instead. Meanwhile the faulting instruction will be
1786 // skipped over (effectively turning it into a no-op) and an
1787 // asynchronous exception will be raised which the thread will
1788 // handle at a later point. If the instruction is a load it will
1789 // return garbage.
1790
1791 // Request an async exception.
2034 if (CheckJNICalls) {
2035 fatal("Object has been unlocked by JNI");
2036 }
2037 return;
2038 }
2039 ObjectSynchronizer::exit(obj, lock, current);
2040 }
2041
2042 // Handles the uncommon cases of monitor unlocking in compiled code
2043 JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
2044 assert(current == JavaThread::current(), "pre-condition");
2045 SharedRuntime::monitor_exit_helper(obj, lock, current);
2046 JRT_END
2047
2048 #ifndef PRODUCT
2049
2050 void SharedRuntime::print_statistics() {
2051 ttyLocker ttyl;
2052 if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");
2053
2054 SharedRuntime::print_ic_miss_histogram();
2055
2056 // Dump the JRT_ENTRY counters
2057 if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
2058 if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
2059 if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2060 if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2061 if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2062 if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2063
2064 tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2065 tty->print_cr("%5u wrong method", _wrong_method_ctr);
2066 tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2067 tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2068 tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2069
2070 if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2071 if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2072 if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2073 if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2074 if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2075 if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2076 if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2077 if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
2078 if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2079 if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2080 if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2081 if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2082 if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2083 if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2084 if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
2085 if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
2086 if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memory ops", _unsafe_set_memory_ctr);
2087
2088 AdapterHandlerLibrary::print_statistics();
2089
2090 if (xtty != nullptr) xtty->tail("statistics");
2091 }
2092
2093 inline double percent(int64_t x, int64_t y) {
2094 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2095 }
2096
2097 class MethodArityHistogram {
2098 public:
2099 enum { MAX_ARITY = 256 };
2100 private:
2101 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2102 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2103 static uint64_t _total_compiled_calls;
2104 static uint64_t _max_compiled_calls_per_method;
2105 static int _max_arity; // max. arity seen
2106 static int _max_size; // max. arg size seen
2107
2108 static void add_method_to_histogram(nmethod* nm) {
2109 Method* method = (nm == nullptr) ? nullptr : nm->method();
2110 if (method != nullptr) {
2111 ArgumentCount args(method->signature());
2112 int arity = args.size() + (method->is_static() ? 0 : 1);
2157 // Take the Compile_lock to protect against changes in the CodeBlob structures
2158 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2159 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2160 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2161 _max_arity = _max_size = 0;
2162 _total_compiled_calls = 0;
2163 _max_compiled_calls_per_method = 0;
2164 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2165 CodeCache::nmethods_do(add_method_to_histogram);
2166 print_histogram();
2167 }
2168 };
2169
2170 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2171 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2172 uint64_t MethodArityHistogram::_total_compiled_calls;
2173 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2174 int MethodArityHistogram::_max_arity;
2175 int MethodArityHistogram::_max_size;
2176
2177 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2178 tty->print_cr("Calls from compiled code:");
2179 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2180 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2181 int64_t mono_i = _nof_interface_calls;
2182 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2183 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2184 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2185 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2186 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2187 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2188 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2189 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2190 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2191 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2192 tty->cr();
2193 tty->print_cr("Note 1: counter updates are not MT-safe.");
2194 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2195 tty->print_cr(" %% in nested categories are relative to their category");
2196 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2197 tty->cr();
2501 }
2502 #endif // INCLUDE_CDS
2503 if (entry == nullptr) {
2504 assert_lock_strong(AdapterHandlerLibrary_lock);
2505 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2506 if (entry_p != nullptr) {
2507 entry = *entry_p;
2508 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2509 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2510 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2511 #ifndef PRODUCT
2512 _runtime_hits++;
2513 #endif
2514 }
2515 }
2516 AdapterFingerPrint::deallocate(fp);
2517 return entry;
2518 }
2519
2520 #ifndef PRODUCT
2521 static void print_table_statistics() {
2522 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2523 return sizeof(*key) + sizeof(*a);
2524 };
2525 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2526 ts.print(tty, "AdapterHandlerTable");
2527 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2528 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2529 int total_hits = _archived_hits + _runtime_hits;
2530 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2531 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2532 }
2533 #endif
2534
2535 // ---------------------------------------------------------------------------
2536 // Implementation of AdapterHandlerLibrary
2537 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2538 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2539 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2540 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2541 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2542 #if INCLUDE_CDS
2543 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2544 #endif // INCLUDE_CDS
2545 static const int AdapterHandlerLibrary_size = 16*K;
2546 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2547 volatile uint AdapterHandlerLibrary::_id_counter = 0;
2548
2549 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2550 assert(_buffer != nullptr, "should be initialized");
2551 return _buffer;
2552 }
2553
3436 };
3437 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3438 _adapter_handler_table->iterate(findblob_runtime_table);
3439 }
3440 assert(found, "Should have found handler");
3441 }
3442
3443 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3444 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3445 if (adapter_blob() != nullptr) {
3446 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3447 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3448 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3449 if (get_c2i_no_clinit_check_entry() != nullptr) {
3450 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3451 }
3452 }
3453 st->cr();
3454 }
3455
3456 #ifndef PRODUCT
3457
3458 void AdapterHandlerLibrary::print_statistics() {
3459 print_table_statistics();
3460 }
3461
3462 #endif /* PRODUCT */
3463
3464 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3465 assert(current == JavaThread::current(), "pre-condition");
3466 StackOverflow* overflow_state = current->stack_overflow_state();
3467 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3468 overflow_state->set_reserved_stack_activation(current->stack_base());
3469 JRT_END
3470
3471 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3472 ResourceMark rm(current);
3473 frame activation;
3474 nmethod* nm = nullptr;
3475 int count = 1;
3476
3477 assert(fr.is_java_frame(), "Must start on Java frame");
3478
3479 RegisterMap map(JavaThread::current(),
3480 RegisterMap::UpdateMap::skip,
3481 RegisterMap::ProcessFrames::skip,
3482 RegisterMap::WalkContinuation::skip); // don't walk continuations
3483 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|
51 #include "oops/klass.hpp"
52 #include "oops/method.inline.hpp"
53 #include "oops/objArrayKlass.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "prims/forte.hpp"
56 #include "prims/jvmtiExport.hpp"
57 #include "prims/jvmtiThreadState.hpp"
58 #include "prims/methodHandles.hpp"
59 #include "prims/nativeLookup.hpp"
60 #include "runtime/arguments.hpp"
61 #include "runtime/atomicAccess.hpp"
62 #include "runtime/basicLock.inline.hpp"
63 #include "runtime/frame.inline.hpp"
64 #include "runtime/handles.inline.hpp"
65 #include "runtime/init.hpp"
66 #include "runtime/interfaceSupport.inline.hpp"
67 #include "runtime/java.hpp"
68 #include "runtime/javaCalls.hpp"
69 #include "runtime/jniHandles.inline.hpp"
70 #include "runtime/osThread.hpp"
71 #include "runtime/perfData.inline.hpp"
72 #include "runtime/sharedRuntime.hpp"
73 #include "runtime/stackWatermarkSet.hpp"
74 #include "runtime/stubRoutines.hpp"
75 #include "runtime/synchronizer.inline.hpp"
76 #include "runtime/timerTrace.hpp"
77 #include "runtime/vframe.inline.hpp"
78 #include "runtime/vframeArray.hpp"
79 #include "runtime/vm_version.hpp"
80 #include "services/management.hpp"
81 #include "utilities/copy.hpp"
82 #include "utilities/dtrace.hpp"
83 #include "utilities/events.hpp"
84 #include "utilities/globalDefinitions.hpp"
85 #include "utilities/hashTable.hpp"
86 #include "utilities/macros.hpp"
87 #include "utilities/xmlstream.hpp"
88 #ifdef COMPILER1
89 #include "c1/c1_Runtime1.hpp"
90 #endif
91 #if INCLUDE_JFR
92 #include "jfr/jfr.inline.hpp"
93 #endif
94
95 // Shared runtime stub routines reside in their own unique blob with a
96 // single entry point
97
98
99 #define SHARED_STUB_FIELD_DEFINE(name, type) \
100 type* SharedRuntime::BLOB_FIELD_NAME(name);
101 SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
102 #undef SHARED_STUB_FIELD_DEFINE
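// For example, assuming BLOB_FIELD_NAME(name) expands to _name_blob (matching
// the _wrong_method_blob and _throw_StackOverflowError_blob fields used below),
// an entry such as (wrong_method, RuntimeStub) in SHARED_STUBS_DO would define:
//   RuntimeStub* SharedRuntime::_wrong_method_blob;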
103
104 nmethod* SharedRuntime::_cont_doYield_stub;
105
106 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
107 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time = nullptr;
108 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time = nullptr;
109 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
110 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time = nullptr;
111
112 #if 0
113 // TODO tweak global stub name generation to match this
114 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
115 const char *SharedRuntime::_stub_names[] = {
116 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
117 };
118 #endif
119
120 //----------------------------generate_stubs-----------------------------------
121 void SharedRuntime::generate_initial_stubs() {
122 // Build this early so it's available for the interpreter.
123 _throw_StackOverflowError_blob =
124 generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
125 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
126 }
127
128 void SharedRuntime::generate_stubs() {
129 _wrong_method_blob =
130 generate_resolve_blob(StubId::shared_wrong_method_id,
131 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
161 generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
162 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
163
164 #if COMPILER2_OR_JVMCI
165 // Vectors are generated only by C2 and JVMCI.
166 bool support_wide = is_wide_vector(MaxVectorSize);
167 if (support_wide) {
168 _polling_page_vectors_safepoint_handler_blob =
169 generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
170 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
171 }
172 #endif // COMPILER2_OR_JVMCI
173 _polling_page_safepoint_handler_blob =
174 generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
175 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
176 _polling_page_return_handler_blob =
177 generate_handler_blob(StubId::shared_polling_page_return_handler_id,
178 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
179
180 generate_deopt_blob();
181
182 if (UsePerfData) {
183 EXCEPTION_MARK;
184 NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resolve_opt_virtual_call");
185 NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resolve_virtual_call");
186 NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resolve_static_call");
187 NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
188 NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time , SUN_CI, "ic_miss");
189 if (HAS_PENDING_EXCEPTION) {
190 vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
191 }
192 }
193 }
194
195 void SharedRuntime::init_adapter_library() {
196 AdapterHandlerLibrary::initialize();
197 }
198
199 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
200 st->print(" %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
201 if (TraceThreadTime) {
202 st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
203 }
204 st->print(" / %5d events", cnt);
205 st->cr();
206 }
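// A minimal sketch of the resulting line, with illustrative values and
// TraceThreadTime off:
//   resolve_static_call:            1234us /   567 events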
207
208 void SharedRuntime::print_counters_on(outputStream* st) {
209 st->print_cr("SharedRuntime:");
210 if (UsePerfData) {
211 print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
212 print_counter_on(st, "resolve_virtual_call:", _perf_resolve_virtual_total_time, _resolve_virtual_ctr);
213 print_counter_on(st, "resolve_static_call:", _perf_resolve_static_total_time, _resolve_static_ctr);
214 print_counter_on(st, "handle_wrong_method:", _perf_handle_wrong_method_total_time, _wrong_method_ctr);
215 print_counter_on(st, "ic_miss:", _perf_ic_miss_total_time, _ic_miss_ctr);
216
217 jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
218 _perf_resolve_virtual_total_time->elapsed_counter_value() +
219 _perf_resolve_static_total_time->elapsed_counter_value() +
220 _perf_handle_wrong_method_total_time->elapsed_counter_value() +
221 _perf_ic_miss_total_time->elapsed_counter_value());
222 st->print("Total: " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
223 if (TraceThreadTime) {
224 jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
225 _perf_resolve_virtual_total_time->thread_counter_value() +
226 _perf_resolve_static_total_time->thread_counter_value() +
227 _perf_handle_wrong_method_total_time->thread_counter_value() +
228 _perf_ic_miss_total_time->thread_counter_value());
229 st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);
230
231 }
232 st->cr();
233 } else {
234 st->print_cr(" no data (UsePerfData is turned off)");
235 }
236 }
237
238 #if INCLUDE_JFR
239 //------------------------------generate jfr runtime stubs ------
240 void SharedRuntime::generate_jfr_stubs() {
241 ResourceMark rm;
242 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
243 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
244
245 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
246 _jfr_return_lease_blob = generate_jfr_return_lease();
247 }
248
249 #endif // INCLUDE_JFR
250
251 #include <math.h>
252
253 // Implementation of SharedRuntime
254
255 // For statistics
256 uint SharedRuntime::_ic_miss_ctr = 0;
257 uint SharedRuntime::_wrong_method_ctr = 0;
258 uint SharedRuntime::_resolve_static_ctr = 0;
259 uint SharedRuntime::_resolve_virtual_ctr = 0;
260 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
261
262 #ifndef PRODUCT
263 uint SharedRuntime::_implicit_null_throws = 0;
264 uint SharedRuntime::_implicit_div0_throws = 0;
265
266 int64_t SharedRuntime::_nof_normal_calls = 0;
267 int64_t SharedRuntime::_nof_inlined_calls = 0;
268 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
269 int64_t SharedRuntime::_nof_static_calls = 0;
270 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
271 int64_t SharedRuntime::_nof_interface_calls = 0;
272 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
273
274 uint SharedRuntime::_new_instance_ctr=0;
275 uint SharedRuntime::_new_array_ctr=0;
276 uint SharedRuntime::_multi2_ctr=0;
277 uint SharedRuntime::_multi3_ctr=0;
278 uint SharedRuntime::_multi4_ctr=0;
279 uint SharedRuntime::_multi5_ctr=0;
280 uint SharedRuntime::_mon_enter_stub_ctr=0;
281 uint SharedRuntime::_mon_exit_stub_ctr=0;
282 uint SharedRuntime::_mon_enter_ctr=0;
296 uint SharedRuntime::_unsafe_set_memory_ctr=0;
297
298 int SharedRuntime::_ICmiss_index = 0;
299 int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
300 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
301
302
303 void SharedRuntime::trace_ic_miss(address at) {
304 for (int i = 0; i < _ICmiss_index; i++) {
305 if (_ICmiss_at[i] == at) {
306 _ICmiss_count[i]++;
307 return;
308 }
309 }
310 int index = _ICmiss_index++;
311 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
312 _ICmiss_at[index] = at;
313 _ICmiss_count[index] = 1;
314 }
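// Note: once maxICmiss_count distinct miss sites have been recorded, the index
// is clamped and every later (unseen) site overwrites the final slot with a
// count of 1, so exact counts accumulate only for the first
// maxICmiss_count - 1 sites.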
315
316 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
317 if (ICMissHistogram) {
318 st->print_cr("IC Miss Histogram:");
319 int tot_misses = 0;
320 for (int i = 0; i < _ICmiss_index; i++) {
321 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
322 tot_misses += _ICmiss_count[i];
323 }
324 st->print_cr("Total IC misses: %7d", tot_misses);
325 }
326 }
327
328 #ifdef COMPILER2
329 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
330 void SharedRuntime::debug_print_value(jboolean x) {
331 tty->print_cr("boolean %d", x);
332 }
333
334 void SharedRuntime::debug_print_value(jbyte x) {
335 tty->print_cr("byte %d", x);
336 }
337
338 void SharedRuntime::debug_print_value(jshort x) {
339 tty->print_cr("short %d", x);
340 }
341
342 void SharedRuntime::debug_print_value(jchar x) {
343 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
344 }
813 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
814 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
815 JNIHandles::destroy_local(vthread);
816 JRT_END
817 #endif // INCLUDE_JVMTI
818
819 // The interpreter code that calls this tracing function is only
820 // generated when Unified Logging is enabled for the redefine and class
821 // tags at the right level. Since obsolete methods are never compiled, we
822 // don't have to modify the compilers to generate calls to this function.
823 //
824 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
825 JavaThread* thread, Method* method))
826 if (method->is_obsolete()) {
827 // We are calling an obsolete method, but this is not necessarily
828 // an error. Our method could have been redefined just after we
829 // fetched the Method* from the constant pool.
830 ResourceMark rm;
831 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
832 }
833
834 LogStreamHandle(Trace, interpreter, bytecode) log;
835 if (log.is_enabled()) {
836 ResourceMark rm;
837 log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
838 p2i(thread),
839 (method->is_static() ? "static" : "virtual"),
840 method->name_and_sig_as_C_string(),
841 (method->is_native() ? " native" : ""),
842 (thread->class_being_initialized() != nullptr ? " clinit" : ""),
843 (method->method_holder()->is_initialized() ? "" : " being_initialized"));
844 }
845 return 0;
846 JRT_END
847
848 // ret_pc points into caller; we are returning caller's exception handler
849 // for given exception
850 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
851 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
852 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
853 assert(nm != nullptr, "must exist");
854 ResourceMark rm;
855
856 #if INCLUDE_JVMCI
857 if (nm->is_compiled_by_jvmci()) {
858 // lookup exception handler for this pc
859 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
860 ExceptionHandlerTable table(nm);
861 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
862 if (t != nullptr) {
863 return nm->code_begin() + t->pco();
864 } else {
1464
1465 // determine call info & receiver
1466 // note: a) receiver is null for static calls
1467 // b) an exception is thrown if receiver is null for non-static calls
1468 CallInfo call_info;
1469 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1470 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1471
1472 NoSafepointVerifier nsv;
1473
1474 methodHandle callee_method(current, call_info.selected_method());
1475
1476 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1477 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1478 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1479 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1480 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1481
1482 assert(!caller_nm->is_unloading(), "It should not be unloading");
1483
1484 // tracing/debugging/statistics
1485 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1486 (is_virtual) ? (&_resolve_virtual_ctr) :
1487 (&_resolve_static_ctr);
1488 AtomicAccess::inc(addr);
1489
1490 #ifndef PRODUCT
1491 if (TraceCallFixup) {
1492 ResourceMark rm(current);
1493 tty->print("resolving %s%s (%s) call to",
1494 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1495 Bytecodes::name(invoke_code));
1496 callee_method->print_short_name(tty);
1497 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1498 p2i(caller_frame.pc()), p2i(callee_method->code()));
1499 }
1500 #endif
1501
1502 if (invoke_code == Bytecodes::_invokestatic) {
1503 assert(callee_method->method_holder()->is_initialized() ||
1504 callee_method->method_holder()->is_reentrant_initialization(current),
1505 "invalid class initialization state for invoke_static");
1506 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1507 // In order to keep class initialization check, do not patch call
1508 // site for static call when the class is not fully initialized.
1509 // Proper check is enforced by call site re-resolution on every invocation.
1510 //
1526
1527 // Make sure the callee nmethod does not get deoptimized and removed before
1528 // we are done patching the code.
1529
1530
1531 CompiledICLocker ml(caller_nm);
1532 if (is_virtual && !is_optimized) {
1533 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1534 inline_cache->update(&call_info, receiver->klass());
1535 } else {
1536 // Callsite is a direct call - set it to the destination method
1537 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1538 callsite->set(callee_method);
1539 }
1540
1541 return callee_method;
1542 }
1543
1544 // Inline caches exist only in compiled code
1545 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1546 PerfTraceTime timer(_perf_ic_miss_total_time);
1547
1548 #ifdef ASSERT
1549 RegisterMap reg_map(current,
1550 RegisterMap::UpdateMap::skip,
1551 RegisterMap::ProcessFrames::include,
1552 RegisterMap::WalkContinuation::skip);
1553 frame stub_frame = current->last_frame();
1554 assert(stub_frame.is_runtime_frame(), "sanity check");
1555 frame caller_frame = stub_frame.sender(&reg_map);
1556 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1557 #endif /* ASSERT */
1558
1559 methodHandle callee_method;
1560 JRT_BLOCK
1561 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1562 // Return Method* through TLS
1563 current->set_vm_result_metadata(callee_method());
1564 JRT_BLOCK_END
1565 // return compiled code entry point after potential safepoints
1566 return get_resolved_entry(current, callee_method);
1567 JRT_END
1568
1569
1570 // Handle call site that has been made non-entrant
1571 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1572 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1573
1574 // 6243940 We might end up in here if the callee is deoptimized
1575 // as we race to call it. We don't want to take a safepoint if
1576 // the caller was interpreted because the caller frame will look
1577 // interpreted to the stack walkers and arguments are now
1578 // "compiled" so it is much better to make this transition
1579 // invisible to the stack walking code. The i2c path will
1580 // place the callee method in the callee_target. It is stashed
1581 // there because if we try to find the callee by normal means a
1582 // safepoint is possible and we could have trouble gc'ing the compiled args.
1583 RegisterMap reg_map(current,
1584 RegisterMap::UpdateMap::skip,
1585 RegisterMap::ProcessFrames::include,
1586 RegisterMap::WalkContinuation::skip);
1587 frame stub_frame = current->last_frame();
1588 assert(stub_frame.is_runtime_frame(), "sanity check");
1589 frame caller_frame = stub_frame.sender(&reg_map);
1590
1591 if (caller_frame.is_interpreted_frame() ||
1592 caller_frame.is_entry_frame() ||
1593 caller_frame.is_upcall_stub_frame()) {
1606 // so bypassing it in c2i adapter is benign.
1607 return callee->get_c2i_no_clinit_check_entry();
1608 } else {
1609 return callee->get_c2i_entry();
1610 }
1611 }
1612
1613 // Must be compiled to compiled path which is safe to stackwalk
1614 methodHandle callee_method;
1615 JRT_BLOCK
1616 // Force resolving of caller (if we called from compiled frame)
1617 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1618 current->set_vm_result_metadata(callee_method());
1619 JRT_BLOCK_END
1620 // return compiled code entry point after potential safepoints
1621 return get_resolved_entry(current, callee_method);
1622 JRT_END
1623
1624 // Handle abstract method call
1625 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1626 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1627
1628 // Verbose error message for AbstractMethodError.
1629 // Get the called method from the invoke bytecode.
1630 vframeStream vfst(current, true);
1631 assert(!vfst.at_end(), "Java frame must exist");
1632 methodHandle caller(current, vfst.method());
1633 Bytecode_invoke invoke(caller, vfst.bci());
1634 DEBUG_ONLY( invoke.verify(); )
1635
1636 // Find the compiled caller frame.
1637 RegisterMap reg_map(current,
1638 RegisterMap::UpdateMap::include,
1639 RegisterMap::ProcessFrames::include,
1640 RegisterMap::WalkContinuation::skip);
1641 frame stubFrame = current->last_frame();
1642 assert(stubFrame.is_runtime_frame(), "must be");
1643 frame callerFrame = stubFrame.sender(&reg_map);
1644 assert(callerFrame.is_compiled_frame(), "must be");
1645
1646 // Install exception and return forward entry.
1647 address res = SharedRuntime::throw_AbstractMethodError_entry();
1654 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1655 }
1656 JRT_BLOCK_END
1657 return res;
1658 JRT_END
1659
1660 // return verified_code_entry if interp_only_mode is not set for the current thread;
1661 // otherwise return c2i entry.
1662 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1663 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1664 // In interp_only_mode we need to go to the interpreted entry
1665 // The c2i won't patch in this mode -- see fixup_callers_callsite
1666 return callee_method->get_c2i_entry();
1667 }
1668 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1669 return callee_method->verified_code_entry();
1670 }
1671
1672 // resolve a static call and patch code
1673 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1674 PerfTraceTime timer(_perf_resolve_static_total_time);
1675
1676 methodHandle callee_method;
1677 bool enter_special = false;
1678 JRT_BLOCK
1679 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1680 current->set_vm_result_metadata(callee_method());
1681 JRT_BLOCK_END
1682 // return compiled code entry point after potential safepoints
1683 return get_resolved_entry(current, callee_method);
1684 JRT_END
1685
1686 // resolve virtual call and update inline cache to monomorphic
1687 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1688 PerfTraceTime timer(_perf_resolve_virtual_total_time);
1689
1690 methodHandle callee_method;
1691 JRT_BLOCK
1692 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1693 current->set_vm_result_metadata(callee_method());
1694 JRT_BLOCK_END
1695 // return compiled code entry point after potential safepoints
1696 return get_resolved_entry(current, callee_method);
1697 JRT_END
1698
1699
1700 // Resolve a virtual call that can be statically bound (e.g., always
1701 // monomorphic, so it has no inline cache). Patch code to resolved target.
1702 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1703 PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1704
1705 methodHandle callee_method;
1706 JRT_BLOCK
1707 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1708 current->set_vm_result_metadata(callee_method());
1709 JRT_BLOCK_END
1710 // return compiled code entry point after potential safepoints
1711 return get_resolved_entry(current, callee_method);
1712 JRT_END
1713
1714 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1715 JavaThread* current = THREAD;
1716 ResourceMark rm(current);
1717 CallInfo call_info;
1718 Bytecodes::Code bc;
1719
1720 // receiver is null for static calls. An exception is thrown for null
1721 // receivers for non-static calls
1722 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1723
1724 methodHandle callee_method(current, call_info.selected_method());
1725
1726 AtomicAccess::inc(&_ic_miss_ctr);
1727
1728 #ifndef PRODUCT
1729 // Statistics & Tracing
1730 if (TraceCallFixup) {
1731 ResourceMark rm(current);
1732 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1733 callee_method->print_short_name(tty);
1734 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1735 }
1736
1737 if (ICMissHistogram) {
1738 MutexLocker m(VMStatistic_lock);
1739 RegisterMap reg_map(current,
1740 RegisterMap::UpdateMap::skip,
1741 RegisterMap::ProcessFrames::include,
1742 RegisterMap::WalkContinuation::skip);
1743 frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1744 // produce statistics under the lock
1745 trace_ic_miss(f.pc());
1746 }
1747 #endif
1748
1831 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1832 cdc->set_to_clean();
1833 break;
1834 }
1835
1836 case relocInfo::virtual_call_type: {
1837 // compiled, dispatched call (which used to call an interpreted method)
1838 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1839 inline_cache->set_to_clean();
1840 break;
1841 }
1842 default:
1843 break;
1844 }
1845 }
1846 }
1847 }
1848
1849 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1850
1851 AtomicAccess::inc(&_wrong_method_ctr);
1852
1853 #ifndef PRODUCT
1854 if (TraceCallFixup) {
1855 ResourceMark rm(current);
1856 tty->print("handle_wrong_method reresolving call to");
1857 callee_method->print_short_name(tty);
1858 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1859 }
1860 #endif
1861
1862 return callee_method;
1863 }
1864
1865 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1866 // The faulting unsafe accesses should be changed to throw the error
1867 // synchronously instead. Meanwhile the faulting instruction will be
1868 // skipped over (effectively turning it into a no-op) and an
1869 // asynchronous exception will be raised which the thread will
1870 // handle at a later point. If the instruction is a load it will
1871 // return garbage.
1872
1873 // Request an async exception.
2116 if (CheckJNICalls) {
2117 fatal("Object has been unlocked by JNI");
2118 }
2119 return;
2120 }
2121 ObjectSynchronizer::exit(obj, lock, current);
2122 }
2123
2124 // Handles the uncommon cases of monitor unlocking in compiled code
2125 JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
2126 assert(current == JavaThread::current(), "pre-condition");
2127 SharedRuntime::monitor_exit_helper(obj, lock, current);
2128 JRT_END
2129
2130 #ifndef PRODUCT
2131
2132 void SharedRuntime::print_statistics() {
2133 ttyLocker ttyl;
2134 if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");
2135
2136 SharedRuntime::print_ic_miss_histogram_on(tty);
2137 SharedRuntime::print_counters_on(tty);
2138 AdapterHandlerLibrary::print_statistics_on(tty);
2139
2140 if (xtty != nullptr) xtty->tail("statistics");
2141 }
2142
2143 void SharedRuntime::print_counters_on(outputStream* st) {
2144 // Dump the JRT_ENTRY counters
2145 if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2146 if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2147 if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2148 if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2149 if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2150 if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2151
2152 st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2153 st->print_cr("%5u wrong method", _wrong_method_ctr);
2154 st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2155 st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2156 st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2157
2158 if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2159 if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2160 if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2161 if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2162 if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2163 if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2164 if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2165 if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2166 if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2167 if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2168 if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2169 if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2170 if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2171 if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2172 if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2173 if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
2174 if (_unsafe_set_memory_ctr) st->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2175 }
2176
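// Helper for the statistics below: the denominator is clamped to 1 so that an
// empty category prints as 0% instead of dividing by zero.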
2177 inline double percent(int64_t x, int64_t y) {
2178 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2179 }
2180
2181 class MethodArityHistogram {
2182 public:
2183 enum { MAX_ARITY = 256 };
2184 private:
2185 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2186 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2187 static uint64_t _total_compiled_calls;
2188 static uint64_t _max_compiled_calls_per_method;
2189 static int _max_arity; // max. arity seen
2190 static int _max_size; // max. arg size seen
2191
2192 static void add_method_to_histogram(nmethod* nm) {
2193 Method* method = (nm == nullptr) ? nullptr : nm->method();
2194 if (method != nullptr) {
2195 ArgumentCount args(method->signature());
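// A non-static method carries an implicit receiver, which adds one argument slot.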
2196 int arity = args.size() + (method->is_static() ? 0 : 1);
2241 // Take the Compile_lock to protect against changes in the CodeBlob structures
2242 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2243 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2244 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2245 _max_arity = _max_size = 0;
2246 _total_compiled_calls = 0;
2247 _max_compiled_calls_per_method = 0;
2248 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2249 CodeCache::nmethods_do(add_method_to_histogram);
2250 print_histogram();
2251 }
2252 };
2253
2254 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2255 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2256 uint64_t MethodArityHistogram::_total_compiled_calls;
2257 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2258 int MethodArityHistogram::_max_arity;
2259 int MethodArityHistogram::_max_size;
2260
2261 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2262 tty->print_cr("Calls from compiled code:");
2263 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2264 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2265 int64_t mono_i = _nof_interface_calls;
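// Monomorphic virtual counts are derived by subtracting the megamorphic calls;
// interface calls are reported as monomorphic as-is.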
2266 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2267 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2268 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2269 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2270 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2271 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2272 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2273 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2274 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2275 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2276 tty->cr();
2277 tty->print_cr("Note 1: counter updates are not MT-safe.");
2278 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2279 tty->print_cr(" %% in nested categories are relative to their category");
2280 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2281 tty->cr();
2585 }
2586 #endif // INCLUDE_CDS
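// Not found in the AOT-archived table (or CDS is not included): fall back to the
// table populated at runtime.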
2587 if (entry == nullptr) {
2588 assert_lock_strong(AdapterHandlerLibrary_lock);
2589 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2590 if (entry_p != nullptr) {
2591 entry = *entry_p;
2592 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2593 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2594 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2595 #ifndef PRODUCT
2596 _runtime_hits++;
2597 #endif
2598 }
2599 }
2600 AdapterFingerPrint::deallocate(fp);
2601 return entry;
2602 }
2603
2604 #ifndef PRODUCT
2605 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
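// Charge only the shallow size of each fingerprint key and handler entry to the
// table statistics; the generated adapter code lives in its own blob.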
2606 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2607 return sizeof(*key) + sizeof(*a);
2608 };
2609 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2610 ts.print(st, "AdapterHandlerTable");
2611 st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2612 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2613 int total_hits = _archived_hits + _runtime_hits;
2614 st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2615 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2616 }
2617 #endif // !PRODUCT
2618
2619 // ---------------------------------------------------------------------------
2620 // Implementation of AdapterHandlerLibrary
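// Pre-built entries shared by methods with these common trivial signatures; they
// are created once when the library is initialized.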
2621 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2622 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2623 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2624 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2625 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2626 #if INCLUDE_CDS
2627 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2628 #endif // INCLUDE_CDS
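// Scratch BufferBlob (16K) used while adapter code is being generated.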
2629 static const int AdapterHandlerLibrary_size = 16*K;
2630 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2631 volatile uint AdapterHandlerLibrary::_id_counter = 0;
2632
2633 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2634 assert(_buffer != nullptr, "should be initialized");
2635 return _buffer;
2636 }
2637
3520 };
3521 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3522 _adapter_handler_table->iterate(findblob_runtime_table);
3523 }
3524 assert(found, "Should have found handler");
3525 }
3526
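// Entry point legend: i2c = interpreter-to-compiled, c2i = compiled-to-interpreter,
// c2iUV = the unverified c2i entry, c2iNCI = the c2i entry that skips the class
// initialization check.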
3527 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3528 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3529 if (adapter_blob() != nullptr) {
3530 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3531 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3532 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3533 if (get_c2i_no_clinit_check_entry() != nullptr) {
3534 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3535 }
3536 }
3537 st->cr();
3538 }
3539
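// Re-enables (re-protects) the thread's reserved stack zone, typically after a
// @ReservedStackAccess method has finished with it, and resets the activation
// watermark to the stack base.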
3540 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3541 assert(current == JavaThread::current(), "pre-condition");
3542 StackOverflow* overflow_state = current->stack_overflow_state();
3543 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3544 overflow_state->set_reserved_stack_activation(current->stack_base());
3545 JRT_END
3546
3547 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3548 ResourceMark rm(current);
3549 frame activation;
3550 nmethod* nm = nullptr;
3551 int count = 1;
3552
3553 assert(fr.is_java_frame(), "Must start on Java frame");
3554
3555 RegisterMap map(JavaThread::current(),
3556 RegisterMap::UpdateMap::skip,
3557 RegisterMap::ProcessFrames::skip,
3558 RegisterMap::WalkContinuation::skip); // don't walk continuations
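// Walk the physical Java frames outwards from fr, looking for a method
// annotated with @ReservedStackAccess.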
3559 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {