51 #include "oops/klass.hpp"
52 #include "oops/method.inline.hpp"
53 #include "oops/objArrayKlass.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "prims/forte.hpp"
56 #include "prims/jvmtiExport.hpp"
57 #include "prims/jvmtiThreadState.hpp"
58 #include "prims/methodHandles.hpp"
59 #include "prims/nativeLookup.hpp"
60 #include "runtime/arguments.hpp"
61 #include "runtime/atomicAccess.hpp"
62 #include "runtime/basicLock.inline.hpp"
63 #include "runtime/frame.inline.hpp"
64 #include "runtime/handles.inline.hpp"
65 #include "runtime/init.hpp"
66 #include "runtime/interfaceSupport.inline.hpp"
67 #include "runtime/java.hpp"
68 #include "runtime/javaCalls.hpp"
69 #include "runtime/jniHandles.inline.hpp"
70 #include "runtime/osThread.hpp"
71 #include "runtime/perfData.hpp"
72 #include "runtime/sharedRuntime.hpp"
73 #include "runtime/stackWatermarkSet.hpp"
74 #include "runtime/stubRoutines.hpp"
75 #include "runtime/synchronizer.hpp"
76 #include "runtime/timerTrace.hpp"
77 #include "runtime/vframe.inline.hpp"
78 #include "runtime/vframeArray.hpp"
79 #include "runtime/vm_version.hpp"
80 #include "utilities/copy.hpp"
81 #include "utilities/dtrace.hpp"
82 #include "utilities/events.hpp"
83 #include "utilities/globalDefinitions.hpp"
84 #include "utilities/hashTable.hpp"
85 #include "utilities/macros.hpp"
86 #include "utilities/xmlstream.hpp"
87 #ifdef COMPILER1
88 #include "c1/c1_Runtime1.hpp"
89 #endif
90 #ifdef COMPILER2
91 #include "opto/runtime.hpp"
92 #endif
93 #if INCLUDE_JFR
94 #include "jfr/jfr.inline.hpp"
95 #endif
96
97 // Shared runtime stub routines reside in their own unique blob with a
98 // single entry point
99
100
// Define the static blob-pointer field for each shared runtime stub
// enumerated by SHARED_STUBS_DO (field names come from BLOB_FIELD_NAME).
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type* SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

// Continuation doYield stub; kept separate because it is an nmethod rather
// than one of the blob types produced by the macro list above.
nmethod* SharedRuntime::_cont_doYield_stub;

#if 0
// TODO tweak global stub name generation to match this
#define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
const char *SharedRuntime::_stub_names[] = {
  SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
};
#endif
115
116 //----------------------------generate_stubs-----------------------------------
117 void SharedRuntime::generate_initial_stubs() {
118 // Build this early so it's available for the interpreter.
119 _throw_StackOverflowError_blob =
120 generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
121 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
122 }
123
124 void SharedRuntime::generate_stubs() {
125 _wrong_method_blob =
126 generate_resolve_blob(StubId::shared_wrong_method_id,
127 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
157 generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
158 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
159
160 #if COMPILER2_OR_JVMCI
161 // Vectors are generated only by C2 and JVMCI.
162 bool support_wide = is_wide_vector(MaxVectorSize);
163 if (support_wide) {
164 _polling_page_vectors_safepoint_handler_blob =
165 generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
166 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
167 }
168 #endif // COMPILER2_OR_JVMCI
169 _polling_page_safepoint_handler_blob =
170 generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
171 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
172 _polling_page_return_handler_blob =
173 generate_handler_blob(StubId::shared_polling_page_return_handler_id,
174 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
175
176 generate_deopt_blob();
177 }
178
// Set up the AdapterHandlerLibrary (delegates to its initialize()).
void SharedRuntime::init_adapter_library() {
  AdapterHandlerLibrary::initialize();
}
182
#if INCLUDE_JFR
//------------------------------generate jfr runtime stubs ------
// Generate the two JFR support blobs, timing the work under the
// startuptime log tag.
void SharedRuntime::generate_jfr_stubs() {
  ResourceMark rm;
  TraceTime timer("SharedRuntime generate_jfr_stubs", TRACETIME_LOG(Info, startuptime));

  // The two blobs are independent; generate each in turn.
  _jfr_return_lease_blob     = generate_jfr_return_lease();
  _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
}

#endif // INCLUDE_JFR
195
196 #include <math.h>
197
198 // Implementation of SharedRuntime
199
200 #ifndef PRODUCT
201 // For statistics
202 uint SharedRuntime::_ic_miss_ctr = 0;
203 uint SharedRuntime::_wrong_method_ctr = 0;
204 uint SharedRuntime::_resolve_static_ctr = 0;
205 uint SharedRuntime::_resolve_virtual_ctr = 0;
206 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
207 uint SharedRuntime::_implicit_null_throws = 0;
208 uint SharedRuntime::_implicit_div0_throws = 0;
209
210 int64_t SharedRuntime::_nof_normal_calls = 0;
211 int64_t SharedRuntime::_nof_inlined_calls = 0;
212 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
213 int64_t SharedRuntime::_nof_static_calls = 0;
214 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
215 int64_t SharedRuntime::_nof_interface_calls = 0;
216 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
217
218 uint SharedRuntime::_new_instance_ctr=0;
219 uint SharedRuntime::_new_array_ctr=0;
220 uint SharedRuntime::_multi2_ctr=0;
221 uint SharedRuntime::_multi3_ctr=0;
222 uint SharedRuntime::_multi4_ctr=0;
223 uint SharedRuntime::_multi5_ctr=0;
224 uint SharedRuntime::_mon_enter_stub_ctr=0;
225 uint SharedRuntime::_mon_exit_stub_ctr=0;
226 uint SharedRuntime::_mon_enter_ctr=0;
240 uint SharedRuntime::_unsafe_set_memory_ctr=0;
241
242 int SharedRuntime::_ICmiss_index = 0;
243 int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
244 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
245
246
247 void SharedRuntime::trace_ic_miss(address at) {
248 for (int i = 0; i < _ICmiss_index; i++) {
249 if (_ICmiss_at[i] == at) {
250 _ICmiss_count[i]++;
251 return;
252 }
253 }
254 int index = _ICmiss_index++;
255 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
256 _ICmiss_at[index] = at;
257 _ICmiss_count[index] = 1;
258 }
259
260 void SharedRuntime::print_ic_miss_histogram() {
261 if (ICMissHistogram) {
262 tty->print_cr("IC Miss Histogram:");
263 int tot_misses = 0;
264 for (int i = 0; i < _ICmiss_index; i++) {
265 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
266 tot_misses += _ICmiss_count[i];
267 }
268 tty->print_cr("Total IC misses: %7d", tot_misses);
269 }
270 }
271
272 #ifdef COMPILER2
273 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
// Print a jboolean argument (printf-style debug node support, see the
// format note above).
void SharedRuntime::debug_print_value(jboolean x) {
  tty->print_cr("boolean %d", x);
}
277
// Print a jbyte argument (printf-style debug node support).
void SharedRuntime::debug_print_value(jbyte x) {
  tty->print_cr("byte %d", x);
}
281
// Print a jshort argument (printf-style debug node support).
void SharedRuntime::debug_print_value(jshort x) {
  tty->print_cr("short %d", x);
}
285
286 void SharedRuntime::debug_print_value(jchar x) {
287 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
288 }
734
// Construct a new exception of type 'name' with the given detail message
// and hand it to the Handle-based overload, which throws it and posts the
// JVMTI exception event.
void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Symbol* name, const char *message) {
  Handle h_exception = Exceptions::new_exception(current, name, message);
  throw_and_post_jvmti_exception(current, h_exception);
}
739
// The interpreter code to call this tracing function is only
// called/generated when UL is on for redefine, class and has the right level
// and tags. Since obsolete methods are never compiled, we don't have
// to modify the compilers to generate calls to this function.
//
// Logs entry into an obsolete (redefined) method; leaf call from the
// interpreter.
JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
    JavaThread* thread, Method* method))
  if (method->is_obsolete()) {
    // We are calling an obsolete method, but this is not necessarily
    // an error. Our method could have been redefined just after we
    // fetched the Method* from the constant pool.
    ResourceMark rm;
    log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
  }
  return 0;
JRT_END
756
757 // ret_pc points into caller; we are returning caller's exception handler
758 // for given exception
759 // Note that the implementation of this method assumes it's only called when an exception has actually occured
760 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
761 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
762 assert(nm != nullptr, "must exist");
763 ResourceMark rm;
764
765 #if INCLUDE_JVMCI
766 if (nm->is_compiled_by_jvmci()) {
767 // lookup exception handler for this pc
768 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
769 ExceptionHandlerTable table(nm);
770 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
771 if (t != nullptr) {
772 return nm->code_begin() + t->pco();
773 } else {
1387
1388 // determine call info & receiver
1389 // note: a) receiver is null for static calls
1390 // b) an exception is thrown if receiver is null for non-static calls
1391 CallInfo call_info;
1392 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1393 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1394
1395 NoSafepointVerifier nsv;
1396
1397 methodHandle callee_method(current, call_info.selected_method());
1398
1399 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1400 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1401 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1402 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1403 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1404
1405 assert(!caller_nm->is_unloading(), "It should not be unloading");
1406
1407 #ifndef PRODUCT
1408 // tracing/debugging/statistics
1409 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1410 (is_virtual) ? (&_resolve_virtual_ctr) :
1411 (&_resolve_static_ctr);
1412 AtomicAccess::inc(addr);
1413
1414 if (TraceCallFixup) {
1415 ResourceMark rm(current);
1416 tty->print("resolving %s%s (%s) call to",
1417 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1418 Bytecodes::name(invoke_code));
1419 callee_method->print_short_name(tty);
1420 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1421 p2i(caller_frame.pc()), p2i(callee_method->code()));
1422 }
1423 #endif
1424
1425 if (invoke_code == Bytecodes::_invokestatic) {
1426 assert(callee_method->method_holder()->is_initialized() ||
1427 callee_method->method_holder()->is_reentrant_initialization(current),
1428 "invalid class initialization state for invoke_static");
1429 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1430 // In order to keep class initialization check, do not patch call
1431 // site for static call when the class is not fully initialized.
1432 // Proper check is enforced by call site re-resolution on every invocation.
1433 //
1449
1450 // Make sure the callee nmethod does not get deoptimized and removed before
1451 // we are done patching the code.
1452
1453
1454 CompiledICLocker ml(caller_nm);
1455 if (is_virtual && !is_optimized) {
1456 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1457 inline_cache->update(&call_info, receiver->klass());
1458 } else {
1459 // Callsite is a direct call - set it to the destination method
1460 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1461 callsite->set(callee_method);
1462 }
1463
1464 return callee_method;
1465 }
1466
1467 // Inline caches exist only in compiled code
1468 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1469 #ifdef ASSERT
1470 RegisterMap reg_map(current,
1471 RegisterMap::UpdateMap::skip,
1472 RegisterMap::ProcessFrames::include,
1473 RegisterMap::WalkContinuation::skip);
1474 frame stub_frame = current->last_frame();
1475 assert(stub_frame.is_runtime_frame(), "sanity check");
1476 frame caller_frame = stub_frame.sender(®_map);
1477 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1478 #endif /* ASSERT */
1479
1480 methodHandle callee_method;
1481 JRT_BLOCK
1482 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1483 // Return Method* through TLS
1484 current->set_vm_result_metadata(callee_method());
1485 JRT_BLOCK_END
1486 // return compiled code entry point after potential safepoints
1487 return get_resolved_entry(current, callee_method);
1488 JRT_END
1489
1490
1491 // Handle call site that has been made non-entrant
1492 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1493 // 6243940 We might end up in here if the callee is deoptimized
1494 // as we race to call it. We don't want to take a safepoint if
1495 // the caller was interpreted because the caller frame will look
1496 // interpreted to the stack walkers and arguments are now
1497 // "compiled" so it is much better to make this transition
1498 // invisible to the stack walking code. The i2c path will
1499 // place the callee method in the callee_target. It is stashed
1500 // there because if we try and find the callee by normal means a
1501 // safepoint is possible and have trouble gc'ing the compiled args.
1502 RegisterMap reg_map(current,
1503 RegisterMap::UpdateMap::skip,
1504 RegisterMap::ProcessFrames::include,
1505 RegisterMap::WalkContinuation::skip);
1506 frame stub_frame = current->last_frame();
1507 assert(stub_frame.is_runtime_frame(), "sanity check");
1508 frame caller_frame = stub_frame.sender(®_map);
1509
1510 if (caller_frame.is_interpreted_frame() ||
1511 caller_frame.is_entry_frame() ||
1512 caller_frame.is_upcall_stub_frame()) {
1525 // so bypassing it in c2i adapter is benign.
1526 return callee->get_c2i_no_clinit_check_entry();
1527 } else {
1528 return callee->get_c2i_entry();
1529 }
1530 }
1531
1532 // Must be compiled to compiled path which is safe to stackwalk
1533 methodHandle callee_method;
1534 JRT_BLOCK
1535 // Force resolving of caller (if we called from compiled frame)
1536 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1537 current->set_vm_result_metadata(callee_method());
1538 JRT_BLOCK_END
1539 // return compiled code entry point after potential safepoints
1540 return get_resolved_entry(current, callee_method);
1541 JRT_END
1542
1543 // Handle abstract method call
1544 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1545 // Verbose error message for AbstractMethodError.
1546 // Get the called method from the invoke bytecode.
1547 vframeStream vfst(current, true);
1548 assert(!vfst.at_end(), "Java frame must exist");
1549 methodHandle caller(current, vfst.method());
1550 Bytecode_invoke invoke(caller, vfst.bci());
1551 DEBUG_ONLY( invoke.verify(); )
1552
1553 // Find the compiled caller frame.
1554 RegisterMap reg_map(current,
1555 RegisterMap::UpdateMap::include,
1556 RegisterMap::ProcessFrames::include,
1557 RegisterMap::WalkContinuation::skip);
1558 frame stubFrame = current->last_frame();
1559 assert(stubFrame.is_runtime_frame(), "must be");
1560 frame callerFrame = stubFrame.sender(®_map);
1561 assert(callerFrame.is_compiled_frame(), "must be");
1562
1563 // Install exception and return forward entry.
1564 address res = SharedRuntime::throw_AbstractMethodError_entry();
1571 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1572 }
1573 JRT_BLOCK_END
1574 return res;
1575 JRT_END
1576
1577 // return verified_code_entry if interp_only_mode is not set for the current thread;
1578 // otherwise return c2i entry.
1579 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1580 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1581 // In interp_only_mode we need to go to the interpreted entry
1582 // The c2i won't patch in this mode -- see fixup_callers_callsite
1583 return callee_method->get_c2i_entry();
1584 }
1585 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1586 return callee_method->verified_code_entry();
1587 }
1588
1589 // resolve a static call and patch code
1590 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1591 methodHandle callee_method;
1592 bool enter_special = false;
1593 JRT_BLOCK
1594 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1595 current->set_vm_result_metadata(callee_method());
1596 JRT_BLOCK_END
1597 // return compiled code entry point after potential safepoints
1598 return get_resolved_entry(current, callee_method);
1599 JRT_END
1600
// resolve virtual call and update inline cache to monomorphic
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // resolve_helper(is_virtual=true, is_optimized=false)
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    // Pass the resolved Method* back through thread-local storage.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1611
1612
// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache). Patch code to resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // resolve_helper(is_virtual=true, is_optimized=true)
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    // Pass the resolved Method* back through thread-local storage.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1624
1625 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1626 JavaThread* current = THREAD;
1627 ResourceMark rm(current);
1628 CallInfo call_info;
1629 Bytecodes::Code bc;
1630
1631 // receiver is null for static calls. An exception is thrown for null
1632 // receivers for non-static calls
1633 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1634
1635 methodHandle callee_method(current, call_info.selected_method());
1636
1637 #ifndef PRODUCT
1638 AtomicAccess::inc(&_ic_miss_ctr);
1639
1640 // Statistics & Tracing
1641 if (TraceCallFixup) {
1642 ResourceMark rm(current);
1643 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1644 callee_method->print_short_name(tty);
1645 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1646 }
1647
1648 if (ICMissHistogram) {
1649 MutexLocker m(VMStatistic_lock);
1650 RegisterMap reg_map(current,
1651 RegisterMap::UpdateMap::skip,
1652 RegisterMap::ProcessFrames::include,
1653 RegisterMap::WalkContinuation::skip);
1654 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1655 // produce statistics under the lock
1656 trace_ic_miss(f.pc());
1657 }
1658 #endif
1659
1742 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1743 cdc->set_to_clean();
1744 break;
1745 }
1746
1747 case relocInfo::virtual_call_type: {
1748 // compiled, dispatched call (which used to call an interpreted method)
1749 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1750 inline_cache->set_to_clean();
1751 break;
1752 }
1753 default:
1754 break;
1755 }
1756 }
1757 }
1758 }
1759
1760 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1761
1762
1763 #ifndef PRODUCT
1764 AtomicAccess::inc(&_wrong_method_ctr);
1765
1766 if (TraceCallFixup) {
1767 ResourceMark rm(current);
1768 tty->print("handle_wrong_method reresolving call to");
1769 callee_method->print_short_name(tty);
1770 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1771 }
1772 #endif
1773
1774 return callee_method;
1775 }
1776
1777 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1778 // The faulting unsafe accesses should be changed to throw the error
1779 // synchronously instead. Meanwhile the faulting instruction will be
1780 // skipped over (effectively turning it into a no-op) and an
1781 // asynchronous exception will be raised which the thread will
1782 // handle at a later point. If the instruction is a load it will
1783 // return garbage.
1784
1785 // Request an async exception.
2028 if (CheckJNICalls) {
2029 fatal("Object has been unlocked by JNI");
2030 }
2031 return;
2032 }
2033 ObjectSynchronizer::exit(obj, lock, current);
2034 }
2035
// Handles the uncommon cases of monitor unlocking in compiled code
JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  // Delegate to the shared slow-path unlock helper.
  SharedRuntime::monitor_exit_helper(obj, lock, current);
JRT_END
2041
2042 #ifndef PRODUCT
2043
// Dump SharedRuntime's non-product counters (IC misses, call-site
// resolution, allocation/monitor/arraycopy slow paths) to tty, wrapped in
// an XML 'statistics' element when xtty logging is active. Counters that
// are usually zero are printed only when non-zero.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram();

  // Dump the JRT_ENTRY counters
  if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
  if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
  if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
  if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
  if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
  if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);

  // Call resolution counters are always printed.
  tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
  tty->print_cr("%5u wrong method", _wrong_method_ctr);
  tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
  tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
  tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);

  if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
  if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
  if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
  if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
  if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
  if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
  if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
  if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
  if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
  if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
  if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
  if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
  if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
  if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
  if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
  if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
  if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);

  AdapterHandlerLibrary::print_statistics();

  if (xtty != nullptr) xtty->tail("statistics");
}
2086
// Percentage of x relative to y; a denominator below 1 is clamped to 1 so
// the division is always safe.
inline double percent(int64_t x, int64_t y) {
  int64_t denom = (y > 1) ? y : 1;
  return 100.0 * (double)x / (double)denom;
}
2090
2091 class MethodArityHistogram {
2092 public:
2093 enum { MAX_ARITY = 256 };
2094 private:
2095 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2096 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2097 static uint64_t _total_compiled_calls;
2098 static uint64_t _max_compiled_calls_per_method;
2099 static int _max_arity; // max. arity seen
2100 static int _max_size; // max. arg size seen
2101
2102 static void add_method_to_histogram(nmethod* nm) {
2103 Method* method = (nm == nullptr) ? nullptr : nm->method();
2104 if (method != nullptr) {
2105 ArgumentCount args(method->signature());
2106 int arity = args.size() + (method->is_static() ? 0 : 1);
2151 // Take the Compile_lock to protect against changes in the CodeBlob structures
2152 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2153 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2154 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2155 _max_arity = _max_size = 0;
2156 _total_compiled_calls = 0;
2157 _max_compiled_calls_per_method = 0;
2158 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2159 CodeCache::nmethods_do(add_method_to_histogram);
2160 print_histogram();
2161 }
2162 };
2163
// Out-of-class storage for MethodArityHistogram's static counters.
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2170
2171 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2172 tty->print_cr("Calls from compiled code:");
2173 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2174 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2175 int64_t mono_i = _nof_interface_calls;
2176 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2177 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2178 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2179 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2180 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2181 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2182 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2183 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2184 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2185 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2186 tty->cr();
2187 tty->print_cr("Note 1: counter updates are not MT-safe.");
2188 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2189 tty->print_cr(" %% in nested categories are relative to their category");
2190 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2191 tty->cr();
2495 }
2496 #endif // INCLUDE_CDS
2497 if (entry == nullptr) {
2498 assert_lock_strong(AdapterHandlerLibrary_lock);
2499 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2500 if (entry_p != nullptr) {
2501 entry = *entry_p;
2502 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2503 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2504 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2505 #ifndef PRODUCT
2506 _runtime_hits++;
2507 #endif
2508 }
2509 }
2510 AdapterFingerPrint::deallocate(fp);
2511 return entry;
2512 }
2513
2514 #ifndef PRODUCT
2515 static void print_table_statistics() {
2516 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2517 return sizeof(*key) + sizeof(*a);
2518 };
2519 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2520 ts.print(tty, "AdapterHandlerTable");
2521 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2522 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2523 int total_hits = _archived_hits + _runtime_hits;
2524 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2525 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2526 }
2527 #endif
2528
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
// Cached handlers for a few common signatures (presumably the hottest
// shapes; shared rather than re-generated per method -- TODO confirm).
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
#if INCLUDE_CDS
// Table of adapters stored in the CDS archive.
ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
#endif // INCLUDE_CDS
static const int AdapterHandlerLibrary_size = 16*K;
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;  // accessed via buffer_blob()
volatile uint AdapterHandlerLibrary::_id_counter = 0;
2542
// Accessor for the library's code buffer; asserts it has been initialized.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  assert(_buffer != nullptr, "should be initialized");
  return _buffer;
}
2547
3410 };
3411 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3412 _adapter_handler_table->iterate(findblob_runtime_table);
3413 }
3414 assert(found, "Should have found handler");
3415 }
3416
// Print this adapter entry on 'st': its address and fingerprint, plus the
// i2c/c2i entry points when an adapter blob has been generated.
void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
  st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
  if (adapter_blob() != nullptr) {
    st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
    st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
    st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
    // The no-clinit-check entry is optional; print only when present.
    if (get_c2i_no_clinit_check_entry() != nullptr) {
      st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
    }
  }
  st->cr();
}
3429
#ifndef PRODUCT

// Forward to the file-local adapter-table statistics printer.
void AdapterHandlerLibrary::print_statistics() {
  print_table_statistics();
}

#endif /* PRODUCT */
3437
// Re-arm the current thread's reserved stack zone and reset the reserved
// stack activation watermark to the stack base.
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  StackOverflow* overflow_state = current->stack_overflow_state();
  // Asserts the zone was actually disabled (see the flag argument).
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3444
3445 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3446 ResourceMark rm(current);
3447 frame activation;
3448 nmethod* nm = nullptr;
3449 int count = 1;
3450
3451 assert(fr.is_java_frame(), "Must start on Java frame");
3452
3453 RegisterMap map(JavaThread::current(),
3454 RegisterMap::UpdateMap::skip,
3455 RegisterMap::ProcessFrames::skip,
3456 RegisterMap::WalkContinuation::skip); // don't walk continuations
3457 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|
51 #include "oops/klass.hpp"
52 #include "oops/method.inline.hpp"
53 #include "oops/objArrayKlass.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "prims/forte.hpp"
56 #include "prims/jvmtiExport.hpp"
57 #include "prims/jvmtiThreadState.hpp"
58 #include "prims/methodHandles.hpp"
59 #include "prims/nativeLookup.hpp"
60 #include "runtime/arguments.hpp"
61 #include "runtime/atomicAccess.hpp"
62 #include "runtime/basicLock.inline.hpp"
63 #include "runtime/frame.inline.hpp"
64 #include "runtime/handles.inline.hpp"
65 #include "runtime/init.hpp"
66 #include "runtime/interfaceSupport.inline.hpp"
67 #include "runtime/java.hpp"
68 #include "runtime/javaCalls.hpp"
69 #include "runtime/jniHandles.inline.hpp"
70 #include "runtime/osThread.hpp"
71 #include "runtime/perfData.inline.hpp"
72 #include "runtime/sharedRuntime.hpp"
73 #include "runtime/stackWatermarkSet.hpp"
74 #include "runtime/stubRoutines.hpp"
75 #include "runtime/synchronizer.hpp"
76 #include "runtime/timerTrace.hpp"
77 #include "runtime/vframe.inline.hpp"
78 #include "runtime/vframeArray.hpp"
79 #include "runtime/vm_version.hpp"
80 #include "services/management.hpp"
81 #include "utilities/copy.hpp"
82 #include "utilities/dtrace.hpp"
83 #include "utilities/events.hpp"
84 #include "utilities/globalDefinitions.hpp"
85 #include "utilities/hashTable.hpp"
86 #include "utilities/macros.hpp"
87 #include "utilities/xmlstream.hpp"
88 #ifdef COMPILER1
89 #include "c1/c1_Runtime1.hpp"
90 #endif
91 #ifdef COMPILER2
92 #include "opto/runtime.hpp"
93 #endif
94 #if INCLUDE_JFR
95 #include "jfr/jfr.inline.hpp"
96 #endif
97
98 // Shared runtime stub routines reside in their own unique blob with a
99 // single entry point
100
101
102 #define SHARED_STUB_FIELD_DEFINE(name, type) \
103 type* SharedRuntime::BLOB_FIELD_NAME(name);
104 SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
105 #undef SHARED_STUB_FIELD_DEFINE
106
107 nmethod* SharedRuntime::_cont_doYield_stub;
108
109 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
110 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time = nullptr;
111 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time = nullptr;
112 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
113 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time = nullptr;
114
115 #if 0
116 // TODO tweak global stub name generation to match this
117 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
118 const char *SharedRuntime::_stub_names[] = {
119 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
120 };
121 #endif
122
123 //----------------------------generate_stubs-----------------------------------
// Generate the StackOverflowError throw stub ahead of the other shared stubs.
void SharedRuntime::generate_initial_stubs() {
  // Build this early so it's available for the interpreter.
  _throw_StackOverflowError_blob =
    generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
                             CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
}
130
131 void SharedRuntime::generate_stubs() {
132 _wrong_method_blob =
133 generate_resolve_blob(StubId::shared_wrong_method_id,
134 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
164 generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
165 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
166
167 #if COMPILER2_OR_JVMCI
168 // Vectors are generated only by C2 and JVMCI.
169 bool support_wide = is_wide_vector(MaxVectorSize);
170 if (support_wide) {
171 _polling_page_vectors_safepoint_handler_blob =
172 generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
173 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
174 }
175 #endif // COMPILER2_OR_JVMCI
176 _polling_page_safepoint_handler_blob =
177 generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
178 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
179 _polling_page_return_handler_blob =
180 generate_handler_blob(StubId::shared_polling_page_return_handler_id,
181 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
182
183 generate_deopt_blob();
184
185 if (UsePerfData) {
186 EXCEPTION_MARK;
187 NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resovle_opt_virtual_call");
188 NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resovle_virtual_call");
189 NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resovle_static_call");
190 NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
191 NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time , SUN_CI, "ic_miss");
192 if (HAS_PENDING_EXCEPTION) {
193 vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
194 }
195 }
196 }
197
// One-time setup of the i2c/c2i adapter handler machinery.
void SharedRuntime::init_adapter_library() {
  AdapterHandlerLibrary::initialize();
}
201
202 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
203 st->print(" %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
204 if (TraceThreadTime) {
205 st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
206 }
207 st->print(" / %5d events", cnt);
208 st->cr();
209 }
210
// Dump the SharedRuntime perf counters for the resolution / miss entry
// points. The counters exist only when UsePerfData is on (they are created
// in generate_stubs()).
void SharedRuntime::print_counters_on(outputStream* st) {
  st->print_cr("SharedRuntime:");
  if (UsePerfData) {
    print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
    print_counter_on(st, "resolve_virtual_call:", _perf_resolve_virtual_total_time, _resolve_virtual_ctr);
    print_counter_on(st, "resolve_static_call:", _perf_resolve_static_total_time, _resolve_static_ctr);
    print_counter_on(st, "handle_wrong_method:", _perf_handle_wrong_method_total_time, _wrong_method_ctr);
    print_counter_on(st, "ic_miss:", _perf_ic_miss_total_time, _ic_miss_ctr);

    // Sum raw ticks first, then convert once to microseconds.
    jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_static_total_time->elapsed_counter_value() +
                                                          _perf_handle_wrong_method_total_time->elapsed_counter_value() +
                                                          _perf_ic_miss_total_time->elapsed_counter_value());
    st->print("Total: " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
    if (TraceThreadTime) {
      // Per-thread CPU time, reported only with -XX:+TraceThreadTime.
      jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
                                                           _perf_resolve_virtual_total_time->thread_counter_value() +
                                                           _perf_resolve_static_total_time->thread_counter_value() +
                                                           _perf_handle_wrong_method_total_time->thread_counter_value() +
                                                           _perf_ic_miss_total_time->thread_counter_value());
      st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);

    }
    st->cr();
  } else {
    st->print_cr(" no data (UsePerfData is turned off)");
  }
}
240
241 #if INCLUDE_JFR
242 //------------------------------generate jfr runtime stubs ------
// Generate the JFR runtime stubs (checkpoint write and lease return);
// timed under -Xlog:startuptime.
void SharedRuntime::generate_jfr_stubs() {
  ResourceMark rm;
  const char* timer_msg = "SharedRuntime generate_jfr_stubs";
  TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));

  _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
  _jfr_return_lease_blob = generate_jfr_return_lease();
}
251
252 #endif // INCLUDE_JFR
253
254 #include <math.h>
255
256 // Implementation of SharedRuntime
257
// For statistics
// Updated via AtomicAccess::inc from the resolve/miss runtime entry points.
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;

#ifndef PRODUCT
// Counters below are maintained only in non-PRODUCT builds.
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-site classification counters (reported by print_call_statistics_on).
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Bounded table of inline-cache miss sites (populated by trace_ic_miss).
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
304
305
306 void SharedRuntime::trace_ic_miss(address at) {
307 for (int i = 0; i < _ICmiss_index; i++) {
308 if (_ICmiss_at[i] == at) {
309 _ICmiss_count[i]++;
310 return;
311 }
312 }
313 int index = _ICmiss_index++;
314 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
315 _ICmiss_at[index] = at;
316 _ICmiss_count[index] = 1;
317 }
318
319 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
320 if (ICMissHistogram) {
321 st->print_cr("IC Miss Histogram:");
322 int tot_misses = 0;
323 for (int i = 0; i < _ICmiss_index; i++) {
324 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
325 tot_misses += _ICmiss_count[i];
326 }
327 st->print_cr("Total IC misses: %7d", tot_misses);
328 }
329 }
330
331 #ifdef COMPILER2
332 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
// Debug print helper for printf-style debug nodes: boolean value.
void SharedRuntime::debug_print_value(jboolean x) {
  tty->print_cr("boolean %d", x);
}
336
// Debug print helper for printf-style debug nodes: byte value.
void SharedRuntime::debug_print_value(jbyte x) {
  tty->print_cr("byte %d", x);
}
340
// Debug print helper for printf-style debug nodes: short value.
void SharedRuntime::debug_print_value(jshort x) {
  tty->print_cr("short %d", x);
}
344
345 void SharedRuntime::debug_print_value(jchar x) {
346 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
347 }
793
// Convenience overload: construct the named exception with the given message,
// then delegate to the Handle-based overload to throw it and post the JVMTI
// exception event.
void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Symbol* name, const char *message) {
  Handle h_exception = Exceptions::new_exception(current, name, message);
  throw_and_post_jvmti_exception(current, h_exception);
}
798
799 // The interpreter code to call this tracing function is only
800 // called/generated when UL is on for redefine, class and has the right level
801 // and tags. Since obsolete methods are never compiled, we don't have
802 // to modify the compilers to generate calls to this function.
803 //
// Trace entry into a (possibly obsolete) interpreted method; called from
// interpreter-generated code only when the relevant UL tags are enabled.
JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
    JavaThread* thread, Method* method))
  if (method->is_obsolete()) {
    // We are calling an obsolete method, but this is not necessarily
    // an error. Our method could have been redefined just after we
    // fetched the Method* from the constant pool.
    ResourceMark rm;
    log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
  }

  // Separately, log every method entry at Trace level for
  // interpreter+bytecode.
  LogStreamHandle(Trace, interpreter, bytecode) log;
  if (log.is_enabled()) {
    ResourceMark rm;
    log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
              p2i(thread),
              (method->is_static() ? "static" : "virtual"),
              method->name_and_sig_as_C_string(),
              (method->is_native() ? " native" : ""),
              (thread->class_being_initialized() != nullptr ? " clinit" : ""),
              (method->method_holder()->is_initialized() ? "" : " being_initialized"));
  }
  // Return value is not meaningful to the generated call site.
  return 0;
JRT_END
827
828 // ret_pc points into caller; we are returning caller's exception handler
829 // for given exception
830 // Note that the implementation of this method assumes it's only called when an exception has actually occured
831 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
832 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
833 assert(nm != nullptr, "must exist");
834 ResourceMark rm;
835
836 #if INCLUDE_JVMCI
837 if (nm->is_compiled_by_jvmci()) {
838 // lookup exception handler for this pc
839 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
840 ExceptionHandlerTable table(nm);
841 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
842 if (t != nullptr) {
843 return nm->code_begin() + t->pco();
844 } else {
1458
1459 // determine call info & receiver
1460 // note: a) receiver is null for static calls
1461 // b) an exception is thrown if receiver is null for non-static calls
1462 CallInfo call_info;
1463 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1464 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1465
1466 NoSafepointVerifier nsv;
1467
1468 methodHandle callee_method(current, call_info.selected_method());
1469
1470 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1471 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1472 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1473 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1474 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1475
1476 assert(!caller_nm->is_unloading(), "It should not be unloading");
1477
1478 // tracing/debugging/statistics
1479 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1480 (is_virtual) ? (&_resolve_virtual_ctr) :
1481 (&_resolve_static_ctr);
1482 AtomicAccess::inc(addr);
1483
1484 #ifndef PRODUCT
1485 if (TraceCallFixup) {
1486 ResourceMark rm(current);
1487 tty->print("resolving %s%s (%s) call to",
1488 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1489 Bytecodes::name(invoke_code));
1490 callee_method->print_short_name(tty);
1491 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1492 p2i(caller_frame.pc()), p2i(callee_method->code()));
1493 }
1494 #endif
1495
1496 if (invoke_code == Bytecodes::_invokestatic) {
1497 assert(callee_method->method_holder()->is_initialized() ||
1498 callee_method->method_holder()->is_reentrant_initialization(current),
1499 "invalid class initialization state for invoke_static");
1500 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1501 // In order to keep class initialization check, do not patch call
1502 // site for static call when the class is not fully initialized.
1503 // Proper check is enforced by call site re-resolution on every invocation.
1504 //
1520
1521 // Make sure the callee nmethod does not get deoptimized and removed before
1522 // we are done patching the code.
1523
1524
1525 CompiledICLocker ml(caller_nm);
1526 if (is_virtual && !is_optimized) {
1527 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1528 inline_cache->update(&call_info, receiver->klass());
1529 } else {
1530 // Callsite is a direct call - set it to the destination method
1531 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1532 callsite->set(callee_method);
1533 }
1534
1535 return callee_method;
1536 }
1537
1538 // Inline caches exist only in compiled code
1539 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1540 PerfTraceTime timer(_perf_ic_miss_total_time);
1541
1542 #ifdef ASSERT
1543 RegisterMap reg_map(current,
1544 RegisterMap::UpdateMap::skip,
1545 RegisterMap::ProcessFrames::include,
1546 RegisterMap::WalkContinuation::skip);
1547 frame stub_frame = current->last_frame();
1548 assert(stub_frame.is_runtime_frame(), "sanity check");
1549 frame caller_frame = stub_frame.sender(®_map);
1550 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1551 #endif /* ASSERT */
1552
1553 methodHandle callee_method;
1554 JRT_BLOCK
1555 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1556 // Return Method* through TLS
1557 current->set_vm_result_metadata(callee_method());
1558 JRT_BLOCK_END
1559 // return compiled code entry point after potential safepoints
1560 return get_resolved_entry(current, callee_method);
1561 JRT_END
1562
1563
1564 // Handle call site that has been made non-entrant
1565 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1566 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1567
1568 // 6243940 We might end up in here if the callee is deoptimized
1569 // as we race to call it. We don't want to take a safepoint if
1570 // the caller was interpreted because the caller frame will look
1571 // interpreted to the stack walkers and arguments are now
1572 // "compiled" so it is much better to make this transition
1573 // invisible to the stack walking code. The i2c path will
1574 // place the callee method in the callee_target. It is stashed
1575 // there because if we try and find the callee by normal means a
1576 // safepoint is possible and have trouble gc'ing the compiled args.
1577 RegisterMap reg_map(current,
1578 RegisterMap::UpdateMap::skip,
1579 RegisterMap::ProcessFrames::include,
1580 RegisterMap::WalkContinuation::skip);
1581 frame stub_frame = current->last_frame();
1582 assert(stub_frame.is_runtime_frame(), "sanity check");
1583 frame caller_frame = stub_frame.sender(®_map);
1584
1585 if (caller_frame.is_interpreted_frame() ||
1586 caller_frame.is_entry_frame() ||
1587 caller_frame.is_upcall_stub_frame()) {
1600 // so bypassing it in c2i adapter is benign.
1601 return callee->get_c2i_no_clinit_check_entry();
1602 } else {
1603 return callee->get_c2i_entry();
1604 }
1605 }
1606
1607 // Must be compiled to compiled path which is safe to stackwalk
1608 methodHandle callee_method;
1609 JRT_BLOCK
1610 // Force resolving of caller (if we called from compiled frame)
1611 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1612 current->set_vm_result_metadata(callee_method());
1613 JRT_BLOCK_END
1614 // return compiled code entry point after potential safepoints
1615 return get_resolved_entry(current, callee_method);
1616 JRT_END
1617
1618 // Handle abstract method call
1619 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1620 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1621
1622 // Verbose error message for AbstractMethodError.
1623 // Get the called method from the invoke bytecode.
1624 vframeStream vfst(current, true);
1625 assert(!vfst.at_end(), "Java frame must exist");
1626 methodHandle caller(current, vfst.method());
1627 Bytecode_invoke invoke(caller, vfst.bci());
1628 DEBUG_ONLY( invoke.verify(); )
1629
1630 // Find the compiled caller frame.
1631 RegisterMap reg_map(current,
1632 RegisterMap::UpdateMap::include,
1633 RegisterMap::ProcessFrames::include,
1634 RegisterMap::WalkContinuation::skip);
1635 frame stubFrame = current->last_frame();
1636 assert(stubFrame.is_runtime_frame(), "must be");
1637 frame callerFrame = stubFrame.sender(®_map);
1638 assert(callerFrame.is_compiled_frame(), "must be");
1639
1640 // Install exception and return forward entry.
1641 address res = SharedRuntime::throw_AbstractMethodError_entry();
1648 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1649 }
1650 JRT_BLOCK_END
1651 return res;
1652 JRT_END
1653
1654 // return verified_code_entry if interp_only_mode is not set for the current thread;
1655 // otherwise return c2i entry.
1656 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1657 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1658 // In interp_only_mode we need to go to the interpreted entry
1659 // The c2i won't patch in this mode -- see fixup_callers_callsite
1660 return callee_method->get_c2i_entry();
1661 }
1662 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1663 return callee_method->verified_code_entry();
1664 }
1665
1666 // resolve a static call and patch code
1667 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1668 PerfTraceTime timer(_perf_resolve_static_total_time);
1669
1670 methodHandle callee_method;
1671 bool enter_special = false;
1672 JRT_BLOCK
1673 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1674 current->set_vm_result_metadata(callee_method());
1675 JRT_BLOCK_END
1676 // return compiled code entry point after potential safepoints
1677 return get_resolved_entry(current, callee_method);
1678 JRT_END
1679
1680 // resolve virtual call and update inline cache to monomorphic
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual == true, is_optimized == false.
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1692
1693
1694 // Resolve a virtual call that can be statically bound (e.g., always
1695 // monomorphic, so it has no inline cache). Patch code to resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual == true, is_optimized == true.
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1707
1708 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1709 JavaThread* current = THREAD;
1710 ResourceMark rm(current);
1711 CallInfo call_info;
1712 Bytecodes::Code bc;
1713
1714 // receiver is null for static calls. An exception is thrown for null
1715 // receivers for non-static calls
1716 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1717
1718 methodHandle callee_method(current, call_info.selected_method());
1719
1720 AtomicAccess::inc(&_ic_miss_ctr);
1721
1722 #ifndef PRODUCT
1723 // Statistics & Tracing
1724 if (TraceCallFixup) {
1725 ResourceMark rm(current);
1726 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1727 callee_method->print_short_name(tty);
1728 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1729 }
1730
1731 if (ICMissHistogram) {
1732 MutexLocker m(VMStatistic_lock);
1733 RegisterMap reg_map(current,
1734 RegisterMap::UpdateMap::skip,
1735 RegisterMap::ProcessFrames::include,
1736 RegisterMap::WalkContinuation::skip);
1737 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1738 // produce statistics under the lock
1739 trace_ic_miss(f.pc());
1740 }
1741 #endif
1742
1825 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1826 cdc->set_to_clean();
1827 break;
1828 }
1829
1830 case relocInfo::virtual_call_type: {
1831 // compiled, dispatched call (which used to call an interpreted method)
1832 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1833 inline_cache->set_to_clean();
1834 break;
1835 }
1836 default:
1837 break;
1838 }
1839 }
1840 }
1841 }
1842
1843 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1844
1845 AtomicAccess::inc(&_wrong_method_ctr);
1846
1847 #ifndef PRODUCT
1848 if (TraceCallFixup) {
1849 ResourceMark rm(current);
1850 tty->print("handle_wrong_method reresolving call to");
1851 callee_method->print_short_name(tty);
1852 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1853 }
1854 #endif
1855
1856 return callee_method;
1857 }
1858
1859 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1860 // The faulting unsafe accesses should be changed to throw the error
1861 // synchronously instead. Meanwhile the faulting instruction will be
1862 // skipped over (effectively turning it into a no-op) and an
1863 // asynchronous exception will be raised which the thread will
1864 // handle at a later point. If the instruction is a load it will
1865 // return garbage.
1866
1867 // Request an async exception.
2110 if (CheckJNICalls) {
2111 fatal("Object has been unlocked by JNI");
2112 }
2113 return;
2114 }
2115 ObjectSynchronizer::exit(obj, lock, current);
2116 }
2117
2118 // Handles the uncommon cases of monitor unlocking in compiled code
JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  // Delegate to the shared slow-path monitor-exit helper.
  SharedRuntime::monitor_exit_helper(obj, lock, current);
JRT_END
2123
2124 #ifndef PRODUCT
2125
// Dump all SharedRuntime statistics to tty (non-PRODUCT builds only).
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  // Wrap the dump in an XML element when LogCompilation output is active.
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram_on(tty);
  SharedRuntime::print_counters_on(tty);
  AdapterHandlerLibrary::print_statistics_on(tty);

  if (xtty != nullptr) xtty->tail("statistics");
}
2136
2137 //void SharedRuntime::print_counters_on(outputStream* st) {
2138 // // Dump the JRT_ENTRY counters
2139 // if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2140 // if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2141 // if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2142 // if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2143 // if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2144 // if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2145 //
2146 // st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2147 // st->print_cr("%5u wrong method", _wrong_method_ctr);
2148 // st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2149 // st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2150 // st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2151 //
2152 // if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2153 // if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2154 // if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2155 // if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2156 // if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2157 // if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2158 // if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2159 // if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2160 // if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2161 // if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2162 // if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2163 // if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2164 // if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2165 // if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2166 // if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2167 // if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
2168 // if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2169 //}
2170
// Express x as a percentage of y; a denominator below 1 is clamped to 1 so
// the result is always well-defined (equivalent to MAX2(y, (int64_t)1)).
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y > 1) ? y : 1;
  return 100.0 * (double)x / (double)denom;
}
2174
// Histogram of the arity (#args, receiver included for non-static methods)
// and argument word-size of compiled methods; populated by walking every
// nmethod in the code cache.
class MethodArityHistogram {
 public:
  enum { MAX_ARITY = 256 };
 private:
  static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
  static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
  static uint64_t _total_compiled_calls;
  static uint64_t _max_compiled_calls_per_method;
  static int _max_arity;                       // max. arity seen
  static int _max_size;                        // max. arg size seen

  // CodeCache::nmethods_do callback: fold one nmethod into the histograms.
  static void add_method_to_histogram(nmethod* nm) {
    Method* method = (nm == nullptr) ? nullptr : nm->method();
    if (method != nullptr) {
      ArgumentCount args(method->signature());
      // The receiver counts as one extra argument for non-static methods.
      int arity = args.size() + (method->is_static() ? 0 : 1);
    // Take the Compile_lock to protect against changes in the CodeBlob structures
    MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
    // Take the CodeCache_lock to protect against changes in the CodeHeap structure
    MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    _max_arity = _max_size = 0;
    _total_compiled_calls = 0;
    _max_compiled_calls_per_method = 0;
    for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
    CodeCache::nmethods_do(add_method_to_histogram);
    print_histogram();
  }
};
2247
// Storage for MethodArityHistogram's static accumulators.
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2254
2255 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2256 tty->print_cr("Calls from compiled code:");
2257 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2258 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2259 int64_t mono_i = _nof_interface_calls;
2260 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2261 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2262 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2263 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2264 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2265 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2266 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2267 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2268 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2269 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2270 tty->cr();
2271 tty->print_cr("Note 1: counter updates are not MT-safe.");
2272 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2273 tty->print_cr(" %% in nested categories are relative to their category");
2274 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2275 tty->cr();
2579 }
2580 #endif // INCLUDE_CDS
2581 if (entry == nullptr) {
2582 assert_lock_strong(AdapterHandlerLibrary_lock);
2583 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2584 if (entry_p != nullptr) {
2585 entry = *entry_p;
2586 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2587 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2588 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2589 #ifndef PRODUCT
2590 _runtime_hits++;
2591 #endif
2592 }
2593 }
2594 AdapterFingerPrint::deallocate(fp);
2595 return entry;
2596 }
2597
2598 #ifndef PRODUCT
2599 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2600 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2601 return sizeof(*key) + sizeof(*a);
2602 };
2603 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2604 ts.print(st, "AdapterHandlerTable");
2605 st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2606 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2607 int total_hits = _archived_hits + _runtime_hits;
2608 st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2609 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2610 }
2611 #endif // !PRODUCT
2612
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
// Cached handlers named after common argument shapes (no-arg, single int,
// single oop, oop+int, oop+oop); presumably pre-built fast paths for the most
// frequent signatures -- confirm at the initialization site (outside this view).
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
#if INCLUDE_CDS
// Table of adapter handlers stored in the AOT/CDS archive (CDS builds only).
ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
#endif // INCLUDE_CDS
// Size of the shared buffer blob below; see buffer_blob() for the accessor.
static const int AdapterHandlerLibrary_size = 16*K;
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
// Counter for handing out ids; volatile, so updates are presumably done with
// atomic operations elsewhere -- confirm at the increment site.
volatile uint AdapterHandlerLibrary::_id_counter = 0;
2626
2627 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2628 assert(_buffer != nullptr, "should be initialized");
2629 return _buffer;
2630 }
2631
3494 };
3495 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3496 _adapter_handler_table->iterate(findblob_runtime_table);
3497 }
3498 assert(found, "Should have found handler");
3499 }
3500
3501 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3502 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3503 if (adapter_blob() != nullptr) {
3504 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3505 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3506 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3507 if (get_c2i_no_clinit_check_entry() != nullptr) {
3508 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3509 }
3510 }
3511 st->cr();
3512 }
3513
3514 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3515 assert(current == JavaThread::current(), "pre-condition");
3516 StackOverflow* overflow_state = current->stack_overflow_state();
3517 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3518 overflow_state->set_reserved_stack_activation(current->stack_base());
3519 JRT_END
3520
3521 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3522 ResourceMark rm(current);
3523 frame activation;
3524 nmethod* nm = nullptr;
3525 int count = 1;
3526
3527 assert(fr.is_java_frame(), "Must start on Java frame");
3528
3529 RegisterMap map(JavaThread::current(),
3530 RegisterMap::UpdateMap::skip,
3531 RegisterMap::ProcessFrames::skip,
3532 RegisterMap::WalkContinuation::skip); // don't walk continuations
3533 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|