53 #include "oops/klass.hpp"
54 #include "oops/method.inline.hpp"
55 #include "oops/objArrayKlass.hpp"
56 #include "oops/oop.inline.hpp"
57 #include "prims/forte.hpp"
58 #include "prims/jvmtiExport.hpp"
59 #include "prims/jvmtiThreadState.hpp"
60 #include "prims/methodHandles.hpp"
61 #include "prims/nativeLookup.hpp"
62 #include "runtime/arguments.hpp"
63 #include "runtime/atomicAccess.hpp"
64 #include "runtime/basicLock.inline.hpp"
65 #include "runtime/frame.inline.hpp"
66 #include "runtime/handles.inline.hpp"
67 #include "runtime/init.hpp"
68 #include "runtime/interfaceSupport.inline.hpp"
69 #include "runtime/java.hpp"
70 #include "runtime/javaCalls.hpp"
71 #include "runtime/jniHandles.inline.hpp"
72 #include "runtime/osThread.hpp"
73 #include "runtime/perfData.hpp"
74 #include "runtime/sharedRuntime.hpp"
75 #include "runtime/stackWatermarkSet.hpp"
76 #include "runtime/stubRoutines.hpp"
77 #include "runtime/synchronizer.hpp"
78 #include "runtime/timerTrace.hpp"
79 #include "runtime/vframe.inline.hpp"
80 #include "runtime/vframeArray.hpp"
81 #include "runtime/vm_version.hpp"
82 #include "utilities/copy.hpp"
83 #include "utilities/dtrace.hpp"
84 #include "utilities/events.hpp"
85 #include "utilities/globalDefinitions.hpp"
86 #include "utilities/hashTable.hpp"
87 #include "utilities/macros.hpp"
88 #include "utilities/xmlstream.hpp"
89 #ifdef COMPILER1
90 #include "c1/c1_Runtime1.hpp"
91 #endif
92 #ifdef COMPILER2
93 #include "opto/runtime.hpp"
94 #endif
95 #if INCLUDE_JFR
96 #include "jfr/jfr.inline.hpp"
97 #endif
98
99 // Shared runtime stub routines reside in their own unique blob with a
100 // single entry point
101
102
// Define one static blob-pointer field per shared stub. SHARED_STUBS_DO
// expands the macro once per (name, type) pair; BLOB_FIELD_NAME maps the
// stub name to its field identifier.
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type* SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

// Continuation yield stub; defined separately from the table above.
nmethod* SharedRuntime::_cont_doYield_stub;
109
110 #if 0
111 // TODO tweak global stub name generation to match this
112 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
113 const char *SharedRuntime::_stub_names[] = {
114 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
115 };
116 #endif
117
118 //----------------------------generate_stubs-----------------------------------
119 void SharedRuntime::generate_initial_stubs() {
120 // Build this early so it's available for the interpreter.
121 _throw_StackOverflowError_blob =
122 generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
123 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
124 }
125
126 void SharedRuntime::generate_stubs() {
127 _wrong_method_blob =
128 generate_resolve_blob(StubId::shared_wrong_method_id,
129 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
159 generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
160 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
161
162 #if COMPILER2_OR_JVMCI
163 // Vectors are generated only by C2 and JVMCI.
164 bool support_wide = is_wide_vector(MaxVectorSize);
165 if (support_wide) {
166 _polling_page_vectors_safepoint_handler_blob =
167 generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
168 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
169 }
170 #endif // COMPILER2_OR_JVMCI
171 _polling_page_safepoint_handler_blob =
172 generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
173 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
174 _polling_page_return_handler_blob =
175 generate_handler_blob(StubId::shared_polling_page_return_handler_id,
176 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
177
178 generate_deopt_blob();
179 }
180
// One-time setup hook: delegates to AdapterHandlerLibrary::initialize().
void SharedRuntime::init_adapter_library() {
  AdapterHandlerLibrary::initialize();
}
184
185 #if INCLUDE_JFR
186 //------------------------------generate jfr runtime stubs ------
187 void SharedRuntime::generate_jfr_stubs() {
188 ResourceMark rm;
189 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
190 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
191
192 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
193 _jfr_return_lease_blob = generate_jfr_return_lease();
194 }
195
196 #endif // INCLUDE_JFR
197
198 #include <math.h>
199
200 // Implementation of SharedRuntime
201
#ifndef PRODUCT
// For statistics
// Call-resolution and implicit-exception counters (bumped with AtomicAccess::inc).
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-shape counters reported by print_call_statistics().
// Note: updates are not MT-safe (stated in print_call_statistics output).
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

// JRT_ENTRY counters dumped by print_statistics().
uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Bounded table of distinct IC-miss sites; filled by trace_ic_miss().
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
247
248
249 void SharedRuntime::trace_ic_miss(address at) {
250 for (int i = 0; i < _ICmiss_index; i++) {
251 if (_ICmiss_at[i] == at) {
252 _ICmiss_count[i]++;
253 return;
254 }
255 }
256 int index = _ICmiss_index++;
257 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
258 _ICmiss_at[index] = at;
259 _ICmiss_count[index] = 1;
260 }
261
262 void SharedRuntime::print_ic_miss_histogram() {
263 if (ICMissHistogram) {
264 tty->print_cr("IC Miss Histogram:");
265 int tot_misses = 0;
266 for (int i = 0; i < _ICmiss_index; i++) {
267 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
268 tot_misses += _ICmiss_count[i];
269 }
270 tty->print_cr("Total IC misses: %7d", tot_misses);
271 }
272 }
273
274 #ifdef COMPILER2
275 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
// Debug-node helper: print a jboolean in fieldDescriptor::print_on_for format.
void SharedRuntime::debug_print_value(jboolean x) {
  tty->print_cr("boolean %d", x);
}
279
// Debug-node helper: print a jbyte in fieldDescriptor::print_on_for format.
void SharedRuntime::debug_print_value(jbyte x) {
  tty->print_cr("byte %d", x);
}
283
// Debug-node helper: print a jshort in fieldDescriptor::print_on_for format.
void SharedRuntime::debug_print_value(jshort x) {
  tty->print_cr("short %d", x);
}
287
288 void SharedRuntime::debug_print_value(jchar x) {
289 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
290 }
736
// Convenience overload: construct the named exception with the given message,
// then delegate to the Handle-taking overload.
void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Symbol* name, const char *message) {
  Handle h_exception = Exceptions::new_exception(current, name, message);
  throw_and_post_jvmti_exception(current, h_exception);
}
741
742 // The interpreter code to call this tracing function is only
743 // called/generated when UL is on for redefine, class and has the right level
744 // and tags. Since obsolete methods are never compiled, we don't have
745 // to modify the compilers to generate calls to this function.
746 //
747 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
748 JavaThread* thread, Method* method))
749 if (method->is_obsolete()) {
750 // We are calling an obsolete method, but this is not necessarily
751 // an error. Our method could have been redefined just after we
752 // fetched the Method* from the constant pool.
753 ResourceMark rm;
754 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
755 }
756 return 0;
757 JRT_END
758
759 // ret_pc points into caller; we are returning caller's exception handler
760 // for given exception
// Note that the implementation of this method assumes it's only called when an exception has actually occurred
762 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
763 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
764 assert(nm != nullptr, "must exist");
765 ResourceMark rm;
766
767 #if INCLUDE_JVMCI
768 if (nm->is_compiled_by_jvmci()) {
769 // lookup exception handler for this pc
770 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
771 ExceptionHandlerTable table(nm);
772 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
773 if (t != nullptr) {
774 return nm->code_begin() + t->pco();
775 } else {
1389
1390 // determine call info & receiver
1391 // note: a) receiver is null for static calls
1392 // b) an exception is thrown if receiver is null for non-static calls
1393 CallInfo call_info;
1394 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1395 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1396
1397 NoSafepointVerifier nsv;
1398
1399 methodHandle callee_method(current, call_info.selected_method());
1400
1401 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1402 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1403 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1404 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1405 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1406
1407 assert(!caller_nm->is_unloading(), "It should not be unloading");
1408
1409 #ifndef PRODUCT
1410 // tracing/debugging/statistics
1411 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1412 (is_virtual) ? (&_resolve_virtual_ctr) :
1413 (&_resolve_static_ctr);
1414 AtomicAccess::inc(addr);
1415
1416 if (TraceCallFixup) {
1417 ResourceMark rm(current);
1418 tty->print("resolving %s%s (%s) call to",
1419 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1420 Bytecodes::name(invoke_code));
1421 callee_method->print_short_name(tty);
1422 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1423 p2i(caller_frame.pc()), p2i(callee_method->code()));
1424 }
1425 #endif
1426
1427 if (invoke_code == Bytecodes::_invokestatic) {
1428 assert(callee_method->method_holder()->is_initialized() ||
1429 callee_method->method_holder()->is_reentrant_initialization(current),
1430 "invalid class initialization state for invoke_static");
1431 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1432 // In order to keep class initialization check, do not patch call
1433 // site for static call when the class is not fully initialized.
1434 // Proper check is enforced by call site re-resolution on every invocation.
1435 //
1451
1452 // Make sure the callee nmethod does not get deoptimized and removed before
1453 // we are done patching the code.
1454
1455
1456 CompiledICLocker ml(caller_nm);
1457 if (is_virtual && !is_optimized) {
1458 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1459 inline_cache->update(&call_info, receiver->klass());
1460 } else {
1461 // Callsite is a direct call - set it to the destination method
1462 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1463 callsite->set(callee_method);
1464 }
1465
1466 return callee_method;
1467 }
1468
1469 // Inline caches exist only in compiled code
1470 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1471 #ifdef ASSERT
1472 RegisterMap reg_map(current,
1473 RegisterMap::UpdateMap::skip,
1474 RegisterMap::ProcessFrames::include,
1475 RegisterMap::WalkContinuation::skip);
1476 frame stub_frame = current->last_frame();
1477 assert(stub_frame.is_runtime_frame(), "sanity check");
1478 frame caller_frame = stub_frame.sender(®_map);
1479 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1480 #endif /* ASSERT */
1481
1482 methodHandle callee_method;
1483 JRT_BLOCK
1484 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1485 // Return Method* through TLS
1486 current->set_vm_result_metadata(callee_method());
1487 JRT_BLOCK_END
1488 // return compiled code entry point after potential safepoints
1489 return get_resolved_entry(current, callee_method);
1490 JRT_END
1491
1492
1493 // Handle call site that has been made non-entrant
1494 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1495 // 6243940 We might end up in here if the callee is deoptimized
1496 // as we race to call it. We don't want to take a safepoint if
1497 // the caller was interpreted because the caller frame will look
1498 // interpreted to the stack walkers and arguments are now
1499 // "compiled" so it is much better to make this transition
1500 // invisible to the stack walking code. The i2c path will
1501 // place the callee method in the callee_target. It is stashed
1502 // there because if we try and find the callee by normal means a
1503 // safepoint is possible and have trouble gc'ing the compiled args.
1504 RegisterMap reg_map(current,
1505 RegisterMap::UpdateMap::skip,
1506 RegisterMap::ProcessFrames::include,
1507 RegisterMap::WalkContinuation::skip);
1508 frame stub_frame = current->last_frame();
1509 assert(stub_frame.is_runtime_frame(), "sanity check");
1510 frame caller_frame = stub_frame.sender(®_map);
1511
1512 if (caller_frame.is_interpreted_frame() ||
1513 caller_frame.is_entry_frame() ||
1514 caller_frame.is_upcall_stub_frame()) {
1527 // so bypassing it in c2i adapter is benign.
1528 return callee->get_c2i_no_clinit_check_entry();
1529 } else {
1530 return callee->get_c2i_entry();
1531 }
1532 }
1533
1534 // Must be compiled to compiled path which is safe to stackwalk
1535 methodHandle callee_method;
1536 JRT_BLOCK
1537 // Force resolving of caller (if we called from compiled frame)
1538 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1539 current->set_vm_result_metadata(callee_method());
1540 JRT_BLOCK_END
1541 // return compiled code entry point after potential safepoints
1542 return get_resolved_entry(current, callee_method);
1543 JRT_END
1544
1545 // Handle abstract method call
1546 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1547 // Verbose error message for AbstractMethodError.
1548 // Get the called method from the invoke bytecode.
1549 vframeStream vfst(current, true);
1550 assert(!vfst.at_end(), "Java frame must exist");
1551 methodHandle caller(current, vfst.method());
1552 Bytecode_invoke invoke(caller, vfst.bci());
1553 DEBUG_ONLY( invoke.verify(); )
1554
1555 // Find the compiled caller frame.
1556 RegisterMap reg_map(current,
1557 RegisterMap::UpdateMap::include,
1558 RegisterMap::ProcessFrames::include,
1559 RegisterMap::WalkContinuation::skip);
1560 frame stubFrame = current->last_frame();
1561 assert(stubFrame.is_runtime_frame(), "must be");
1562 frame callerFrame = stubFrame.sender(®_map);
1563 assert(callerFrame.is_compiled_frame(), "must be");
1564
1565 // Install exception and return forward entry.
1566 address res = SharedRuntime::throw_AbstractMethodError_entry();
1573 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1574 }
1575 JRT_BLOCK_END
1576 return res;
1577 JRT_END
1578
1579 // return verified_code_entry if interp_only_mode is not set for the current thread;
1580 // otherwise return c2i entry.
1581 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1582 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1583 // In interp_only_mode we need to go to the interpreted entry
1584 // The c2i won't patch in this mode -- see fixup_callers_callsite
1585 return callee_method->get_c2i_entry();
1586 }
1587 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1588 return callee_method->verified_code_entry();
1589 }
1590
1591 // resolve a static call and patch code
1592 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1593 methodHandle callee_method;
1594 bool enter_special = false;
1595 JRT_BLOCK
1596 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1597 current->set_vm_result_metadata(callee_method());
1598 JRT_BLOCK_END
1599 // return compiled code entry point after potential safepoints
1600 return get_resolved_entry(current, callee_method);
1601 JRT_END
1602
// resolve virtual call and update inline cache to monomorphic
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // (is_virtual=true, is_optimized=false): dispatched virtual call.
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    // Hand the resolved Method* back through thread-local storage.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1613
1614
// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache). Patch code to resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // (is_virtual=true, is_optimized=true): statically bindable virtual call.
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    // Hand the resolved Method* back through thread-local storage.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1626
1627 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1628 JavaThread* current = THREAD;
1629 ResourceMark rm(current);
1630 CallInfo call_info;
1631 Bytecodes::Code bc;
1632
1633 // receiver is null for static calls. An exception is thrown for null
1634 // receivers for non-static calls
1635 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1636
1637 methodHandle callee_method(current, call_info.selected_method());
1638
1639 #ifndef PRODUCT
1640 AtomicAccess::inc(&_ic_miss_ctr);
1641
1642 // Statistics & Tracing
1643 if (TraceCallFixup) {
1644 ResourceMark rm(current);
1645 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1646 callee_method->print_short_name(tty);
1647 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1648 }
1649
1650 if (ICMissHistogram) {
1651 MutexLocker m(VMStatistic_lock);
1652 RegisterMap reg_map(current,
1653 RegisterMap::UpdateMap::skip,
1654 RegisterMap::ProcessFrames::include,
1655 RegisterMap::WalkContinuation::skip);
1656 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1657 // produce statistics under the lock
1658 trace_ic_miss(f.pc());
1659 }
1660 #endif
1661
1744 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1745 cdc->set_to_clean();
1746 break;
1747 }
1748
1749 case relocInfo::virtual_call_type: {
1750 // compiled, dispatched call (which used to call an interpreted method)
1751 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1752 inline_cache->set_to_clean();
1753 break;
1754 }
1755 default:
1756 break;
1757 }
1758 }
1759 }
1760 }
1761
1762 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1763
1764
1765 #ifndef PRODUCT
1766 AtomicAccess::inc(&_wrong_method_ctr);
1767
1768 if (TraceCallFixup) {
1769 ResourceMark rm(current);
1770 tty->print("handle_wrong_method reresolving call to");
1771 callee_method->print_short_name(tty);
1772 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1773 }
1774 #endif
1775
1776 return callee_method;
1777 }
1778
1779 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1780 // The faulting unsafe accesses should be changed to throw the error
1781 // synchronously instead. Meanwhile the faulting instruction will be
1782 // skipped over (effectively turning it into a no-op) and an
1783 // asynchronous exception will be raised which the thread will
1784 // handle at a later point. If the instruction is a load it will
1785 // return garbage.
1786
1787 // Request an async exception.
2030 if (CheckJNICalls) {
2031 fatal("Object has been unlocked by JNI");
2032 }
2033 return;
2034 }
2035 ObjectSynchronizer::exit(obj, lock, current);
2036 }
2037
// Handles the uncommon cases of monitor unlocking in compiled code
JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  // All the real work (including the CheckJNICalls diagnostics) is done in
  // the shared helper.
  SharedRuntime::monitor_exit_helper(obj, lock, current);
JRT_END
2043
2044 #ifndef PRODUCT
2045
// Dump all SharedRuntime counters (non-PRODUCT only); output is wrapped in
// an xtty 'statistics' element when XML logging is active.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram();

  // Dump the JRT_ENTRY counters
  if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
  if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
  if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
  if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
  if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
  if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);

  // Call-resolution counters are printed unconditionally.
  tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
  tty->print_cr("%5u wrong method", _wrong_method_ctr);
  tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
  tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
  tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);

  // Remaining counters are only printed when non-zero.
  if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
  if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
  if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
  if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
  if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
  if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
  if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
  if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
  if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
  if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
  if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
  if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
  if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
  if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
  if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
  if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
  if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);

  AdapterHandlerLibrary::print_statistics();

  if (xtty != nullptr) xtty->tail("statistics");
}
2088
// Percentage of x relative to y, clamping the denominator to at least 1 so
// a zero (or negative) total never divides by zero.
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y > (int64_t)1) ? y : (int64_t)1;
  return 100.0 * (double)x / (double)denom;
}
2092
2093 class MethodArityHistogram {
2094 public:
2095 enum { MAX_ARITY = 256 };
2096 private:
2097 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2098 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2099 static uint64_t _total_compiled_calls;
2100 static uint64_t _max_compiled_calls_per_method;
2101 static int _max_arity; // max. arity seen
2102 static int _max_size; // max. arg size seen
2103
2104 static void add_method_to_histogram(nmethod* nm) {
2105 Method* method = (nm == nullptr) ? nullptr : nm->method();
2106 if (method != nullptr) {
2107 ArgumentCount args(method->signature());
2108 int arity = args.size() + (method->is_static() ? 0 : 1);
2153 // Take the Compile_lock to protect against changes in the CodeBlob structures
2154 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2155 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2156 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2157 _max_arity = _max_size = 0;
2158 _total_compiled_calls = 0;
2159 _max_compiled_calls_per_method = 0;
2160 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2161 CodeCache::nmethods_do(add_method_to_histogram);
2162 print_histogram();
2163 }
2164 };
2165
// Storage for MethodArityHistogram's accumulated statistics, filled in by
// add_method_to_histogram() while walking the code cache.
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2172
2173 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2174 tty->print_cr("Calls from compiled code:");
2175 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2176 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2177 int64_t mono_i = _nof_interface_calls;
2178 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2179 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2180 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2181 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2182 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2183 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2184 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2185 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2186 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2187 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2188 tty->cr();
2189 tty->print_cr("Note 1: counter updates are not MT-safe.");
2190 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2191 tty->print_cr(" %% in nested categories are relative to their category");
2192 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2193 tty->cr();
2497 }
2498 #endif // INCLUDE_CDS
2499 if (entry == nullptr) {
2500 assert_lock_strong(AdapterHandlerLibrary_lock);
2501 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2502 if (entry_p != nullptr) {
2503 entry = *entry_p;
2504 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2505 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2506 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2507 #ifndef PRODUCT
2508 _runtime_hits++;
2509 #endif
2510 }
2511 }
2512 AdapterFingerPrint::deallocate(fp);
2513 return entry;
2514 }
2515
2516 #ifndef PRODUCT
2517 static void print_table_statistics() {
2518 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2519 return sizeof(*key) + sizeof(*a);
2520 };
2521 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2522 ts.print(tty, "AdapterHandlerTable");
2523 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2524 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2525 int total_hits = _archived_hits + _runtime_hits;
2526 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2527 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2528 }
2529 #endif
2530
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
// NOTE(review): these look like cached handlers for common simple argument
// shapes, consulted before the general table — confirm against lookup code.
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
#if INCLUDE_CDS
ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
#endif // INCLUDE_CDS
// Size of the shared BufferBlob backing adapter generation.
static const int AdapterHandlerLibrary_size = 16*K;
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
volatile uint AdapterHandlerLibrary::_id_counter = 0;
2544
// Accessor for the shared adapter-generation buffer; only valid after
// initialization has set _buffer.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  assert(_buffer != nullptr, "should be initialized");
  return _buffer;
}
2549
3411 };
3412 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3413 _adapter_handler_table->iterate(findblob_runtime_table);
3414 }
3415 assert(found, "Should have found handler");
3416 }
3417
// Print this entry (address + fingerprint) and, when code has been
// generated, each of its entry points, all on a single line.
void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
  st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
  if (adapter_blob() != nullptr) {
    st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
    st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
    st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
    // The no-clinit-check entry is optional.
    if (get_c2i_no_clinit_check_entry() != nullptr) {
      st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
    }
  }
  st->cr();
}
3430
3431 #ifndef PRODUCT
3432
// Non-PRODUCT only: forward to the file-local table statistics printer.
void AdapterHandlerLibrary::print_statistics() {
  print_table_statistics();
}
3436
3437 #endif /* PRODUCT */
3438
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition")//;
  // Re-arm the reserved stack zone (verifying it was disabled) and reset the
  // activation watermark back to the stack base.
  StackOverflow* overflow_state = current->stack_overflow_state();
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3445
3446 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3447 ResourceMark rm(current);
3448 frame activation;
3449 nmethod* nm = nullptr;
3450 int count = 1;
3451
3452 assert(fr.is_java_frame(), "Must start on Java frame");
3453
3454 RegisterMap map(JavaThread::current(),
3455 RegisterMap::UpdateMap::skip,
3456 RegisterMap::ProcessFrames::skip,
3457 RegisterMap::WalkContinuation::skip); // don't walk continuations
3458 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|
53 #include "oops/klass.hpp"
54 #include "oops/method.inline.hpp"
55 #include "oops/objArrayKlass.hpp"
56 #include "oops/oop.inline.hpp"
57 #include "prims/forte.hpp"
58 #include "prims/jvmtiExport.hpp"
59 #include "prims/jvmtiThreadState.hpp"
60 #include "prims/methodHandles.hpp"
61 #include "prims/nativeLookup.hpp"
62 #include "runtime/arguments.hpp"
63 #include "runtime/atomicAccess.hpp"
64 #include "runtime/basicLock.inline.hpp"
65 #include "runtime/frame.inline.hpp"
66 #include "runtime/handles.inline.hpp"
67 #include "runtime/init.hpp"
68 #include "runtime/interfaceSupport.inline.hpp"
69 #include "runtime/java.hpp"
70 #include "runtime/javaCalls.hpp"
71 #include "runtime/jniHandles.inline.hpp"
72 #include "runtime/osThread.hpp"
73 #include "runtime/perfData.inline.hpp"
74 #include "runtime/sharedRuntime.hpp"
75 #include "runtime/stackWatermarkSet.hpp"
76 #include "runtime/stubRoutines.hpp"
77 #include "runtime/synchronizer.hpp"
78 #include "runtime/timerTrace.hpp"
79 #include "runtime/vframe.inline.hpp"
80 #include "runtime/vframeArray.hpp"
81 #include "runtime/vm_version.hpp"
82 #include "services/management.hpp"
83 #include "utilities/copy.hpp"
84 #include "utilities/dtrace.hpp"
85 #include "utilities/events.hpp"
86 #include "utilities/globalDefinitions.hpp"
87 #include "utilities/hashTable.hpp"
88 #include "utilities/macros.hpp"
89 #include "utilities/xmlstream.hpp"
90 #ifdef COMPILER1
91 #include "c1/c1_Runtime1.hpp"
92 #endif
93 #ifdef COMPILER2
94 #include "opto/runtime.hpp"
95 #endif
96 #if INCLUDE_JFR
97 #include "jfr/jfr.inline.hpp"
98 #endif
99
100 // Shared runtime stub routines reside in their own unique blob with a
101 // single entry point
102
103
// Define the static blob fields declared via SHARED_STUBS_DO, one per shared
// stub (the declarations live in the SharedRuntime class).
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type* SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

// Continuation yield stub; defined separately from the table above.
nmethod* SharedRuntime::_cont_doYield_stub;
110
// Elapsed/thread-time tick counters for the call-resolution and IC-miss
// runtime entry points; allocated in generate_stubs() when UsePerfData is on,
// and reported by print_counters_on().
PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time = nullptr;
116
117 #if 0
118 // TODO tweak global stub name generation to match this
119 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
120 const char *SharedRuntime::_stub_names[] = {
121 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
122 };
123 #endif
124
125 //----------------------------generate_stubs-----------------------------------
126 void SharedRuntime::generate_initial_stubs() {
127 // Build this early so it's available for the interpreter.
128 _throw_StackOverflowError_blob =
129 generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
130 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
131 }
132
// Generate the remaining shared runtime stubs (resolution, exception throwers,
// safepoint polling handlers, deopt blob) and, with UsePerfData, the perf
// tick counters used by the call-resolution entry points.
void SharedRuntime::generate_stubs() {
  _wrong_method_blob =
    generate_resolve_blob(StubId::shared_wrong_method_id,
                          CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
    generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
                             CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));

#if COMPILER2_OR_JVMCI
  // Vectors are generated only by C2 and JVMCI.
  bool support_wide = is_wide_vector(MaxVectorSize);
  if (support_wide) {
    // Wide-vector variant of the safepoint handler, which must also save the
    // full vector registers.
    _polling_page_vectors_safepoint_handler_blob =
      generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
                            CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
  }
#endif // COMPILER2_OR_JVMCI
  _polling_page_safepoint_handler_blob =
    generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
                          CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
  _polling_page_return_handler_blob =
    generate_handler_blob(StubId::shared_polling_page_return_handler_id,
                          CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));

  generate_deopt_blob();

  if (UsePerfData) {
    EXCEPTION_MARK;
    // NOTE(review): "resovle" in the three counter names below looks like a
    // typo for "resolve" — confirm before renaming, since external tooling
    // may key on these perf-counter names.
    NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resovle_opt_virtual_call");
    NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resovle_virtual_call");
    NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resovle_static_call");
    NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
    NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time , SUN_CI, "ic_miss");
    if (HAS_PENDING_EXCEPTION) {
      vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
    }
  }
}
199
200 void SharedRuntime::init_adapter_library() {
201 AdapterHandlerLibrary::initialize();
202 }
203
204 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
205 st->print(" %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
206 if (TraceThreadTime) {
207 st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
208 }
209 st->print(" / %5d events", cnt);
210 st->cr();
211 }
212
213 void SharedRuntime::print_counters_on(outputStream* st) {
214 st->print_cr("SharedRuntime:");
215 if (UsePerfData) {
216 print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
217 print_counter_on(st, "resolve_virtual_call:", _perf_resolve_virtual_total_time, _resolve_virtual_ctr);
218 print_counter_on(st, "resolve_static_call:", _perf_resolve_static_total_time, _resolve_static_ctr);
219 print_counter_on(st, "handle_wrong_method:", _perf_handle_wrong_method_total_time, _wrong_method_ctr);
220 print_counter_on(st, "ic_miss:", _perf_ic_miss_total_time, _ic_miss_ctr);
221
222 jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
223 _perf_resolve_virtual_total_time->elapsed_counter_value() +
224 _perf_resolve_static_total_time->elapsed_counter_value() +
225 _perf_handle_wrong_method_total_time->elapsed_counter_value() +
226 _perf_ic_miss_total_time->elapsed_counter_value());
227 st->print("Total: " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
228 if (TraceThreadTime) {
229 jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
230 _perf_resolve_virtual_total_time->thread_counter_value() +
231 _perf_resolve_static_total_time->thread_counter_value() +
232 _perf_handle_wrong_method_total_time->thread_counter_value() +
233 _perf_ic_miss_total_time->thread_counter_value());
234 st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);
235
236 }
237 st->cr();
238 } else {
239 st->print_cr(" no data (UsePerfData is turned off)");
240 }
241 }
242
243 #if INCLUDE_JFR
244 //------------------------------generate jfr runtime stubs ------
245 void SharedRuntime::generate_jfr_stubs() {
246 ResourceMark rm;
247 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
248 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
249
250 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
251 _jfr_return_lease_blob = generate_jfr_return_lease();
252 }
253
254 #endif // INCLUDE_JFR
255
256 #include <math.h>
257
258 // Implementation of SharedRuntime
259
// For statistics: updated from the resolution/miss entry points below
// (see handle_ic_miss_helper, reresolve_call_site, resolve_helper).
uint SharedRuntime::_ic_miss_ctr = 0;              // inline-cache misses in compiled code
uint SharedRuntime::_wrong_method_ctr = 0;         // calls into non-entrant code re-resolved
uint SharedRuntime::_resolve_static_ctr = 0;       // unresolved static call sites
uint SharedRuntime::_resolve_virtual_ctr = 0;      // unresolved virtual call sites
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;  // unresolved optimized-virtual call sites
266
267 #ifndef PRODUCT
// Non-product statistics storage.
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-site counts from compiled code, printed by print_call_statistics_on().
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

// Slow-path event counters (allocation, multianewarray, monitors, ...).
uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Bounded table recording the pc of each inline-cache miss; filled by
// trace_ic_miss() and printed by print_ic_miss_histogram_on().
int     SharedRuntime::_ICmiss_index = 0;
int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
306
307
308 void SharedRuntime::trace_ic_miss(address at) {
309 for (int i = 0; i < _ICmiss_index; i++) {
310 if (_ICmiss_at[i] == at) {
311 _ICmiss_count[i]++;
312 return;
313 }
314 }
315 int index = _ICmiss_index++;
316 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
317 _ICmiss_at[index] = at;
318 _ICmiss_count[index] = 1;
319 }
320
321 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
322 if (ICMissHistogram) {
323 st->print_cr("IC Miss Histogram:");
324 int tot_misses = 0;
325 for (int i = 0; i < _ICmiss_index; i++) {
326 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
327 tot_misses += _ICmiss_count[i];
328 }
329 st->print_cr("Total IC misses: %7d", tot_misses);
330 }
331 }
332
333 #ifdef COMPILER2
334 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
335 void SharedRuntime::debug_print_value(jboolean x) {
336 tty->print_cr("boolean %d", x);
337 }
338
339 void SharedRuntime::debug_print_value(jbyte x) {
340 tty->print_cr("byte %d", x);
341 }
342
343 void SharedRuntime::debug_print_value(jshort x) {
344 tty->print_cr("short %d", x);
345 }
346
347 void SharedRuntime::debug_print_value(jchar x) {
348 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
349 }
795
796 void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Symbol* name, const char *message) {
797 Handle h_exception = Exceptions::new_exception(current, name, message);
798 throw_and_post_jvmti_exception(current, h_exception);
799 }
800
801 // The interpreter code to call this tracing function is only
802 // called/generated when UL is on for redefine, class and has the right level
803 // and tags. Since obsolete methods are never compiled, we don't have
804 // to modify the compilers to generate calls to this function.
805 //
806 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
807 JavaThread* thread, Method* method))
808 if (method->is_obsolete()) {
809 // We are calling an obsolete method, but this is not necessarily
810 // an error. Our method could have been redefined just after we
811 // fetched the Method* from the constant pool.
812 ResourceMark rm;
813 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
814 }
815
816 LogStreamHandle(Trace, interpreter, bytecode) log;
817 if (log.is_enabled()) {
818 ResourceMark rm;
819 log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
820 p2i(thread),
821 (method->is_static() ? "static" : "virtual"),
822 method->name_and_sig_as_C_string(),
823 (method->is_native() ? " native" : ""),
824 (thread->class_being_initialized() != nullptr ? " clinit" : ""),
825 (method->method_holder()->is_initialized() ? "" : " being_initialized"));
826 }
827 return 0;
828 JRT_END
829
830 // ret_pc points into caller; we are returning caller's exception handler
831 // for given exception
832 // Note that the implementation of this method assumes it's only called when an exception has actually occured
833 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
834 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
835 assert(nm != nullptr, "must exist");
836 ResourceMark rm;
837
838 #if INCLUDE_JVMCI
839 if (nm->is_compiled_by_jvmci()) {
840 // lookup exception handler for this pc
841 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
842 ExceptionHandlerTable table(nm);
843 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
844 if (t != nullptr) {
845 return nm->code_begin() + t->pco();
846 } else {
1460
1461 // determine call info & receiver
1462 // note: a) receiver is null for static calls
1463 // b) an exception is thrown if receiver is null for non-static calls
1464 CallInfo call_info;
1465 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1466 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1467
1468 NoSafepointVerifier nsv;
1469
1470 methodHandle callee_method(current, call_info.selected_method());
1471
1472 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1473 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1474 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1475 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1476 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1477
1478 assert(!caller_nm->is_unloading(), "It should not be unloading");
1479
1480 // tracing/debugging/statistics
1481 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1482 (is_virtual) ? (&_resolve_virtual_ctr) :
1483 (&_resolve_static_ctr);
1484 AtomicAccess::inc(addr);
1485
1486 #ifndef PRODUCT
1487 if (TraceCallFixup) {
1488 ResourceMark rm(current);
1489 tty->print("resolving %s%s (%s) call to",
1490 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1491 Bytecodes::name(invoke_code));
1492 callee_method->print_short_name(tty);
1493 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1494 p2i(caller_frame.pc()), p2i(callee_method->code()));
1495 }
1496 #endif
1497
1498 if (invoke_code == Bytecodes::_invokestatic) {
1499 assert(callee_method->method_holder()->is_initialized() ||
1500 callee_method->method_holder()->is_reentrant_initialization(current),
1501 "invalid class initialization state for invoke_static");
1502 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1503 // In order to keep class initialization check, do not patch call
1504 // site for static call when the class is not fully initialized.
1505 // Proper check is enforced by call site re-resolution on every invocation.
1506 //
1522
1523 // Make sure the callee nmethod does not get deoptimized and removed before
1524 // we are done patching the code.
1525
1526
1527 CompiledICLocker ml(caller_nm);
1528 if (is_virtual && !is_optimized) {
1529 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1530 inline_cache->update(&call_info, receiver->klass());
1531 } else {
1532 // Callsite is a direct call - set it to the destination method
1533 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1534 callsite->set(callee_method);
1535 }
1536
1537 return callee_method;
1538 }
1539
1540 // Inline caches exist only in compiled code
1541 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1542 PerfTraceTime timer(_perf_ic_miss_total_time);
1543
1544 #ifdef ASSERT
1545 RegisterMap reg_map(current,
1546 RegisterMap::UpdateMap::skip,
1547 RegisterMap::ProcessFrames::include,
1548 RegisterMap::WalkContinuation::skip);
1549 frame stub_frame = current->last_frame();
1550 assert(stub_frame.is_runtime_frame(), "sanity check");
1551 frame caller_frame = stub_frame.sender(®_map);
1552 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1553 #endif /* ASSERT */
1554
1555 methodHandle callee_method;
1556 JRT_BLOCK
1557 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1558 // Return Method* through TLS
1559 current->set_vm_result_metadata(callee_method());
1560 JRT_BLOCK_END
1561 // return compiled code entry point after potential safepoints
1562 return get_resolved_entry(current, callee_method);
1563 JRT_END
1564
1565
// Handle call site that has been made non-entrant
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
  PerfTraceTime timer(_perf_handle_wrong_method_total_time);

  // 6243940 We might end up in here if the callee is deoptimized
  // as we race to call it.  We don't want to take a safepoint if
  // the caller was interpreted because the caller frame will look
  // interpreted to the stack walkers and arguments are now
  // "compiled" so it is much better to make this transition
  // invisible to the stack walking code. The i2c path will
  // place the callee method in the callee_target. It is stashed
  // there because if we try and find the callee by normal means a
  // safepoint is possible and have trouble gc'ing the compiled args.
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stub_frame = current->last_frame();
  assert(stub_frame.is_runtime_frame(), "sanity check");
  // NOTE(review): '®_map' below looks like mojibake for '&reg_map'; as
  // written this cannot compile — confirm against upstream and fix.
  frame caller_frame = stub_frame.sender(®_map);

  if (caller_frame.is_interpreted_frame() ||
      caller_frame.is_entry_frame() ||
      caller_frame.is_upcall_stub_frame()) {
      // so bypassing it in c2i adapter is benign.
      return callee->get_c2i_no_clinit_check_entry();
    } else {
      return callee->get_c2i_entry();
    }
  }

  // Must be compiled to compiled path which is safe to stackwalk
  methodHandle callee_method;
  JRT_BLOCK
    // Force resolving of caller (if we called from compiled frame)
    callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1619
// Handle abstract method call
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
  PerfTraceTime timer(_perf_handle_wrong_method_total_time);

  // Verbose error message for AbstractMethodError.
  // Get the called method from the invoke bytecode.
  vframeStream vfst(current, true);
  assert(!vfst.at_end(), "Java frame must exist");
  methodHandle caller(current, vfst.method());
  Bytecode_invoke invoke(caller, vfst.bci());
  DEBUG_ONLY( invoke.verify(); )

  // Find the compiled caller frame.
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::include,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stubFrame = current->last_frame();
  assert(stubFrame.is_runtime_frame(), "must be");
  // NOTE(review): '®_map' below looks like mojibake for '&reg_map'; as
  // written this cannot compile — confirm against upstream and fix.
  frame callerFrame = stubFrame.sender(®_map);
  assert(callerFrame.is_compiled_frame(), "must be");

  // Install exception and return forward entry.
  address res = SharedRuntime::throw_AbstractMethodError_entry();
      LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
    }
  JRT_BLOCK_END
  return res;
JRT_END
1655
1656 // return verified_code_entry if interp_only_mode is not set for the current thread;
1657 // otherwise return c2i entry.
1658 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1659 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1660 // In interp_only_mode we need to go to the interpreted entry
1661 // The c2i won't patch in this mode -- see fixup_callers_callsite
1662 return callee_method->get_c2i_entry();
1663 }
1664 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1665 return callee_method->verified_code_entry();
1666 }
1667
1668 // resolve a static call and patch code
1669 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1670 PerfTraceTime timer(_perf_resolve_static_total_time);
1671
1672 methodHandle callee_method;
1673 bool enter_special = false;
1674 JRT_BLOCK
1675 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1676 current->set_vm_result_metadata(callee_method());
1677 JRT_BLOCK_END
1678 // return compiled code entry point after potential safepoints
1679 return get_resolved_entry(current, callee_method);
1680 JRT_END
1681
1682 // resolve virtual call and update inline cache to monomorphic
1683 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1684 PerfTraceTime timer(_perf_resolve_virtual_total_time);
1685
1686 methodHandle callee_method;
1687 JRT_BLOCK
1688 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1689 current->set_vm_result_metadata(callee_method());
1690 JRT_BLOCK_END
1691 // return compiled code entry point after potential safepoints
1692 return get_resolved_entry(current, callee_method);
1693 JRT_END
1694
1695
1696 // Resolve a virtual call that can be statically bound (e.g., always
1697 // monomorphic, so it has no inline cache). Patch code to resolved target.
1698 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1699 PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1700
1701 methodHandle callee_method;
1702 JRT_BLOCK
1703 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1704 current->set_vm_result_metadata(callee_method());
1705 JRT_BLOCK_END
1706 // return compiled code entry point after potential safepoints
1707 return get_resolved_entry(current, callee_method);
1708 JRT_END
1709
1710 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1711 JavaThread* current = THREAD;
1712 ResourceMark rm(current);
1713 CallInfo call_info;
1714 Bytecodes::Code bc;
1715
1716 // receiver is null for static calls. An exception is thrown for null
1717 // receivers for non-static calls
1718 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1719
1720 methodHandle callee_method(current, call_info.selected_method());
1721
1722 AtomicAccess::inc(&_ic_miss_ctr);
1723
1724 #ifndef PRODUCT
1725 // Statistics & Tracing
1726 if (TraceCallFixup) {
1727 ResourceMark rm(current);
1728 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1729 callee_method->print_short_name(tty);
1730 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1731 }
1732
1733 if (ICMissHistogram) {
1734 MutexLocker m(VMStatistic_lock);
1735 RegisterMap reg_map(current,
1736 RegisterMap::UpdateMap::skip,
1737 RegisterMap::ProcessFrames::include,
1738 RegisterMap::WalkContinuation::skip);
1739 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1740 // produce statistics under the lock
1741 trace_ic_miss(f.pc());
1742 }
1743 #endif
1744
1827 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1828 cdc->set_to_clean();
1829 break;
1830 }
1831
1832 case relocInfo::virtual_call_type: {
1833 // compiled, dispatched call (which used to call an interpreted method)
1834 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1835 inline_cache->set_to_clean();
1836 break;
1837 }
1838 default:
1839 break;
1840 }
1841 }
1842 }
1843 }
1844
1845 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1846
1847 AtomicAccess::inc(&_wrong_method_ctr);
1848
1849 #ifndef PRODUCT
1850 if (TraceCallFixup) {
1851 ResourceMark rm(current);
1852 tty->print("handle_wrong_method reresolving call to");
1853 callee_method->print_short_name(tty);
1854 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1855 }
1856 #endif
1857
1858 return callee_method;
1859 }
1860
1861 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1862 // The faulting unsafe accesses should be changed to throw the error
1863 // synchronously instead. Meanwhile the faulting instruction will be
1864 // skipped over (effectively turning it into a no-op) and an
1865 // asynchronous exception will be raised which the thread will
1866 // handle at a later point. If the instruction is a load it will
1867 // return garbage.
1868
1869 // Request an async exception.
2112 if (CheckJNICalls) {
2113 fatal("Object has been unlocked by JNI");
2114 }
2115 return;
2116 }
2117 ObjectSynchronizer::exit(obj, lock, current);
2118 }
2119
2120 // Handles the uncommon cases of monitor unlocking in compiled code
2121 JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
2122 assert(current == JavaThread::current(), "pre-condition");
2123 SharedRuntime::monitor_exit_helper(obj, lock, current);
2124 JRT_END
2125
2126 #ifndef PRODUCT
2127
2128 void SharedRuntime::print_statistics() {
2129 ttyLocker ttyl;
2130 if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");
2131
2132 SharedRuntime::print_ic_miss_histogram_on(tty);
2133 SharedRuntime::print_counters_on(tty);
2134 AdapterHandlerLibrary::print_statistics_on(tty);
2135
2136 if (xtty != nullptr) xtty->tail("statistics");
2137 }
2138
2139 //void SharedRuntime::print_counters_on(outputStream* st) {
2140 // // Dump the JRT_ENTRY counters
2141 // if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2142 // if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2143 // if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2144 // if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2145 // if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2146 // if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2147 //
2148 // st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2149 // st->print_cr("%5u wrong method", _wrong_method_ctr);
2150 // st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2151 // st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2152 // st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2153 //
2154 // if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2155 // if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2156 // if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2157 // if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2158 // if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2159 // if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2160 // if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2161 // if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2162 // if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2163 // if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2164 // if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2165 // if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2166 // if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2167 // if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2168 // if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2169 // if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
2170 // if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2171 //}
2172
2173 inline double percent(int64_t x, int64_t y) {
2174 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2175 }
2176
// Non-product diagnostic: per-compiled-method histograms of argument count
// ("arity") and argument size in words, plus call totals, printed via
// print_histogram().
class MethodArityHistogram {
 public:
  enum { MAX_ARITY = 256 };
 private:
  static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
  static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
  static uint64_t _total_compiled_calls;
  static uint64_t _max_compiled_calls_per_method;
  static int _max_arity;                       // max. arity seen
  static int _max_size;                        // max. arg size seen

  // Classify one nmethod into the histograms; invoked for every nmethod via
  // CodeCache::nmethods_do() below.
  static void add_method_to_histogram(nmethod* nm) {
    Method* method = (nm == nullptr) ? nullptr : nm->method();
    if (method != nullptr) {
      ArgumentCount args(method->signature());
      int arity = args.size() + (method->is_static() ? 0 : 1);
    // Take the Compile_lock to protect against changes in the CodeBlob structures
    MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
    // Take the CodeCache_lock to protect against changes in the CodeHeap structure
    MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    _max_arity = _max_size = 0;
    _total_compiled_calls = 0;
    _max_compiled_calls_per_method = 0;
    for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
    CodeCache::nmethods_do(add_method_to_histogram);
    print_histogram();
  }
};
2249
// Static storage for MethodArityHistogram's accumulators (class above).
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2256
2257 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2258 tty->print_cr("Calls from compiled code:");
2259 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2260 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2261 int64_t mono_i = _nof_interface_calls;
2262 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2263 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2264 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2265 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2266 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2267 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2268 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2269 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2270 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2271 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2272 tty->cr();
2273 tty->print_cr("Note 1: counter updates are not MT-safe.");
2274 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2275 tty->print_cr(" %% in nested categories are relative to their category");
2276 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2277 tty->cr();
2581 }
2582 #endif // INCLUDE_CDS
2583 if (entry == nullptr) {
2584 assert_lock_strong(AdapterHandlerLibrary_lock);
2585 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2586 if (entry_p != nullptr) {
2587 entry = *entry_p;
2588 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2589 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2590 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2591 #ifndef PRODUCT
2592 _runtime_hits++;
2593 #endif
2594 }
2595 }
2596 AdapterFingerPrint::deallocate(fp);
2597 return entry;
2598 }
2599
2600 #ifndef PRODUCT
2601 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2602 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2603 return sizeof(*key) + sizeof(*a);
2604 };
2605 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2606 ts.print(st, "AdapterHandlerTable");
2607 st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2608 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2609 int total_hits = _archived_hits + _runtime_hits;
2610 st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2611 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2612 }
2613 #endif // !PRODUCT
2614
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary

// Shared pre-built adapters for the most common trivial signatures
// (no-arg, single int, single oop, oop+int, oop+oop). Initialized to
// nullptr here; presumably installed during library initialization —
// the code that sets them is elsewhere in this file.
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
#if INCLUDE_CDS
// Table of adapters restored from the AOT/CDS archive (CDS builds only).
ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
#endif // INCLUDE_CDS
// Size of the shared buffer blob used when generating adapters (16 KB).
static const int AdapterHandlerLibrary_size = 16*K;
// Backing buffer blob; allocated elsewhere — see buffer_blob() which
// asserts it is initialized before use.
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
// Source of adapter ids; volatile — NOTE(review): looks like it is bumped
// from multiple threads, confirm updates use atomic ops at the use sites.
volatile uint AdapterHandlerLibrary::_id_counter = 0;
2628
2629 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2630 assert(_buffer != nullptr, "should be initialized");
2631 return _buffer;
2632 }
2633
3495 };
3496 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3497 _adapter_handler_table->iterate(findblob_runtime_table);
3498 }
3499 assert(found, "Should have found handler");
3500 }
3501
3502 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3503 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3504 if (adapter_blob() != nullptr) {
3505 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3506 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3507 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3508 if (get_c2i_no_clinit_check_entry() != nullptr) {
3509 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3510 }
3511 }
3512 st->cr();
3513 }
3514
3515 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3516 assert(current == JavaThread::current(), "pre-condition");
3517 StackOverflow* overflow_state = current->stack_overflow_state();
3518 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3519 overflow_state->set_reserved_stack_activation(current->stack_base());
3520 JRT_END
3521
3522 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3523 ResourceMark rm(current);
3524 frame activation;
3525 nmethod* nm = nullptr;
3526 int count = 1;
3527
3528 assert(fr.is_java_frame(), "Must start on Java frame");
3529
3530 RegisterMap map(JavaThread::current(),
3531 RegisterMap::UpdateMap::skip,
3532 RegisterMap::ProcessFrames::skip,
3533 RegisterMap::WalkContinuation::skip); // don't walk continuations
3534 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|