53 #include "oops/klass.hpp"
54 #include "oops/method.inline.hpp"
55 #include "oops/objArrayKlass.hpp"
56 #include "oops/oop.inline.hpp"
57 #include "prims/forte.hpp"
58 #include "prims/jvmtiExport.hpp"
59 #include "prims/jvmtiThreadState.hpp"
60 #include "prims/methodHandles.hpp"
61 #include "prims/nativeLookup.hpp"
62 #include "runtime/arguments.hpp"
63 #include "runtime/atomicAccess.hpp"
64 #include "runtime/basicLock.inline.hpp"
65 #include "runtime/frame.inline.hpp"
66 #include "runtime/handles.inline.hpp"
67 #include "runtime/init.hpp"
68 #include "runtime/interfaceSupport.inline.hpp"
69 #include "runtime/java.hpp"
70 #include "runtime/javaCalls.hpp"
71 #include "runtime/jniHandles.inline.hpp"
72 #include "runtime/osThread.hpp"
73 #include "runtime/perfData.hpp"
74 #include "runtime/sharedRuntime.hpp"
75 #include "runtime/stackWatermarkSet.hpp"
76 #include "runtime/stubRoutines.hpp"
77 #include "runtime/synchronizer.hpp"
78 #include "runtime/timerTrace.hpp"
79 #include "runtime/vframe.inline.hpp"
80 #include "runtime/vframeArray.hpp"
81 #include "runtime/vm_version.hpp"
82 #include "utilities/copy.hpp"
83 #include "utilities/dtrace.hpp"
84 #include "utilities/events.hpp"
85 #include "utilities/exceptions.hpp"
86 #include "utilities/globalDefinitions.hpp"
87 #include "utilities/hashTable.hpp"
88 #include "utilities/macros.hpp"
89 #include "utilities/xmlstream.hpp"
90 #ifdef COMPILER1
91 #include "c1/c1_Runtime1.hpp"
92 #endif
93 #ifdef COMPILER2
94 #include "opto/runtime.hpp"
95 #endif
96 #if INCLUDE_JFR
97 #include "jfr/jfr.inline.hpp"
98 #endif
99
100 // Shared runtime stub routines reside in their own unique blob with a
101 // single entry point
102
103
// Define the static blob/stub fields of SharedRuntime, one per entry in
// the SHARED_STUBS_DO list (the list itself lives in the header).
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type* SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

// Stub for cont_doYield; defined separately from the list above.
nmethod* SharedRuntime::_cont_doYield_stub;
110
111 #if 0
112 // TODO tweak global stub name generation to match this
113 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
114 const char *SharedRuntime::_stub_names[] = {
115 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
116 };
117 #endif
118
119 //----------------------------generate_stubs-----------------------------------
120 void SharedRuntime::generate_initial_stubs() {
121 // Build this early so it's available for the interpreter.
122 _throw_StackOverflowError_blob =
123 generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
124 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
125 }
126
127 void SharedRuntime::generate_stubs() {
128 _wrong_method_blob =
129 generate_resolve_blob(StubId::shared_wrong_method_id,
130 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
160 generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
161 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
162
163 #if COMPILER2_OR_JVMCI
164 // Vectors are generated only by C2 and JVMCI.
165 bool support_wide = is_wide_vector(MaxVectorSize);
166 if (support_wide) {
167 _polling_page_vectors_safepoint_handler_blob =
168 generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
169 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
170 }
171 #endif // COMPILER2_OR_JVMCI
172 _polling_page_safepoint_handler_blob =
173 generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
174 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
175 _polling_page_return_handler_blob =
176 generate_handler_blob(StubId::shared_polling_page_return_handler_id,
177 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
178
179 generate_deopt_blob();
180 }
181
// One-time setup hook: delegates to AdapterHandlerLibrary initialization.
void SharedRuntime::init_adapter_library() {
  AdapterHandlerLibrary::initialize();
}
185
186 #if INCLUDE_JFR
187 //------------------------------generate jfr runtime stubs ------
188 void SharedRuntime::generate_jfr_stubs() {
189 ResourceMark rm;
190 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
191 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
192
193 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
194 _jfr_return_lease_blob = generate_jfr_return_lease();
195 }
196
197 #endif // INCLUDE_JFR
198
199 #include <math.h>
200
201 // Implementation of SharedRuntime
202
203 #ifndef PRODUCT
204 // For statistics
// Counters for call-site resolution and implicit-exception events.
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-profile totals.  Updates are not MT-safe (see the note printed by
// print_call_statistics below).
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

// Slow-path allocation and monitor counters (dumped by print_statistics).
uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Fixed-size inline-cache miss table, filled by trace_ic_miss().
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
248
249
250 void SharedRuntime::trace_ic_miss(address at) {
251 for (int i = 0; i < _ICmiss_index; i++) {
252 if (_ICmiss_at[i] == at) {
253 _ICmiss_count[i]++;
254 return;
255 }
256 }
257 int index = _ICmiss_index++;
258 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
259 _ICmiss_at[index] = at;
260 _ICmiss_count[index] = 1;
261 }
262
263 void SharedRuntime::print_ic_miss_histogram() {
264 if (ICMissHistogram) {
265 tty->print_cr("IC Miss Histogram:");
266 int tot_misses = 0;
267 for (int i = 0; i < _ICmiss_index; i++) {
268 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
269 tot_misses += _ICmiss_count[i];
270 }
271 tty->print_cr("Total IC misses: %7d", tot_misses);
272 }
273 }
274
275 #ifdef COMPILER2
276 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
// Print a jboolean debug value.
void SharedRuntime::debug_print_value(jboolean x) {
  tty->print_cr("boolean %d", x);
}

// Print a jbyte debug value.
void SharedRuntime::debug_print_value(jbyte x) {
  tty->print_cr("byte %d", x);
}

// Print a jshort debug value.
void SharedRuntime::debug_print_value(jshort x) {
  tty->print_cr("short %d", x);
}
288
289 void SharedRuntime::debug_print_value(jchar x) {
290 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
291 }
737
738 void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Symbol* name, const char *message) {
739 Handle h_exception = Exceptions::new_exception(current, name, message);
740 throw_and_post_jvmti_exception(current, h_exception);
741 }
742
743 // The interpreter code to call this tracing function is only
744 // called/generated when UL is on for redefine, class and has the right level
745 // and tags. Since obsolete methods are never compiled, we don't have
746 // to modify the compilers to generate calls to this function.
747 //
748 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
749 JavaThread* thread, Method* method))
750 if (method->is_obsolete()) {
751 // We are calling an obsolete method, but this is not necessarily
752 // an error. Our method could have been redefined just after we
753 // fetched the Method* from the constant pool.
754 ResourceMark rm;
755 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
756 }
757 return 0;
758 JRT_END
759
760 // ret_pc points into caller; we are returning caller's exception handler
761 // for given exception
// Note that the implementation of this method assumes it's only called when an exception has actually occurred
763 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
764 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
765 assert(nm != nullptr, "must exist");
766 ResourceMark rm;
767
768 #if INCLUDE_JVMCI
769 if (nm->is_compiled_by_jvmci()) {
770 // lookup exception handler for this pc
771 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
772 ExceptionHandlerTable table(nm);
773 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
774 if (t != nullptr) {
775 return nm->code_begin() + t->pco();
776 } else {
1390
1391 // determine call info & receiver
1392 // note: a) receiver is null for static calls
1393 // b) an exception is thrown if receiver is null for non-static calls
1394 CallInfo call_info;
1395 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1396 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1397
1398 NoSafepointVerifier nsv;
1399
1400 methodHandle callee_method(current, call_info.selected_method());
1401
1402 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1403 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1404 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1405 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1406 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1407
1408 assert(!caller_nm->is_unloading(), "It should not be unloading");
1409
1410 #ifndef PRODUCT
1411 // tracing/debugging/statistics
1412 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1413 (is_virtual) ? (&_resolve_virtual_ctr) :
1414 (&_resolve_static_ctr);
1415 AtomicAccess::inc(addr);
1416
1417 if (TraceCallFixup) {
1418 ResourceMark rm(current);
1419 tty->print("resolving %s%s (%s) call to",
1420 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1421 Bytecodes::name(invoke_code));
1422 callee_method->print_short_name(tty);
1423 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1424 p2i(caller_frame.pc()), p2i(callee_method->code()));
1425 }
1426 #endif
1427
1428 if (invoke_code == Bytecodes::_invokestatic) {
1429 assert(callee_method->method_holder()->is_initialized() ||
1430 callee_method->method_holder()->is_reentrant_initialization(current),
1431 "invalid class initialization state for invoke_static");
1432 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1433 // In order to keep class initialization check, do not patch call
1434 // site for static call when the class is not fully initialized.
1435 // Proper check is enforced by call site re-resolution on every invocation.
1436 //
1452
1453 // Make sure the callee nmethod does not get deoptimized and removed before
1454 // we are done patching the code.
1455
1456
1457 CompiledICLocker ml(caller_nm);
1458 if (is_virtual && !is_optimized) {
1459 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1460 inline_cache->update(&call_info, receiver->klass());
1461 } else {
1462 // Callsite is a direct call - set it to the destination method
1463 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1464 callsite->set(callee_method);
1465 }
1466
1467 return callee_method;
1468 }
1469
1470 // Inline caches exist only in compiled code
1471 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1472 #ifdef ASSERT
1473 RegisterMap reg_map(current,
1474 RegisterMap::UpdateMap::skip,
1475 RegisterMap::ProcessFrames::include,
1476 RegisterMap::WalkContinuation::skip);
1477 frame stub_frame = current->last_frame();
1478 assert(stub_frame.is_runtime_frame(), "sanity check");
1479 frame caller_frame = stub_frame.sender(®_map);
1480 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1481 #endif /* ASSERT */
1482
1483 methodHandle callee_method;
1484 JRT_BLOCK
1485 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1486 // Return Method* through TLS
1487 current->set_vm_result_metadata(callee_method());
1488 JRT_BLOCK_END
1489 // return compiled code entry point after potential safepoints
1490 return get_resolved_entry(current, callee_method);
1491 JRT_END
1492
1493
1494 // Handle call site that has been made non-entrant
1495 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1496 // 6243940 We might end up in here if the callee is deoptimized
1497 // as we race to call it. We don't want to take a safepoint if
1498 // the caller was interpreted because the caller frame will look
1499 // interpreted to the stack walkers and arguments are now
1500 // "compiled" so it is much better to make this transition
1501 // invisible to the stack walking code. The i2c path will
1502 // place the callee method in the callee_target. It is stashed
1503 // there because if we try and find the callee by normal means a
1504 // safepoint is possible and have trouble gc'ing the compiled args.
1505 RegisterMap reg_map(current,
1506 RegisterMap::UpdateMap::skip,
1507 RegisterMap::ProcessFrames::include,
1508 RegisterMap::WalkContinuation::skip);
1509 frame stub_frame = current->last_frame();
1510 assert(stub_frame.is_runtime_frame(), "sanity check");
1511 frame caller_frame = stub_frame.sender(®_map);
1512
1513 if (caller_frame.is_interpreted_frame() ||
1514 caller_frame.is_entry_frame() ||
1515 caller_frame.is_upcall_stub_frame()) {
1528 // so bypassing it in c2i adapter is benign.
1529 return callee->get_c2i_no_clinit_check_entry();
1530 } else {
1531 return callee->get_c2i_entry();
1532 }
1533 }
1534
1535 // Must be compiled to compiled path which is safe to stackwalk
1536 methodHandle callee_method;
1537 JRT_BLOCK
1538 // Force resolving of caller (if we called from compiled frame)
1539 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1540 current->set_vm_result_metadata(callee_method());
1541 JRT_BLOCK_END
1542 // return compiled code entry point after potential safepoints
1543 return get_resolved_entry(current, callee_method);
1544 JRT_END
1545
1546 // Handle abstract method call
1547 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1548 // Verbose error message for AbstractMethodError.
1549 // Get the called method from the invoke bytecode.
1550 vframeStream vfst(current, true);
1551 assert(!vfst.at_end(), "Java frame must exist");
1552 methodHandle caller(current, vfst.method());
1553 Bytecode_invoke invoke(caller, vfst.bci());
1554 DEBUG_ONLY( invoke.verify(); )
1555
1556 // Find the compiled caller frame.
1557 RegisterMap reg_map(current,
1558 RegisterMap::UpdateMap::include,
1559 RegisterMap::ProcessFrames::include,
1560 RegisterMap::WalkContinuation::skip);
1561 frame stubFrame = current->last_frame();
1562 assert(stubFrame.is_runtime_frame(), "must be");
1563 frame callerFrame = stubFrame.sender(®_map);
1564 assert(callerFrame.is_compiled_frame(), "must be");
1565
1566 // Install exception and return forward entry.
1567 address res = SharedRuntime::throw_AbstractMethodError_entry();
1574 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1575 }
1576 JRT_BLOCK_END
1577 return res;
1578 JRT_END
1579
1580 // return verified_code_entry if interp_only_mode is not set for the current thread;
1581 // otherwise return c2i entry.
1582 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1583 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1584 // In interp_only_mode we need to go to the interpreted entry
1585 // The c2i won't patch in this mode -- see fixup_callers_callsite
1586 return callee_method->get_c2i_entry();
1587 }
1588 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1589 return callee_method->verified_code_entry();
1590 }
1591
1592 // resolve a static call and patch code
1593 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1594 methodHandle callee_method;
1595 bool enter_special = false;
1596 JRT_BLOCK
1597 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1598 current->set_vm_result_metadata(callee_method());
1599 JRT_BLOCK_END
1600 // return compiled code entry point after potential safepoints
1601 return get_resolved_entry(current, callee_method);
1602 JRT_END
1603
1604 // resolve virtual call and update inline cache to monomorphic
// Resolve a virtual call site and update its inline cache to monomorphic.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // resolve_helper(is_virtual=true, is_optimized=false)
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    // Hand the Method* back through TLS so it survives any safepoint.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1614
1615
1616 // Resolve a virtual call that can be statically bound (e.g., always
1617 // monomorphic, so it has no inline cache). Patch code to resolved target.
// Resolve a statically-bindable virtual call (monomorphic, no inline
// cache) and patch the code to the resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // resolve_helper(is_virtual=true, is_optimized=true)
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    // Hand the Method* back through TLS so it survives any safepoint.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1627
1628 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1629 JavaThread* current = THREAD;
1630 ResourceMark rm(current);
1631 CallInfo call_info;
1632 Bytecodes::Code bc;
1633
1634 // receiver is null for static calls. An exception is thrown for null
1635 // receivers for non-static calls
1636 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1637
1638 methodHandle callee_method(current, call_info.selected_method());
1639
1640 #ifndef PRODUCT
1641 AtomicAccess::inc(&_ic_miss_ctr);
1642
1643 // Statistics & Tracing
1644 if (TraceCallFixup) {
1645 ResourceMark rm(current);
1646 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1647 callee_method->print_short_name(tty);
1648 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1649 }
1650
1651 if (ICMissHistogram) {
1652 MutexLocker m(VMStatistic_lock);
1653 RegisterMap reg_map(current,
1654 RegisterMap::UpdateMap::skip,
1655 RegisterMap::ProcessFrames::include,
1656 RegisterMap::WalkContinuation::skip);
1657 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1658 // produce statistics under the lock
1659 trace_ic_miss(f.pc());
1660 }
1661 #endif
1662
1745 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1746 cdc->set_to_clean();
1747 break;
1748 }
1749
1750 case relocInfo::virtual_call_type: {
1751 // compiled, dispatched call (which used to call an interpreted method)
1752 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1753 inline_cache->set_to_clean();
1754 break;
1755 }
1756 default:
1757 break;
1758 }
1759 }
1760 }
1761 }
1762
1763 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1764
1765
1766 #ifndef PRODUCT
1767 AtomicAccess::inc(&_wrong_method_ctr);
1768
1769 if (TraceCallFixup) {
1770 ResourceMark rm(current);
1771 tty->print("handle_wrong_method reresolving call to");
1772 callee_method->print_short_name(tty);
1773 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1774 }
1775 #endif
1776
1777 return callee_method;
1778 }
1779
1780 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1781 // The faulting unsafe accesses should be changed to throw the error
1782 // synchronously instead. Meanwhile the faulting instruction will be
1783 // skipped over (effectively turning it into a no-op) and an
1784 // asynchronous exception will be raised which the thread will
1785 // handle at a later point. If the instruction is a load it will
1786 // return garbage.
1787
1788 // Request an async exception.
2031 if (CheckJNICalls) {
2032 fatal("Object has been unlocked by JNI");
2033 }
2034 return;
2035 }
2036 ObjectSynchronizer::exit(obj, lock, current);
2037 }
2038
2039 // Handles the uncommon cases of monitor unlocking in compiled code
JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  // Delegate to the shared slow-path unlock helper.
  SharedRuntime::monitor_exit_helper(obj, lock, current);
JRT_END
2044
2045 #ifndef PRODUCT
2046
// Dump all SharedRuntime counters (non-PRODUCT only).  Output is wrapped
// in a <statistics> element when xtty (XML logging) is active.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram();

  // Dump the JRT_ENTRY counters
  if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
  if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
  if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
  if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
  if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
  if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);

  // Call-resolution counters are printed unconditionally.
  tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
  tty->print_cr("%5u wrong method", _wrong_method_ctr);
  tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
  tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
  tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);

  // Remaining slow-path counters are printed only if non-zero.
  if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
  if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
  if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
  if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
  if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
  if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
  if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
  if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
  if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
  if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
  if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
  if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
  if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
  if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
  if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
  if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
  if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);

  AdapterHandlerLibrary::print_statistics();

  if (xtty != nullptr) xtty->tail("statistics");
}
2089
2090 inline double percent(int64_t x, int64_t y) {
2091 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2092 }
2093
2094 class MethodArityHistogram {
2095 public:
2096 enum { MAX_ARITY = 256 };
2097 private:
2098 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2099 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2100 static uint64_t _total_compiled_calls;
2101 static uint64_t _max_compiled_calls_per_method;
2102 static int _max_arity; // max. arity seen
2103 static int _max_size; // max. arg size seen
2104
2105 static void add_method_to_histogram(nmethod* nm) {
2106 Method* method = (nm == nullptr) ? nullptr : nm->method();
2107 if (method != nullptr) {
2108 ArgumentCount args(method->signature());
2109 int arity = args.size() + (method->is_static() ? 0 : 1);
2154 // Take the Compile_lock to protect against changes in the CodeBlob structures
2155 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2156 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2157 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2158 _max_arity = _max_size = 0;
2159 _total_compiled_calls = 0;
2160 _max_compiled_calls_per_method = 0;
2161 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2162 CodeCache::nmethods_do(add_method_to_histogram);
2163 print_histogram();
2164 }
2165 };
2166
// Static storage for MethodArityHistogram (class declared above).
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2173
2174 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2175 tty->print_cr("Calls from compiled code:");
2176 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2177 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2178 int64_t mono_i = _nof_interface_calls;
2179 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2180 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2181 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2182 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2183 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2184 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2185 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2186 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2187 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2188 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2189 tty->cr();
2190 tty->print_cr("Note 1: counter updates are not MT-safe.");
2191 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2192 tty->print_cr(" %% in nested categories are relative to their category");
2193 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2194 tty->cr();
2498 }
2499 #endif // INCLUDE_CDS
2500 if (entry == nullptr) {
2501 assert_lock_strong(AdapterHandlerLibrary_lock);
2502 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2503 if (entry_p != nullptr) {
2504 entry = *entry_p;
2505 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2506 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2507 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2508 #ifndef PRODUCT
2509 _runtime_hits++;
2510 #endif
2511 }
2512 }
2513 AdapterFingerPrint::deallocate(fp);
2514 return entry;
2515 }
2516
2517 #ifndef PRODUCT
// File-local helper: print size and hit/miss statistics for the runtime
// adapter handler table.
static void print_table_statistics() {
  // Per-entry footprint estimate used by statistics_calculate().
  auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
    return sizeof(*key) + sizeof(*a);
  };
  TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
  ts.print(tty, "AdapterHandlerTable");
  tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
                _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
  // Hits split into archived (AOT/CDS) vs. runtime-built entries.
  int total_hits = _archived_hits + _runtime_hits;
  tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
                _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
}
2530 #endif
2531
2532 // ---------------------------------------------------------------------------
2533 // Implementation of AdapterHandlerLibrary
// Cached entries for common simple signatures (presumably fast paths that
// avoid a full fingerprint lookup -- confirm against the lookup code).
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
#if INCLUDE_CDS
// Adapters archived in the AOT/CDS image.
ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
#endif // INCLUDE_CDS
static const int AdapterHandlerLibrary_size = 16*K;
// Shared BufferBlob for the library (see buffer_blob() below).
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
volatile uint AdapterHandlerLibrary::_id_counter = 0;

// Accessor for the shared buffer; per the assert, initialize() must have
// run before this is called.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  assert(_buffer != nullptr, "should be initialized");
  return _buffer;
}
2550
3412 };
3413 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3414 _adapter_handler_table->iterate(findblob_runtime_table);
3415 }
3416 assert(found, "Should have found handler");
3417 }
3418
3419 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3420 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3421 if (adapter_blob() != nullptr) {
3422 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3423 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3424 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3425 if (get_c2i_no_clinit_check_entry() != nullptr) {
3426 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3427 }
3428 }
3429 st->cr();
3430 }
3431
3432 #ifndef PRODUCT
3433
// Non-PRODUCT statistics entry point; delegates to the file-local
// print_table_statistics() helper.
void AdapterHandlerLibrary::print_statistics() {
  print_table_statistics();
}
3437
3438 #endif /* PRODUCT */
3439
3440 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3441 assert(current == JavaThread::current(), "pre-condition");
3442 StackOverflow* overflow_state = current->stack_overflow_state();
3443 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3444 overflow_state->set_reserved_stack_activation(current->stack_base());
3445 JRT_END
3446
3447 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3448 ResourceMark rm(current);
3449 frame activation;
3450 nmethod* nm = nullptr;
3451 int count = 1;
3452
3453 assert(fr.is_java_frame(), "Must start on Java frame");
3454
3455 RegisterMap map(JavaThread::current(),
3456 RegisterMap::UpdateMap::skip,
3457 RegisterMap::ProcessFrames::skip,
3458 RegisterMap::WalkContinuation::skip); // don't walk continuations
3459 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|
53 #include "oops/klass.hpp"
54 #include "oops/method.inline.hpp"
55 #include "oops/objArrayKlass.hpp"
56 #include "oops/oop.inline.hpp"
57 #include "prims/forte.hpp"
58 #include "prims/jvmtiExport.hpp"
59 #include "prims/jvmtiThreadState.hpp"
60 #include "prims/methodHandles.hpp"
61 #include "prims/nativeLookup.hpp"
62 #include "runtime/arguments.hpp"
63 #include "runtime/atomicAccess.hpp"
64 #include "runtime/basicLock.inline.hpp"
65 #include "runtime/frame.inline.hpp"
66 #include "runtime/handles.inline.hpp"
67 #include "runtime/init.hpp"
68 #include "runtime/interfaceSupport.inline.hpp"
69 #include "runtime/java.hpp"
70 #include "runtime/javaCalls.hpp"
71 #include "runtime/jniHandles.inline.hpp"
72 #include "runtime/osThread.hpp"
73 #include "runtime/perfData.inline.hpp"
74 #include "runtime/sharedRuntime.hpp"
75 #include "runtime/stackWatermarkSet.hpp"
76 #include "runtime/stubRoutines.hpp"
77 #include "runtime/synchronizer.hpp"
78 #include "runtime/timerTrace.hpp"
79 #include "runtime/vframe.inline.hpp"
80 #include "runtime/vframeArray.hpp"
81 #include "runtime/vm_version.hpp"
82 #include "services/management.hpp"
83 #include "utilities/copy.hpp"
84 #include "utilities/dtrace.hpp"
85 #include "utilities/events.hpp"
86 #include "utilities/exceptions.hpp"
87 #include "utilities/globalDefinitions.hpp"
88 #include "utilities/hashTable.hpp"
89 #include "utilities/macros.hpp"
90 #include "utilities/xmlstream.hpp"
91 #ifdef COMPILER1
92 #include "c1/c1_Runtime1.hpp"
93 #endif
94 #ifdef COMPILER2
95 #include "opto/runtime.hpp"
96 #endif
97 #if INCLUDE_JFR
98 #include "jfr/jfr.inline.hpp"
99 #endif
100
101 // Shared runtime stub routines reside in their own unique blob with a
102 // single entry point
103
104
105 #define SHARED_STUB_FIELD_DEFINE(name, type) \
106 type* SharedRuntime::BLOB_FIELD_NAME(name);
107 SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
108 #undef SHARED_STUB_FIELD_DEFINE
109
110 nmethod* SharedRuntime::_cont_doYield_stub;
111
112 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
113 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time = nullptr;
114 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time = nullptr;
115 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
116 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time = nullptr;
117
118 #if 0
119 // TODO tweak global stub name generation to match this
120 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
121 const char *SharedRuntime::_stub_names[] = {
122 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
123 };
124 #endif
125
//----------------------------generate_stubs-----------------------------------
// Generates only the StackOverflowError throw stub; the remaining shared
// stubs are produced later by generate_stubs().
void SharedRuntime::generate_initial_stubs() {
  // Build this early so it's available for the interpreter.
  _throw_StackOverflowError_blob =
    generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
                             CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
}
133
134 void SharedRuntime::generate_stubs() {
135 _wrong_method_blob =
136 generate_resolve_blob(StubId::shared_wrong_method_id,
137 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
167 generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
168 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
169
170 #if COMPILER2_OR_JVMCI
171 // Vectors are generated only by C2 and JVMCI.
172 bool support_wide = is_wide_vector(MaxVectorSize);
173 if (support_wide) {
174 _polling_page_vectors_safepoint_handler_blob =
175 generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
176 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
177 }
178 #endif // COMPILER2_OR_JVMCI
179 _polling_page_safepoint_handler_blob =
180 generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
181 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
182 _polling_page_return_handler_blob =
183 generate_handler_blob(StubId::shared_polling_page_return_handler_id,
184 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
185
186 generate_deopt_blob();
187
188 if (UsePerfData) {
189 EXCEPTION_MARK;
190 NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resovle_opt_virtual_call");
191 NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resovle_virtual_call");
192 NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resovle_static_call");
193 NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
194 NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time , SUN_CI, "ic_miss");
195 if (HAS_PENDING_EXCEPTION) {
196 vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
197 }
198 }
199 }
200
// One-time setup hook: forwards to AdapterHandlerLibrary::initialize().
void SharedRuntime::init_adapter_library() {
  AdapterHandlerLibrary::initialize();
}
204
205 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
206 st->print(" %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
207 if (TraceThreadTime) {
208 st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
209 }
210 st->print(" / %5d events", cnt);
211 st->cr();
212 }
213
214 void SharedRuntime::print_counters_on(outputStream* st) {
215 st->print_cr("SharedRuntime:");
216 if (UsePerfData) {
217 print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
218 print_counter_on(st, "resolve_virtual_call:", _perf_resolve_virtual_total_time, _resolve_virtual_ctr);
219 print_counter_on(st, "resolve_static_call:", _perf_resolve_static_total_time, _resolve_static_ctr);
220 print_counter_on(st, "handle_wrong_method:", _perf_handle_wrong_method_total_time, _wrong_method_ctr);
221 print_counter_on(st, "ic_miss:", _perf_ic_miss_total_time, _ic_miss_ctr);
222
223 jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
224 _perf_resolve_virtual_total_time->elapsed_counter_value() +
225 _perf_resolve_static_total_time->elapsed_counter_value() +
226 _perf_handle_wrong_method_total_time->elapsed_counter_value() +
227 _perf_ic_miss_total_time->elapsed_counter_value());
228 st->print("Total: " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
229 if (TraceThreadTime) {
230 jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
231 _perf_resolve_virtual_total_time->thread_counter_value() +
232 _perf_resolve_static_total_time->thread_counter_value() +
233 _perf_handle_wrong_method_total_time->thread_counter_value() +
234 _perf_ic_miss_total_time->thread_counter_value());
235 st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);
236
237 }
238 st->cr();
239 } else {
240 st->print_cr(" no data (UsePerfData is turned off)");
241 }
242 }
243
244 #if INCLUDE_JFR
245 //------------------------------generate jfr runtime stubs ------
246 void SharedRuntime::generate_jfr_stubs() {
247 ResourceMark rm;
248 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
249 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
250
251 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
252 _jfr_return_lease_blob = generate_jfr_return_lease();
253 }
254
255 #endif // INCLUDE_JFR
256
257 #include <math.h>
258
259 // Implementation of SharedRuntime
260
261 // For statistics
262 uint SharedRuntime::_ic_miss_ctr = 0;
263 uint SharedRuntime::_wrong_method_ctr = 0;
264 uint SharedRuntime::_resolve_static_ctr = 0;
265 uint SharedRuntime::_resolve_virtual_ctr = 0;
266 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
267
268 #ifndef PRODUCT
269 uint SharedRuntime::_implicit_null_throws = 0;
270 uint SharedRuntime::_implicit_div0_throws = 0;
271
272 int64_t SharedRuntime::_nof_normal_calls = 0;
273 int64_t SharedRuntime::_nof_inlined_calls = 0;
274 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
275 int64_t SharedRuntime::_nof_static_calls = 0;
276 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
277 int64_t SharedRuntime::_nof_interface_calls = 0;
278 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
279
280 uint SharedRuntime::_new_instance_ctr=0;
281 uint SharedRuntime::_new_array_ctr=0;
282 uint SharedRuntime::_multi2_ctr=0;
283 uint SharedRuntime::_multi3_ctr=0;
284 uint SharedRuntime::_multi4_ctr=0;
285 uint SharedRuntime::_multi5_ctr=0;
286 uint SharedRuntime::_mon_enter_stub_ctr=0;
287 uint SharedRuntime::_mon_exit_stub_ctr=0;
288 uint SharedRuntime::_mon_enter_ctr=0;
302 uint SharedRuntime::_unsafe_set_memory_ctr=0;
303
304 int SharedRuntime::_ICmiss_index = 0;
305 int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
306 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
307
308
309 void SharedRuntime::trace_ic_miss(address at) {
310 for (int i = 0; i < _ICmiss_index; i++) {
311 if (_ICmiss_at[i] == at) {
312 _ICmiss_count[i]++;
313 return;
314 }
315 }
316 int index = _ICmiss_index++;
317 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
318 _ICmiss_at[index] = at;
319 _ICmiss_count[index] = 1;
320 }
321
322 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
323 if (ICMissHistogram) {
324 st->print_cr("IC Miss Histogram:");
325 int tot_misses = 0;
326 for (int i = 0; i < _ICmiss_index; i++) {
327 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
328 tot_misses += _ICmiss_count[i];
329 }
330 st->print_cr("Total IC misses: %7d", tot_misses);
331 }
332 }
333
334 #ifdef COMPILER2
335 // Runtime methods for printf-style debug nodes (same printing format as fieldDescriptor::print_on_for)
// Prints a jboolean (as an int) to tty, for C2 printf-style debug nodes.
void SharedRuntime::debug_print_value(jboolean x) {
  tty->print_cr("boolean %d", x);
}
339
// Prints a jbyte (as an int) to tty, for C2 printf-style debug nodes.
void SharedRuntime::debug_print_value(jbyte x) {
  tty->print_cr("byte %d", x);
}
343
// Prints a jshort (as an int) to tty, for C2 printf-style debug nodes.
void SharedRuntime::debug_print_value(jshort x) {
  tty->print_cr("short %d", x);
}
347
348 void SharedRuntime::debug_print_value(jchar x) {
349 tty->print_cr("char %c %d", isprint(x) ? x : ' ', x);
350 }
796
// Convenience overload: constructs the named exception (with an optional
// message) and forwards to the Handle-based overload.
void SharedRuntime::throw_and_post_jvmti_exception(JavaThread* current, Symbol* name, const char *message) {
  Handle h_exception = Exceptions::new_exception(current, name, message);
  throw_and_post_jvmti_exception(current, h_exception);
}
801
802 // The interpreter code to call this tracing function is only
803 // called/generated when UL is on for redefine, class and has the right level
804 // and tags. Since obsolete methods are never compiled, we don't have
805 // to modify the compilers to generate calls to this function.
806 //
JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
    JavaThread* thread, Method* method))
  if (method->is_obsolete()) {
    // We are calling an obsolete method, but this is not necessarily
    // an error. Our method could have been redefined just after we
    // fetched the Method* from the constant pool.
    ResourceMark rm;
    log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
  }

  // Additionally trace every method entry when interpreter+bytecode
  // tracing is enabled at Trace level.
  LogStreamHandle(Trace, interpreter, bytecode) log;
  if (log.is_enabled()) {
    ResourceMark rm;
    log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
              p2i(thread),
              (method->is_static() ? "static" : "virtual"),
              method->name_and_sig_as_C_string(),
              (method->is_native() ? " native" : ""),
              (thread->class_being_initialized() != nullptr ? " clinit" : ""),
              (method->method_holder()->is_initialized() ? "" : " being_initialized"));
  }
  // Always returns 0; the value itself carries no information.
  return 0;
JRT_END
830
831 // ret_pc points into caller; we are returning caller's exception handler
832 // for given exception
833 // Note that the implementation of this method assumes it's only called when an exception has actually occured
834 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
835 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
836 assert(nm != nullptr, "must exist");
837 ResourceMark rm;
838
839 #if INCLUDE_JVMCI
840 if (nm->is_compiled_by_jvmci()) {
841 // lookup exception handler for this pc
842 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
843 ExceptionHandlerTable table(nm);
844 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
845 if (t != nullptr) {
846 return nm->code_begin() + t->pco();
847 } else {
1461
1462 // determine call info & receiver
1463 // note: a) receiver is null for static calls
1464 // b) an exception is thrown if receiver is null for non-static calls
1465 CallInfo call_info;
1466 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1467 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1468
1469 NoSafepointVerifier nsv;
1470
1471 methodHandle callee_method(current, call_info.selected_method());
1472
1473 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1474 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1475 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1476 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1477 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1478
1479 assert(!caller_nm->is_unloading(), "It should not be unloading");
1480
1481 // tracing/debugging/statistics
1482 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1483 (is_virtual) ? (&_resolve_virtual_ctr) :
1484 (&_resolve_static_ctr);
1485 AtomicAccess::inc(addr);
1486
1487 #ifndef PRODUCT
1488 if (TraceCallFixup) {
1489 ResourceMark rm(current);
1490 tty->print("resolving %s%s (%s) call to",
1491 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1492 Bytecodes::name(invoke_code));
1493 callee_method->print_short_name(tty);
1494 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1495 p2i(caller_frame.pc()), p2i(callee_method->code()));
1496 }
1497 #endif
1498
1499 if (invoke_code == Bytecodes::_invokestatic) {
1500 assert(callee_method->method_holder()->is_initialized() ||
1501 callee_method->method_holder()->is_reentrant_initialization(current),
1502 "invalid class initialization state for invoke_static");
1503 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1504 // In order to keep class initialization check, do not patch call
1505 // site for static call when the class is not fully initialized.
1506 // Proper check is enforced by call site re-resolution on every invocation.
1507 //
1523
1524 // Make sure the callee nmethod does not get deoptimized and removed before
1525 // we are done patching the code.
1526
1527
1528 CompiledICLocker ml(caller_nm);
1529 if (is_virtual && !is_optimized) {
1530 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1531 inline_cache->update(&call_info, receiver->klass());
1532 } else {
1533 // Callsite is a direct call - set it to the destination method
1534 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1535 callsite->set(callee_method);
1536 }
1537
1538 return callee_method;
1539 }
1540
1541 // Inline caches exist only in compiled code
1542 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1543 PerfTraceTime timer(_perf_ic_miss_total_time);
1544
1545 #ifdef ASSERT
1546 RegisterMap reg_map(current,
1547 RegisterMap::UpdateMap::skip,
1548 RegisterMap::ProcessFrames::include,
1549 RegisterMap::WalkContinuation::skip);
1550 frame stub_frame = current->last_frame();
1551 assert(stub_frame.is_runtime_frame(), "sanity check");
1552 frame caller_frame = stub_frame.sender(®_map);
1553 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1554 #endif /* ASSERT */
1555
1556 methodHandle callee_method;
1557 JRT_BLOCK
1558 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1559 // Return Method* through TLS
1560 current->set_vm_result_metadata(callee_method());
1561 JRT_BLOCK_END
1562 // return compiled code entry point after potential safepoints
1563 return get_resolved_entry(current, callee_method);
1564 JRT_END
1565
1566
1567 // Handle call site that has been made non-entrant
1568 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1569 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1570
1571 // 6243940 We might end up in here if the callee is deoptimized
1572 // as we race to call it. We don't want to take a safepoint if
1573 // the caller was interpreted because the caller frame will look
1574 // interpreted to the stack walkers and arguments are now
1575 // "compiled" so it is much better to make this transition
1576 // invisible to the stack walking code. The i2c path will
1577 // place the callee method in the callee_target. It is stashed
1578 // there because if we try and find the callee by normal means a
1579 // safepoint is possible and have trouble gc'ing the compiled args.
1580 RegisterMap reg_map(current,
1581 RegisterMap::UpdateMap::skip,
1582 RegisterMap::ProcessFrames::include,
1583 RegisterMap::WalkContinuation::skip);
1584 frame stub_frame = current->last_frame();
1585 assert(stub_frame.is_runtime_frame(), "sanity check");
1586 frame caller_frame = stub_frame.sender(®_map);
1587
1588 if (caller_frame.is_interpreted_frame() ||
1589 caller_frame.is_entry_frame() ||
1590 caller_frame.is_upcall_stub_frame()) {
1603 // so bypassing it in c2i adapter is benign.
1604 return callee->get_c2i_no_clinit_check_entry();
1605 } else {
1606 return callee->get_c2i_entry();
1607 }
1608 }
1609
1610 // Must be compiled to compiled path which is safe to stackwalk
1611 methodHandle callee_method;
1612 JRT_BLOCK
1613 // Force resolving of caller (if we called from compiled frame)
1614 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1615 current->set_vm_result_metadata(callee_method());
1616 JRT_BLOCK_END
1617 // return compiled code entry point after potential safepoints
1618 return get_resolved_entry(current, callee_method);
1619 JRT_END
1620
1621 // Handle abstract method call
1622 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1623 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1624
1625 // Verbose error message for AbstractMethodError.
1626 // Get the called method from the invoke bytecode.
1627 vframeStream vfst(current, true);
1628 assert(!vfst.at_end(), "Java frame must exist");
1629 methodHandle caller(current, vfst.method());
1630 Bytecode_invoke invoke(caller, vfst.bci());
1631 DEBUG_ONLY( invoke.verify(); )
1632
1633 // Find the compiled caller frame.
1634 RegisterMap reg_map(current,
1635 RegisterMap::UpdateMap::include,
1636 RegisterMap::ProcessFrames::include,
1637 RegisterMap::WalkContinuation::skip);
1638 frame stubFrame = current->last_frame();
1639 assert(stubFrame.is_runtime_frame(), "must be");
1640 frame callerFrame = stubFrame.sender(®_map);
1641 assert(callerFrame.is_compiled_frame(), "must be");
1642
1643 // Install exception and return forward entry.
1644 address res = SharedRuntime::throw_AbstractMethodError_entry();
1651 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1652 }
1653 JRT_BLOCK_END
1654 return res;
1655 JRT_END
1656
1657 // return verified_code_entry if interp_only_mode is not set for the current thread;
1658 // otherwise return c2i entry.
1659 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1660 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1661 // In interp_only_mode we need to go to the interpreted entry
1662 // The c2i won't patch in this mode -- see fixup_callers_callsite
1663 return callee_method->get_c2i_entry();
1664 }
1665 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1666 return callee_method->verified_code_entry();
1667 }
1668
1669 // resolve a static call and patch code
1670 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1671 PerfTraceTime timer(_perf_resolve_static_total_time);
1672
1673 methodHandle callee_method;
1674 bool enter_special = false;
1675 JRT_BLOCK
1676 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1677 current->set_vm_result_metadata(callee_method());
1678 JRT_BLOCK_END
1679 // return compiled code entry point after potential safepoints
1680 return get_resolved_entry(current, callee_method);
1681 JRT_END
1682
1683 // resolve virtual call and update inline cache to monomorphic
1684 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1685 PerfTraceTime timer(_perf_resolve_virtual_total_time);
1686
1687 methodHandle callee_method;
1688 JRT_BLOCK
1689 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1690 current->set_vm_result_metadata(callee_method());
1691 JRT_BLOCK_END
1692 // return compiled code entry point after potential safepoints
1693 return get_resolved_entry(current, callee_method);
1694 JRT_END
1695
1696
1697 // Resolve a virtual call that can be statically bound (e.g., always
1698 // monomorphic, so it has no inline cache). Patch code to resolved target.
1699 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1700 PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1701
1702 methodHandle callee_method;
1703 JRT_BLOCK
1704 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1705 current->set_vm_result_metadata(callee_method());
1706 JRT_BLOCK_END
1707 // return compiled code entry point after potential safepoints
1708 return get_resolved_entry(current, callee_method);
1709 JRT_END
1710
1711 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1712 JavaThread* current = THREAD;
1713 ResourceMark rm(current);
1714 CallInfo call_info;
1715 Bytecodes::Code bc;
1716
1717 // receiver is null for static calls. An exception is thrown for null
1718 // receivers for non-static calls
1719 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1720
1721 methodHandle callee_method(current, call_info.selected_method());
1722
1723 AtomicAccess::inc(&_ic_miss_ctr);
1724
1725 #ifndef PRODUCT
1726 // Statistics & Tracing
1727 if (TraceCallFixup) {
1728 ResourceMark rm(current);
1729 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1730 callee_method->print_short_name(tty);
1731 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1732 }
1733
1734 if (ICMissHistogram) {
1735 MutexLocker m(VMStatistic_lock);
1736 RegisterMap reg_map(current,
1737 RegisterMap::UpdateMap::skip,
1738 RegisterMap::ProcessFrames::include,
1739 RegisterMap::WalkContinuation::skip);
1740 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1741 // produce statistics under the lock
1742 trace_ic_miss(f.pc());
1743 }
1744 #endif
1745
1828 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1829 cdc->set_to_clean();
1830 break;
1831 }
1832
1833 case relocInfo::virtual_call_type: {
1834 // compiled, dispatched call (which used to call an interpreted method)
1835 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1836 inline_cache->set_to_clean();
1837 break;
1838 }
1839 default:
1840 break;
1841 }
1842 }
1843 }
1844 }
1845
1846 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1847
1848 AtomicAccess::inc(&_wrong_method_ctr);
1849
1850 #ifndef PRODUCT
1851 if (TraceCallFixup) {
1852 ResourceMark rm(current);
1853 tty->print("handle_wrong_method reresolving call to");
1854 callee_method->print_short_name(tty);
1855 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1856 }
1857 #endif
1858
1859 return callee_method;
1860 }
1861
1862 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1863 // The faulting unsafe accesses should be changed to throw the error
1864 // synchronously instead. Meanwhile the faulting instruction will be
1865 // skipped over (effectively turning it into a no-op) and an
1866 // asynchronous exception will be raised which the thread will
1867 // handle at a later point. If the instruction is a load it will
1868 // return garbage.
1869
1870 // Request an async exception.
2113 if (CheckJNICalls) {
2114 fatal("Object has been unlocked by JNI");
2115 }
2116 return;
2117 }
2118 ObjectSynchronizer::exit(obj, lock, current);
2119 }
2120
// Handles the uncommon cases of monitor unlocking in compiled code
JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  // Delegate the actual unlock work to the shared helper.
  SharedRuntime::monitor_exit_helper(obj, lock, current);
JRT_END
2126
2127 #ifndef PRODUCT
2128
2129 void SharedRuntime::print_statistics() {
2130 ttyLocker ttyl;
2131 if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");
2132
2133 SharedRuntime::print_ic_miss_histogram_on(tty);
2134 SharedRuntime::print_counters_on(tty);
2135 AdapterHandlerLibrary::print_statistics_on(tty);
2136
2137 if (xtty != nullptr) xtty->tail("statistics");
2138 }
2139
2140 //void SharedRuntime::print_counters_on(outputStream* st) {
2141 // // Dump the JRT_ENTRY counters
2142 // if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2143 // if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2144 // if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2145 // if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2146 // if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2147 // if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2148 //
2149 // st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2150 // st->print_cr("%5u wrong method", _wrong_method_ctr);
2151 // st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2152 // st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2153 // st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2154 //
2155 // if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2156 // if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2157 // if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2158 // if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2159 // if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2160 // if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2161 // if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2162 // if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2163 // if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2164 // if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2165 // if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2166 // if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2167 // if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2168 // if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2169 // if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2170 // if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
2171 // if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2172 //}
2173
2174 inline double percent(int64_t x, int64_t y) {
2175 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2176 }
2177
2178 class MethodArityHistogram {
2179 public:
2180 enum { MAX_ARITY = 256 };
2181 private:
2182 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2183 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2184 static uint64_t _total_compiled_calls;
2185 static uint64_t _max_compiled_calls_per_method;
2186 static int _max_arity; // max. arity seen
2187 static int _max_size; // max. arg size seen
2188
2189 static void add_method_to_histogram(nmethod* nm) {
2190 Method* method = (nm == nullptr) ? nullptr : nm->method();
2191 if (method != nullptr) {
2192 ArgumentCount args(method->signature());
2193 int arity = args.size() + (method->is_static() ? 0 : 1);
2238 // Take the Compile_lock to protect against changes in the CodeBlob structures
2239 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2240 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2241 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2242 _max_arity = _max_size = 0;
2243 _total_compiled_calls = 0;
2244 _max_compiled_calls_per_method = 0;
2245 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2246 CodeCache::nmethods_do(add_method_to_histogram);
2247 print_histogram();
2248 }
2249 };
2250
2251 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2252 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2253 uint64_t MethodArityHistogram::_total_compiled_calls;
2254 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2255 int MethodArityHistogram::_max_arity;
2256 int MethodArityHistogram::_max_size;
2257
2258 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2259 tty->print_cr("Calls from compiled code:");
2260 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2261 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2262 int64_t mono_i = _nof_interface_calls;
2263 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2264 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2265 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2266 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2267 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2268 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2269 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2270 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2271 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2272 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2273 tty->cr();
2274 tty->print_cr("Note 1: counter updates are not MT-safe.");
2275 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2276 tty->print_cr(" %% in nested categories are relative to their category");
2277 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2278 tty->cr();
2582 }
2583 #endif // INCLUDE_CDS
2584 if (entry == nullptr) {
2585 assert_lock_strong(AdapterHandlerLibrary_lock);
2586 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2587 if (entry_p != nullptr) {
2588 entry = *entry_p;
2589 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2590 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2591 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2592 #ifndef PRODUCT
2593 _runtime_hits++;
2594 #endif
2595 }
2596 }
2597 AdapterFingerPrint::deallocate(fp);
2598 return entry;
2599 }
2600
2601 #ifndef PRODUCT
2602 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2603 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2604 return sizeof(*key) + sizeof(*a);
2605 };
2606 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2607 ts.print(st, "AdapterHandlerTable");
2608 st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2609 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2610 int total_hits = _archived_hits + _runtime_hits;
2611 st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2612 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2613 }
2614 #endif // !PRODUCT
2615
2616 // ---------------------------------------------------------------------------
2617 // Implementation of AdapterHandlerLibrary
2618 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2619 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2620 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2621 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2622 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2623 #if INCLUDE_CDS
2624 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2625 #endif // INCLUDE_CDS
2626 static const int AdapterHandlerLibrary_size = 16*K;
2627 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2628 volatile uint AdapterHandlerLibrary::_id_counter = 0;
2629
2630 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2631 assert(_buffer != nullptr, "should be initialized");
2632 return _buffer;
2633 }
2634
3496 };
3497 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3498 _adapter_handler_table->iterate(findblob_runtime_table);
3499 }
3500 assert(found, "Should have found handler");
3501 }
3502
3503 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3504 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3505 if (adapter_blob() != nullptr) {
3506 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3507 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3508 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3509 if (get_c2i_no_clinit_check_entry() != nullptr) {
3510 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3511 }
3512 }
3513 st->cr();
3514 }
3515
// Runtime entry: re-arm the current thread's reserved stack zone after it
// has been consumed (JEP 270 reserved stack areas).
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  StackOverflow* overflow_state = current->stack_overflow_state();
  // Re-protect the reserved zone; 'true' requests a check that the zone is
  // currently disabled (i.e. it was actually used).
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  // Reset the activation watermark to the stack base — presumably the
  // "no activation pending" value; verify against StackOverflow.
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3522
3523 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3524 ResourceMark rm(current);
3525 frame activation;
3526 nmethod* nm = nullptr;
3527 int count = 1;
3528
3529 assert(fr.is_java_frame(), "Must start on Java frame");
3530
3531 RegisterMap map(JavaThread::current(),
3532 RegisterMap::UpdateMap::skip,
3533 RegisterMap::ProcessFrames::skip,
3534 RegisterMap::WalkContinuation::skip); // don't walk continuations
3535 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|