50 #include "metaprogramming/primitiveConversions.hpp"
51 #include "oops/klass.hpp"
52 #include "oops/method.inline.hpp"
53 #include "oops/objArrayKlass.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "prims/forte.hpp"
56 #include "prims/jvmtiExport.hpp"
57 #include "prims/jvmtiThreadState.hpp"
58 #include "prims/methodHandles.hpp"
59 #include "prims/nativeLookup.hpp"
60 #include "runtime/arguments.hpp"
61 #include "runtime/atomic.hpp"
62 #include "runtime/basicLock.inline.hpp"
63 #include "runtime/frame.inline.hpp"
64 #include "runtime/handles.inline.hpp"
65 #include "runtime/init.hpp"
66 #include "runtime/interfaceSupport.inline.hpp"
67 #include "runtime/java.hpp"
68 #include "runtime/javaCalls.hpp"
69 #include "runtime/jniHandles.inline.hpp"
70 #include "runtime/perfData.hpp"
71 #include "runtime/sharedRuntime.hpp"
72 #include "runtime/stackWatermarkSet.hpp"
73 #include "runtime/stubRoutines.hpp"
74 #include "runtime/synchronizer.inline.hpp"
75 #include "runtime/timerTrace.hpp"
76 #include "runtime/vframe.inline.hpp"
77 #include "runtime/vframeArray.hpp"
78 #include "runtime/vm_version.hpp"
79 #include "utilities/copy.hpp"
80 #include "utilities/dtrace.hpp"
81 #include "utilities/events.hpp"
82 #include "utilities/globalDefinitions.hpp"
83 #include "utilities/resourceHash.hpp"
84 #include "utilities/macros.hpp"
85 #include "utilities/xmlstream.hpp"
86 #ifdef COMPILER1
87 #include "c1/c1_Runtime1.hpp"
88 #endif
89 #if INCLUDE_JFR
90 #include "jfr/jfr.hpp"
91 #endif
92
// Shared runtime stub routines reside in their own unique blob with a
// single entry point


// Define one static blob-pointer field per shared stub; the field list is
// supplied by the SHARED_STUBS_DO x-macro.
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

// Continuation yield stub; defined separately, outside the x-macro list.
nmethod* SharedRuntime::_cont_doYield_stub;

// Human-readable blob names, one per shared stub, in SHARED_STUBS_DO order.
#define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
const char *SharedRuntime::_stub_names[] = {
  SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
};
108
109 //----------------------------generate_stubs-----------------------------------
110 void SharedRuntime::generate_initial_stubs() {
111 // Build this early so it's available for the interpreter.
112 _throw_StackOverflowError_blob =
113 generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
114 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
115 }
116
// Generate the remaining shared runtime stubs: call-site resolution blobs,
// safepoint polling handlers, and the deoptimization blob.
void SharedRuntime::generate_stubs() {
  // Resolution stubs entered when a compiled call lands on a stale or
  // abstract target.
  _wrong_method_blob =
    generate_resolve_blob(SharedStubId::wrong_method_id,
                          CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
  _wrong_method_abstract_blob =
    generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
                          CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));
    generate_throw_exception(SharedStubId::throw_NullPointerException_at_call_id,
                             CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));

#if COMPILER2_OR_JVMCI
  // Vectors are generated only by C2 and JVMCI.
  bool support_wide = is_wide_vector(MaxVectorSize);
  if (support_wide) {
    // Wide-vector variant of the safepoint handler, needed only when the
    // platform's max vector size qualifies as "wide".
    _polling_page_vectors_safepoint_handler_blob =
      generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
                            CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
  }
#endif // COMPILER2_OR_JVMCI
  // All three polling blobs funnel into the same runtime entry point.
  _polling_page_safepoint_handler_blob =
    generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
                          CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
  _polling_page_return_handler_blob =
    generate_handler_blob(SharedStubId::polling_page_return_handler_id,
                          CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));

  generate_deopt_blob();
}
171
// Set up the adapter handler library (i2c/c2i adapter tables and buffer).
void SharedRuntime::init_adapter_library() {
  AdapterHandlerLibrary::initialize();
}
175
#if INCLUDE_JFR
//------------------------------generate jfr runtime stubs ------
// Build the two JFR runtime stubs, timing the work under the startuptime tag.
void SharedRuntime::generate_jfr_stubs() {
  ResourceMark rm;
  TraceTime timer("SharedRuntime generate_jfr_stubs", TRACETIME_LOG(Info, startuptime));

  _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
  _jfr_return_lease_blob = generate_jfr_return_lease();
}

#endif // INCLUDE_JFR
188
189 #include <math.h>
190
191 // Implementation of SharedRuntime
192
#ifndef PRODUCT
// For statistics
// Call-resolution and implicit-exception counters (debug builds only).
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

// Call-site classification totals, reported by print_call_statistics().
int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

// Slow-path entry counters (allocation, monitors, ...), dumped by
// print_statistics().
uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Capped table of recent inline-cache miss sites; filled by trace_ic_miss().
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
238
239
240 void SharedRuntime::trace_ic_miss(address at) {
241 for (int i = 0; i < _ICmiss_index; i++) {
242 if (_ICmiss_at[i] == at) {
243 _ICmiss_count[i]++;
244 return;
245 }
246 }
247 int index = _ICmiss_index++;
248 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
249 _ICmiss_at[index] = at;
250 _ICmiss_count[index] = 1;
251 }
252
253 void SharedRuntime::print_ic_miss_histogram() {
254 if (ICMissHistogram) {
255 tty->print_cr("IC Miss Histogram:");
256 int tot_misses = 0;
257 for (int i = 0; i < _ICmiss_index; i++) {
258 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
259 tot_misses += _ICmiss_count[i];
260 }
261 tty->print_cr("Total IC misses: %7d", tot_misses);
262 }
263 }
264 #endif // PRODUCT
265
266
267 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
268 return x * y;
269 JRT_END
270
271
272 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
273 if (x == min_jlong && y == CONST64(-1)) {
274 return x;
275 } else {
276 return x / y;
277 }
278 JRT_END
279
280
281 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
282 if (x == min_jlong && y == CONST64(-1)) {
283 return 0;
284 } else {
711 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
712 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
713 JNIHandles::destroy_local(vthread);
714 JRT_END
715 #endif // INCLUDE_JVMTI
716
717 // The interpreter code to call this tracing function is only
718 // called/generated when UL is on for redefine, class and has the right level
719 // and tags. Since obsolete methods are never compiled, we don't have
720 // to modify the compilers to generate calls to this function.
721 //
722 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
723 JavaThread* thread, Method* method))
724 if (method->is_obsolete()) {
725 // We are calling an obsolete method, but this is not necessarily
726 // an error. Our method could have been redefined just after we
727 // fetched the Method* from the constant pool.
728 ResourceMark rm;
729 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
730 }
731 return 0;
732 JRT_END
733
734 // ret_pc points into caller; we are returning caller's exception handler
735 // for given exception
// Note that the implementation of this method assumes it's only called when an exception has actually occurred
737 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
738 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
739 assert(nm != nullptr, "must exist");
740 ResourceMark rm;
741
742 #if INCLUDE_JVMCI
743 if (nm->is_compiled_by_jvmci()) {
744 // lookup exception handler for this pc
745 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
746 ExceptionHandlerTable table(nm);
747 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
748 if (t != nullptr) {
749 return nm->code_begin() + t->pco();
750 } else {
1350
1351 // determine call info & receiver
1352 // note: a) receiver is null for static calls
1353 // b) an exception is thrown if receiver is null for non-static calls
1354 CallInfo call_info;
1355 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1356 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1357
1358 NoSafepointVerifier nsv;
1359
1360 methodHandle callee_method(current, call_info.selected_method());
1361
1362 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1363 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1364 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1365 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1366 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1367
1368 assert(!caller_nm->is_unloading(), "It should not be unloading");
1369
1370 #ifndef PRODUCT
1371 // tracing/debugging/statistics
1372 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1373 (is_virtual) ? (&_resolve_virtual_ctr) :
1374 (&_resolve_static_ctr);
1375 Atomic::inc(addr);
1376
1377 if (TraceCallFixup) {
1378 ResourceMark rm(current);
1379 tty->print("resolving %s%s (%s) call to",
1380 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1381 Bytecodes::name(invoke_code));
1382 callee_method->print_short_name(tty);
1383 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1384 p2i(caller_frame.pc()), p2i(callee_method->code()));
1385 }
1386 #endif
1387
1388 if (invoke_code == Bytecodes::_invokestatic) {
1389 assert(callee_method->method_holder()->is_initialized() ||
1390 callee_method->method_holder()->is_reentrant_initialization(current),
1391 "invalid class initialization state for invoke_static");
1392 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1393 // In order to keep class initialization check, do not patch call
1394 // site for static call when the class is not fully initialized.
1395 // Proper check is enforced by call site re-resolution on every invocation.
1396 //
1412
1413 // Make sure the callee nmethod does not get deoptimized and removed before
1414 // we are done patching the code.
1415
1416
1417 CompiledICLocker ml(caller_nm);
1418 if (is_virtual && !is_optimized) {
1419 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1420 inline_cache->update(&call_info, receiver->klass());
1421 } else {
1422 // Callsite is a direct call - set it to the destination method
1423 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1424 callsite->set(callee_method);
1425 }
1426
1427 return callee_method;
1428 }
1429
1430 // Inline caches exist only in compiled code
1431 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1432 #ifdef ASSERT
1433 RegisterMap reg_map(current,
1434 RegisterMap::UpdateMap::skip,
1435 RegisterMap::ProcessFrames::include,
1436 RegisterMap::WalkContinuation::skip);
1437 frame stub_frame = current->last_frame();
1438 assert(stub_frame.is_runtime_frame(), "sanity check");
1439 frame caller_frame = stub_frame.sender(®_map);
1440 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1441 #endif /* ASSERT */
1442
1443 methodHandle callee_method;
1444 JRT_BLOCK
1445 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1446 // Return Method* through TLS
1447 current->set_vm_result_metadata(callee_method());
1448 JRT_BLOCK_END
1449 // return compiled code entry point after potential safepoints
1450 return get_resolved_entry(current, callee_method);
1451 JRT_END
1452
1453
1454 // Handle call site that has been made non-entrant
1455 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1456 // 6243940 We might end up in here if the callee is deoptimized
1457 // as we race to call it. We don't want to take a safepoint if
1458 // the caller was interpreted because the caller frame will look
1459 // interpreted to the stack walkers and arguments are now
1460 // "compiled" so it is much better to make this transition
1461 // invisible to the stack walking code. The i2c path will
1462 // place the callee method in the callee_target. It is stashed
1463 // there because if we try and find the callee by normal means a
1464 // safepoint is possible and have trouble gc'ing the compiled args.
1465 RegisterMap reg_map(current,
1466 RegisterMap::UpdateMap::skip,
1467 RegisterMap::ProcessFrames::include,
1468 RegisterMap::WalkContinuation::skip);
1469 frame stub_frame = current->last_frame();
1470 assert(stub_frame.is_runtime_frame(), "sanity check");
1471 frame caller_frame = stub_frame.sender(®_map);
1472
1473 if (caller_frame.is_interpreted_frame() ||
1474 caller_frame.is_entry_frame() ||
1475 caller_frame.is_upcall_stub_frame()) {
1488 // so bypassing it in c2i adapter is benign.
1489 return callee->get_c2i_no_clinit_check_entry();
1490 } else {
1491 return callee->get_c2i_entry();
1492 }
1493 }
1494
1495 // Must be compiled to compiled path which is safe to stackwalk
1496 methodHandle callee_method;
1497 JRT_BLOCK
1498 // Force resolving of caller (if we called from compiled frame)
1499 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1500 current->set_vm_result_metadata(callee_method());
1501 JRT_BLOCK_END
1502 // return compiled code entry point after potential safepoints
1503 return get_resolved_entry(current, callee_method);
1504 JRT_END
1505
1506 // Handle abstract method call
1507 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1508 // Verbose error message for AbstractMethodError.
1509 // Get the called method from the invoke bytecode.
1510 vframeStream vfst(current, true);
1511 assert(!vfst.at_end(), "Java frame must exist");
1512 methodHandle caller(current, vfst.method());
1513 Bytecode_invoke invoke(caller, vfst.bci());
1514 DEBUG_ONLY( invoke.verify(); )
1515
1516 // Find the compiled caller frame.
1517 RegisterMap reg_map(current,
1518 RegisterMap::UpdateMap::include,
1519 RegisterMap::ProcessFrames::include,
1520 RegisterMap::WalkContinuation::skip);
1521 frame stubFrame = current->last_frame();
1522 assert(stubFrame.is_runtime_frame(), "must be");
1523 frame callerFrame = stubFrame.sender(®_map);
1524 assert(callerFrame.is_compiled_frame(), "must be");
1525
1526 // Install exception and return forward entry.
1527 address res = SharedRuntime::throw_AbstractMethodError_entry();
1534 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1535 }
1536 JRT_BLOCK_END
1537 return res;
1538 JRT_END
1539
1540 // return verified_code_entry if interp_only_mode is not set for the current thread;
1541 // otherwise return c2i entry.
1542 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1543 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1544 // In interp_only_mode we need to go to the interpreted entry
1545 // The c2i won't patch in this mode -- see fixup_callers_callsite
1546 return callee_method->get_c2i_entry();
1547 }
1548 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1549 return callee_method->verified_code_entry();
1550 }
1551
1552 // resolve a static call and patch code
1553 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1554 methodHandle callee_method;
1555 bool enter_special = false;
1556 JRT_BLOCK
1557 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1558 current->set_vm_result_metadata(callee_method());
1559 JRT_BLOCK_END
1560 // return compiled code entry point after potential safepoints
1561 return get_resolved_entry(current, callee_method);
1562 JRT_END
1563
// resolve virtual call and update inline cache to monomorphic
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual = true, is_optimized = false; may safepoint.
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    // Hand the resolved Method* back to the stub through thread-local storage.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1574
1575
// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache). Patch code to resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual = true, is_optimized = true; may safepoint.
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    // Hand the resolved Method* back to the stub through thread-local storage.
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1587
1588 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1589 JavaThread* current = THREAD;
1590 ResourceMark rm(current);
1591 CallInfo call_info;
1592 Bytecodes::Code bc;
1593
1594 // receiver is null for static calls. An exception is thrown for null
1595 // receivers for non-static calls
1596 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1597
1598 methodHandle callee_method(current, call_info.selected_method());
1599
1600 #ifndef PRODUCT
1601 Atomic::inc(&_ic_miss_ctr);
1602
1603 // Statistics & Tracing
1604 if (TraceCallFixup) {
1605 ResourceMark rm(current);
1606 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1607 callee_method->print_short_name(tty);
1608 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1609 }
1610
1611 if (ICMissHistogram) {
1612 MutexLocker m(VMStatistic_lock);
1613 RegisterMap reg_map(current,
1614 RegisterMap::UpdateMap::skip,
1615 RegisterMap::ProcessFrames::include,
1616 RegisterMap::WalkContinuation::skip);
1617 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1618 // produce statistics under the lock
1619 trace_ic_miss(f.pc());
1620 }
1621 #endif
1622
1705 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1706 cdc->set_to_clean();
1707 break;
1708 }
1709
1710 case relocInfo::virtual_call_type: {
1711 // compiled, dispatched call (which used to call an interpreted method)
1712 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1713 inline_cache->set_to_clean();
1714 break;
1715 }
1716 default:
1717 break;
1718 }
1719 }
1720 }
1721 }
1722
1723 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1724
1725
1726 #ifndef PRODUCT
1727 Atomic::inc(&_wrong_method_ctr);
1728
1729 if (TraceCallFixup) {
1730 ResourceMark rm(current);
1731 tty->print("handle_wrong_method reresolving call to");
1732 callee_method->print_short_name(tty);
1733 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1734 }
1735 #endif
1736
1737 return callee_method;
1738 }
1739
1740 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1741 // The faulting unsafe accesses should be changed to throw the error
1742 // synchronously instead. Meanwhile the faulting instruction will be
1743 // skipped over (effectively turning it into a no-op) and an
1744 // asynchronous exception will be raised which the thread will
1745 // handle at a later point. If the instruction is a load it will
1746 // return garbage.
1747
1748 // Request an async exception.
2006 // This is only called when CheckJNICalls is true, and only
2007 // for virtual thread termination.
2008 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
2009 assert(CheckJNICalls, "Only call this when checking JNI usage");
2010 if (log_is_enabled(Debug, jni)) {
2011 JavaThread* current = JavaThread::current();
2012 int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2013 int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2014 log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2015 ") exiting with Objects still locked by JNI MonitorEnter.",
2016 vthread_id, carrier_id);
2017 }
2018 JRT_END
2019
2020 #ifndef PRODUCT
2021
// Dump all SharedRuntime statistics counters to tty (debug builds only),
// bracketed by XML markers when -XX:+LogCompilation style output is active.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram();

  // Dump the JRT_ENTRY counters
  if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
  if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
  if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
  if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
  if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
  if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);

  // Resolution counters are printed unconditionally.
  tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
  tty->print_cr("%5u wrong method", _wrong_method_ctr);
  tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
  tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
  tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);

  // Remaining slow-path counters print only when non-zero.
  if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
  if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
  if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
  if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
  if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
  if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
  if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
  if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
  if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
  if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
  if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
  if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
  if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
  if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
  if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
  if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
  if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);

  AdapterHandlerLibrary::print_statistics();

  if (xtty != nullptr) xtty->tail("statistics");
}
2064
// Percentage of x relative to y; a denominator below 1 is clamped to 1
// to avoid division by zero.
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y > 1) ? y : (int64_t)1;
  return 100.0 * (double)x / (double)denom;
}
2068
2069 class MethodArityHistogram {
2070 public:
2071 enum { MAX_ARITY = 256 };
2072 private:
2073 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2074 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2075 static uint64_t _total_compiled_calls;
2076 static uint64_t _max_compiled_calls_per_method;
2077 static int _max_arity; // max. arity seen
2078 static int _max_size; // max. arg size seen
2079
2080 static void add_method_to_histogram(nmethod* nm) {
2081 Method* method = (nm == nullptr) ? nullptr : nm->method();
2082 if (method != nullptr) {
2083 ArgumentCount args(method->signature());
2084 int arity = args.size() + (method->is_static() ? 0 : 1);
2129 // Take the Compile_lock to protect against changes in the CodeBlob structures
2130 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2131 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2132 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2133 _max_arity = _max_size = 0;
2134 _total_compiled_calls = 0;
2135 _max_compiled_calls_per_method = 0;
2136 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2137 CodeCache::nmethods_do(add_method_to_histogram);
2138 print_histogram();
2139 }
2140 };
2141
// Storage for MethodArityHistogram's static accumulators (class defined above).
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2148
2149 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2150 tty->print_cr("Calls from compiled code:");
2151 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2152 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2153 int64_t mono_i = _nof_interface_calls;
2154 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2155 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2156 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2157 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2158 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2159 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2160 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2161 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2162 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2163 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2164 tty->cr();
2165 tty->print_cr("Note 1: counter updates are not MT-safe.");
2166 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2167 tty->print_cr(" %% in nested categories are relative to their category");
2168 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2169 tty->cr();
2439 adapter_fp_equals_compact_hashtable_entry> {};
2440 #endif // INCLUDE_CDS
2441
2442 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2443 using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2444 AnyObj::C_HEAP, mtCode,
2445 AdapterFingerPrint::compute_hash,
2446 AdapterFingerPrint::equals>;
2447 static AdapterHandlerTable* _adapter_handler_table;
2448 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2449
// Find an entry with the same fingerprint if it exists.
// Searches the read-only archived (AOT) table first, then the runtime table.
// Returns nullptr when no matching adapter has been created yet.
AdapterHandlerEntry* AdapterHandlerLibrary::lookup(int total_args_passed, BasicType* sig_bt) {
  NOT_PRODUCT(_lookups++);
  assert_lock_strong(AdapterHandlerLibrary_lock);
  // Build a lookup key from the call signature; freed before returning.
  AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
  AdapterHandlerEntry* entry = nullptr;
#if INCLUDE_CDS
  // if we are building the archive then the archived adapter table is
  // not valid and we need to use the ones added to the runtime table
  if (!AOTCodeCache::is_dumping_adapters()) {
    // Search archived table first. It is read-only table so can be searched without lock
    entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
    if (entry != nullptr) {
#ifndef PRODUCT
      if (fp->is_compact()) {
        _compact++;
      }
      _archived_hits++;
#endif
    }
  }
#endif // INCLUDE_CDS
  if (entry == nullptr) {
    // Fall back to the runtime-populated table.
    // NOTE(review): this assert duplicates the one at function entry.
    assert_lock_strong(AdapterHandlerLibrary_lock);
    AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
    if (entry_p != nullptr) {
      entry = *entry_p;
      assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
             entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
             fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
#ifndef PRODUCT
      if (fp->is_compact()) _compact++;
      _runtime_hits++;
#endif
    }
  }
  // The fingerprint was only needed as a lookup key.
  AdapterFingerPrint::deallocate(fp);
  return entry;
}
2489
2490 #ifndef PRODUCT
2491 static void print_table_statistics() {
2492 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2493 return sizeof(*key) + sizeof(*a);
2494 };
2495 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2496 ts.print(tty, "AdapterHandlerTable");
2497 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2498 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2499 int total_hits = _archived_hits + _runtime_hits;
2500 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d) compact %d",
2501 _lookups, _equals, total_hits, _archived_hits, _runtime_hits, _compact);
2502 }
2503 #endif
2504
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
// Shared handlers for common signatures (abstract, no-arg, int, oop, ...);
// all start out null and are presumably installed by initialize() — confirm.
AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
#if INCLUDE_CDS
ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
#endif // INCLUDE_CDS
// Size of the shared code buffer used for adapter generation.
static const int AdapterHandlerLibrary_size = 16*K;
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2518
// Accessor for the shared adapter-generation buffer; asserts that it has
// already been set up (per the assert, initialization must precede use).
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  assert(_buffer != nullptr, "should be initialized");
  return _buffer;
}
2523
2821 comp_args_on_stack,
2822 sig_bt,
2823 regs,
2824 handler);
2825 #ifdef ASSERT
2826 if (VerifyAdapterSharing) {
2827 handler->save_code(buf->code_begin(), buffer.insts_size());
2828 if (is_transient) {
2829 return true;
2830 }
2831 }
2832 #endif
2833
2834 adapter_blob = AdapterBlob::create(&buffer);
2835 if (adapter_blob == nullptr) {
2836 // CodeCache is full, disable compilation
2837 // Ought to log this but compile log is only per compile thread
2838 // and we're some non descript Java thread.
2839 return false;
2840 }
2841 if (!is_transient && AOTCodeCache::is_dumping_adapters()) {
2842 // try to save generated code
2843 const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
2844 const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
2845 int entry_offset[AdapterHandlerEntry::ENTRIES_COUNT];
2846 assert(AdapterHandlerEntry::ENTRIES_COUNT == 4, "sanity");
2847 address i2c_entry = handler->get_i2c_entry();
2848 entry_offset[0] = 0; // i2c_entry offset
2849 entry_offset[1] = handler->get_c2i_entry() - i2c_entry;
2850 entry_offset[2] = handler->get_c2i_unverified_entry() - i2c_entry;
2851 entry_offset[3] = handler->get_c2i_no_clinit_check_entry() - i2c_entry;
2852 bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name, AdapterHandlerEntry::ENTRIES_COUNT, entry_offset);
2853 assert(success || !AOTCodeCache::is_dumping_adapters(), "caching of adapter must be disabled");
2854 }
2855 handler->relocate(adapter_blob->content_begin());
2856 #ifndef PRODUCT
2857 // debugging support
2858 if (PrintAdapterHandlers || PrintStubCode) {
2859 print_adapter_handler_info(tty, handler, adapter_blob);
2860 }
2861 #endif
2862 return true;
2863 }
2864
2865 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& adapter_blob,
2866 int total_args_passed,
2867 BasicType* sig_bt,
2868 bool is_transient) {
2869 AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2870 AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
2871 if (!generate_adapter_code(adapter_blob, handler, total_args_passed, sig_bt, is_transient)) {
2872 AdapterHandlerEntry::deallocate(handler);
2873 return nullptr;
2944 AdapterBlob* blob = lookup_aot_cache(handler);
2945 #ifndef PRODUCT
2946 // debugging support
2947 if ((blob != nullptr) && (PrintAdapterHandlers || PrintStubCode)) {
2948 print_adapter_handler_info(tty, handler, blob);
2949 }
2950 #endif
2951 return blob;
2952 }
2953
2954 // This method is used during production run to link archived adapters (stored in AOT Cache)
2955 // to their code in AOT Code Cache
2956 void AdapterHandlerEntry::link() {
2957 AdapterBlob* adapter_blob = nullptr;
2958 ResourceMark rm;
2959 assert(_fingerprint != nullptr, "_fingerprint must not be null");
2960 bool generate_code = false;
2961 // Generate code only if AOTCodeCache is not available, or
2962 // caching adapters is disabled, or we fail to link
2963 // the AdapterHandlerEntry to its code in the AOTCodeCache
2964 if (AOTCodeCache::is_using_adapters()) {
2965 adapter_blob = AdapterHandlerLibrary::link_aot_adapter_handler(this);
2966 if (adapter_blob == nullptr) {
2967 log_warning(cds)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
2968 generate_code = true;
2969 }
2970 } else {
2971 generate_code = true;
2972 }
2973 if (generate_code) {
2974 int nargs;
2975 BasicType* bt = _fingerprint->as_basic_type(nargs);
2976 if (!AdapterHandlerLibrary::generate_adapter_code(adapter_blob, this, nargs, bt, /* is_transient */ false)) {
2977 // Don't throw exceptions during VM initialization because java.lang.* classes
2978 // might not have been initialized, causing problems when constructing the
2979 // Java exception object.
2980 vm_exit_during_initialization("Out of space in CodeCache for adapters");
2981 }
2982 }
2983 // Outside of the lock
2984 if (adapter_blob != nullptr) {
3431 assert(found, "Should have found handler");
3432 }
3433
3434 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3435 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3436 if (get_i2c_entry() != nullptr) {
3437 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3438 }
3439 if (get_c2i_entry() != nullptr) {
3440 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3441 }
3442 if (get_c2i_unverified_entry() != nullptr) {
3443 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3444 }
3445 if (get_c2i_no_clinit_check_entry() != nullptr) {
3446 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3447 }
3448 st->cr();
3449 }
3450
#ifndef PRODUCT

// Non-product only: dump the adapter handler table statistics.
void AdapterHandlerLibrary::print_statistics() {
  print_table_statistics();
}

#endif /* PRODUCT */
3458
3459 bool AdapterHandlerLibrary::is_abstract_method_adapter(AdapterHandlerEntry* entry) {
3460 if (entry == _abstract_method_handler) {
3461 return true;
3462 }
3463 return false;
3464 }
3465
// Re-enables the current thread's reserved stack zone and resets the
// reserved-stack activation watermark to the stack base.
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  StackOverflow* overflow_state = current->stack_overflow_state();
  // check_if_disabled=true: only re-arm if the zone was actually disabled.
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3472
3473 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3474 ResourceMark rm(current);
3475 frame activation;
3476 nmethod* nm = nullptr;
3477 int count = 1;
3478
|
50 #include "metaprogramming/primitiveConversions.hpp"
51 #include "oops/klass.hpp"
52 #include "oops/method.inline.hpp"
53 #include "oops/objArrayKlass.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "prims/forte.hpp"
56 #include "prims/jvmtiExport.hpp"
57 #include "prims/jvmtiThreadState.hpp"
58 #include "prims/methodHandles.hpp"
59 #include "prims/nativeLookup.hpp"
60 #include "runtime/arguments.hpp"
61 #include "runtime/atomic.hpp"
62 #include "runtime/basicLock.inline.hpp"
63 #include "runtime/frame.inline.hpp"
64 #include "runtime/handles.inline.hpp"
65 #include "runtime/init.hpp"
66 #include "runtime/interfaceSupport.inline.hpp"
67 #include "runtime/java.hpp"
68 #include "runtime/javaCalls.hpp"
69 #include "runtime/jniHandles.inline.hpp"
70 #include "runtime/perfData.inline.hpp"
71 #include "runtime/sharedRuntime.hpp"
72 #include "runtime/stackWatermarkSet.hpp"
73 #include "runtime/stubRoutines.hpp"
74 #include "runtime/synchronizer.inline.hpp"
75 #include "runtime/timerTrace.hpp"
76 #include "runtime/vframe.inline.hpp"
77 #include "runtime/vframeArray.hpp"
78 #include "runtime/vm_version.hpp"
79 #include "services/management.hpp"
80 #include "utilities/copy.hpp"
81 #include "utilities/dtrace.hpp"
82 #include "utilities/events.hpp"
83 #include "utilities/globalDefinitions.hpp"
84 #include "utilities/resourceHash.hpp"
85 #include "utilities/macros.hpp"
86 #include "utilities/xmlstream.hpp"
87 #ifdef COMPILER1
88 #include "c1/c1_Runtime1.hpp"
89 #endif
90 #if INCLUDE_JFR
91 #include "jfr/jfr.hpp"
92 #endif
93
// Shared runtime stub routines reside in their own unique blob with a
// single entry point


// Define one static blob field per shared stub, expanded from the
// SHARED_STUBS_DO x-macro list.
#define SHARED_STUB_FIELD_DEFINE(name, type) \
  type SharedRuntime::BLOB_FIELD_NAME(name);
SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
#undef SHARED_STUB_FIELD_DEFINE

nmethod* SharedRuntime::_cont_doYield_stub;

// Timing counters for the hot call-resolution entry points; allocated
// in generate_stubs() when UsePerfData is enabled, nullptr otherwise.
PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time = nullptr;

// Human-readable stub names, one per stub, in SHARED_STUBS_DO order.
#define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
const char *SharedRuntime::_stub_names[] = {
  SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
};
115
116 //----------------------------generate_stubs-----------------------------------
// Generates only the StackOverflowError throw stub; it must exist
// before the interpreter starts running.
void SharedRuntime::generate_initial_stubs() {
  // Build this early so it's available for the interpreter.
  _throw_StackOverflowError_blob =
    generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
                             CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
}
123
124 void SharedRuntime::generate_stubs() {
125 _wrong_method_blob =
126 generate_resolve_blob(SharedStubId::wrong_method_id,
127 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
128 _wrong_method_abstract_blob =
129 generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
130 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));
157 generate_throw_exception(SharedStubId::throw_NullPointerException_at_call_id,
158 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
159
160 #if COMPILER2_OR_JVMCI
161 // Vectors are generated only by C2 and JVMCI.
162 bool support_wide = is_wide_vector(MaxVectorSize);
163 if (support_wide) {
164 _polling_page_vectors_safepoint_handler_blob =
165 generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
166 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
167 }
168 #endif // COMPILER2_OR_JVMCI
169 _polling_page_safepoint_handler_blob =
170 generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
171 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
172 _polling_page_return_handler_blob =
173 generate_handler_blob(SharedStubId::polling_page_return_handler_id,
174 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
175
176 generate_deopt_blob();
177
178 if (UsePerfData) {
179 EXCEPTION_MARK;
180 NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resovle_opt_virtual_call");
181 NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resovle_virtual_call");
182 NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resovle_static_call");
183 NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
184 NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time , SUN_CI, "ic_miss");
185 if (HAS_PENDING_EXCEPTION) {
186 vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
187 }
188 }
189 }
190
// One-time setup; delegates to AdapterHandlerLibrary::initialize().
void SharedRuntime::init_adapter_library() {
  AdapterHandlerLibrary::initialize();
}
194
195 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
196 st->print(" %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
197 if (TraceThreadTime) {
198 st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
199 }
200 st->print(" / %5d events", cnt);
201 st->cr();
202 }
203
// Prints the call-resolution / IC-miss timing counters and a grand
// total. Counters only exist when UsePerfData is on; otherwise a
// placeholder line is printed.
void SharedRuntime::print_counters_on(outputStream* st) {
  st->print_cr("SharedRuntime:");
  if (UsePerfData) {
    print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
    print_counter_on(st, "resolve_virtual_call:", _perf_resolve_virtual_total_time, _resolve_virtual_ctr);
    print_counter_on(st, "resolve_static_call:", _perf_resolve_static_total_time, _resolve_static_ctr);
    print_counter_on(st, "handle_wrong_method:", _perf_handle_wrong_method_total_time, _wrong_method_ctr);
    print_counter_on(st, "ic_miss:", _perf_ic_miss_total_time, _ic_miss_ctr);

    // Sum raw ticks first, convert to microseconds once at the end.
    jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_static_total_time->elapsed_counter_value() +
                                                          _perf_handle_wrong_method_total_time->elapsed_counter_value() +
                                                          _perf_ic_miss_total_time->elapsed_counter_value());
    st->print("Total: " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
    if (TraceThreadTime) {
      // Per-thread CPU time totals, only collected under TraceThreadTime.
      jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
                                                           _perf_resolve_virtual_total_time->thread_counter_value() +
                                                           _perf_resolve_static_total_time->thread_counter_value() +
                                                           _perf_handle_wrong_method_total_time->thread_counter_value() +
                                                           _perf_ic_miss_total_time->thread_counter_value());
      st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);

    }
    st->cr();
  } else {
    st->print_cr(" no data (UsePerfData is turned off)");
  }
}
233
#if INCLUDE_JFR
//------------------------------generate jfr runtime stubs ------
// Generates the JFR checkpoint-write and lease-return stubs; the
// elapsed time is reported via -Xlog:startuptime.
void SharedRuntime::generate_jfr_stubs() {
  ResourceMark rm;
  const char* timer_msg = "SharedRuntime generate_jfr_stubs";
  TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));

  _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
  _jfr_return_lease_blob = generate_jfr_return_lease();
}

#endif // INCLUDE_JFR
246
247 #include <math.h>
248
249 // Implementation of SharedRuntime
250
// For statistics
// Call-resolution event counters; incremented with Atomic::inc from the
// resolution entry points. Present in all build flavors.
uint SharedRuntime::_ic_miss_ctr = 0;
uint SharedRuntime::_wrong_method_ctr = 0;
uint SharedRuntime::_resolve_static_ctr = 0;
uint SharedRuntime::_resolve_virtual_ctr = 0;
uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
257
#ifndef PRODUCT
// Counters below are collected in non-product builds only.
uint SharedRuntime::_implicit_null_throws = 0;
uint SharedRuntime::_implicit_div0_throws = 0;

int64_t SharedRuntime::_nof_normal_calls = 0;
int64_t SharedRuntime::_nof_inlined_calls = 0;
int64_t SharedRuntime::_nof_megamorphic_calls = 0;
int64_t SharedRuntime::_nof_static_calls = 0;
int64_t SharedRuntime::_nof_inlined_static_calls = 0;
int64_t SharedRuntime::_nof_interface_calls = 0;
int64_t SharedRuntime::_nof_inlined_interface_calls = 0;

uint SharedRuntime::_new_instance_ctr=0;
uint SharedRuntime::_new_array_ctr=0;
uint SharedRuntime::_multi2_ctr=0;
uint SharedRuntime::_multi3_ctr=0;
uint SharedRuntime::_multi4_ctr=0;
uint SharedRuntime::_multi5_ctr=0;
uint SharedRuntime::_mon_enter_stub_ctr=0;
uint SharedRuntime::_mon_exit_stub_ctr=0;
uint SharedRuntime::_mon_enter_ctr=0;
uint SharedRuntime::_unsafe_set_memory_ctr=0;

// Table of recorded inline-cache miss sites; filled by trace_ic_miss()
// and saturating at maxICmiss_count entries.
int SharedRuntime::_ICmiss_index = 0;
int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
297
298
299 void SharedRuntime::trace_ic_miss(address at) {
300 for (int i = 0; i < _ICmiss_index; i++) {
301 if (_ICmiss_at[i] == at) {
302 _ICmiss_count[i]++;
303 return;
304 }
305 }
306 int index = _ICmiss_index++;
307 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
308 _ICmiss_at[index] = at;
309 _ICmiss_count[index] = 1;
310 }
311
312 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
313 if (ICMissHistogram) {
314 st->print_cr("IC Miss Histogram:");
315 int tot_misses = 0;
316 for (int i = 0; i < _ICmiss_index; i++) {
317 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
318 tot_misses += _ICmiss_count[i];
319 }
320 st->print_cr("Total IC misses: %7d", tot_misses);
321 }
322 }
323 #endif // !PRODUCT
324
325
// 64-bit multiply helper. Parameters arrive as (y, x); multiplication
// is commutative so the order is irrelevant here.
JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
  return x * y;
JRT_END
329
330
// 64-bit divide helper: computes x / y. min_jlong / -1 is special-cased
// because the true quotient (-min_jlong) is not representable; returning
// min_jlong matches Java's defined overflow behavior for integer division
// and avoids a hardware trap.
JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
  if (x == min_jlong && y == CONST64(-1)) {
    return x;
  } else {
    return x / y;
  }
JRT_END
338
339
340 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
341 if (x == min_jlong && y == CONST64(-1)) {
342 return 0;
343 } else {
770 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
771 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
772 JNIHandles::destroy_local(vthread);
773 JRT_END
774 #endif // INCLUDE_JVMTI
775
// The interpreter code to call this tracing function is only
// called/generated when UL is on for redefine, class and has the right level
// and tags. Since obsolete methods are never compiled, we don't have
// to modify the compilers to generate calls to this function.
//
JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
    JavaThread* thread, Method* method))
  if (method->is_obsolete()) {
    // We are calling an obsolete method, but this is not necessarily
    // an error. Our method could have been redefined just after we
    // fetched the Method* from the constant pool.
    // ResourceMark covers the C-string produced by name_and_sig_as_C_string().
    ResourceMark rm;
    log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
  }

  LogStreamHandle(Trace, interpreter, bytecode) log;
  if (log.is_enabled()) {
    ResourceMark rm;
    // One line per entry: thread, static/virtual, name, and flags for
    // native methods and class-initialization state.
    log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
              p2i(thread),
              (method->is_static() ? "static" : "virtual"),
              method->name_and_sig_as_C_string(),
              (method->is_native() ? " native" : ""),
              (thread->class_being_initialized() != nullptr ? " clinit" : ""),
              (method->method_holder()->is_initialized() ? "" : " being_initialized"));
  }
  return 0;
JRT_END
804
805 // ret_pc points into caller; we are returning caller's exception handler
806 // for given exception
807 // Note that the implementation of this method assumes it's only called when an exception has actually occured
808 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
809 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
810 assert(nm != nullptr, "must exist");
811 ResourceMark rm;
812
813 #if INCLUDE_JVMCI
814 if (nm->is_compiled_by_jvmci()) {
815 // lookup exception handler for this pc
816 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
817 ExceptionHandlerTable table(nm);
818 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
819 if (t != nullptr) {
820 return nm->code_begin() + t->pco();
821 } else {
1421
1422 // determine call info & receiver
1423 // note: a) receiver is null for static calls
1424 // b) an exception is thrown if receiver is null for non-static calls
1425 CallInfo call_info;
1426 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1427 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1428
1429 NoSafepointVerifier nsv;
1430
1431 methodHandle callee_method(current, call_info.selected_method());
1432
1433 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1434 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1435 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1436 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1437 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1438
1439 assert(!caller_nm->is_unloading(), "It should not be unloading");
1440
1441 // tracing/debugging/statistics
1442 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1443 (is_virtual) ? (&_resolve_virtual_ctr) :
1444 (&_resolve_static_ctr);
1445 Atomic::inc(addr);
1446
1447 #ifndef PRODUCT
1448 if (TraceCallFixup) {
1449 ResourceMark rm(current);
1450 tty->print("resolving %s%s (%s) call to",
1451 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1452 Bytecodes::name(invoke_code));
1453 callee_method->print_short_name(tty);
1454 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1455 p2i(caller_frame.pc()), p2i(callee_method->code()));
1456 }
1457 #endif
1458
1459 if (invoke_code == Bytecodes::_invokestatic) {
1460 assert(callee_method->method_holder()->is_initialized() ||
1461 callee_method->method_holder()->is_reentrant_initialization(current),
1462 "invalid class initialization state for invoke_static");
1463 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1464 // In order to keep class initialization check, do not patch call
1465 // site for static call when the class is not fully initialized.
1466 // Proper check is enforced by call site re-resolution on every invocation.
1467 //
1483
1484 // Make sure the callee nmethod does not get deoptimized and removed before
1485 // we are done patching the code.
1486
1487
1488 CompiledICLocker ml(caller_nm);
1489 if (is_virtual && !is_optimized) {
1490 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1491 inline_cache->update(&call_info, receiver->klass());
1492 } else {
1493 // Callsite is a direct call - set it to the destination method
1494 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1495 callsite->set(callee_method);
1496 }
1497
1498 return callee_method;
1499 }
1500
1501 // Inline caches exist only in compiled code
1502 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1503 PerfTraceTime timer(_perf_ic_miss_total_time);
1504
1505 #ifdef ASSERT
1506 RegisterMap reg_map(current,
1507 RegisterMap::UpdateMap::skip,
1508 RegisterMap::ProcessFrames::include,
1509 RegisterMap::WalkContinuation::skip);
1510 frame stub_frame = current->last_frame();
1511 assert(stub_frame.is_runtime_frame(), "sanity check");
1512 frame caller_frame = stub_frame.sender(®_map);
1513 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1514 #endif /* ASSERT */
1515
1516 methodHandle callee_method;
1517 JRT_BLOCK
1518 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1519 // Return Method* through TLS
1520 current->set_vm_result_metadata(callee_method());
1521 JRT_BLOCK_END
1522 // return compiled code entry point after potential safepoints
1523 return get_resolved_entry(current, callee_method);
1524 JRT_END
1525
1526
1527 // Handle call site that has been made non-entrant
1528 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1529 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1530
1531 // 6243940 We might end up in here if the callee is deoptimized
1532 // as we race to call it. We don't want to take a safepoint if
1533 // the caller was interpreted because the caller frame will look
1534 // interpreted to the stack walkers and arguments are now
1535 // "compiled" so it is much better to make this transition
1536 // invisible to the stack walking code. The i2c path will
1537 // place the callee method in the callee_target. It is stashed
1538 // there because if we try and find the callee by normal means a
1539 // safepoint is possible and have trouble gc'ing the compiled args.
1540 RegisterMap reg_map(current,
1541 RegisterMap::UpdateMap::skip,
1542 RegisterMap::ProcessFrames::include,
1543 RegisterMap::WalkContinuation::skip);
1544 frame stub_frame = current->last_frame();
1545 assert(stub_frame.is_runtime_frame(), "sanity check");
1546 frame caller_frame = stub_frame.sender(®_map);
1547
1548 if (caller_frame.is_interpreted_frame() ||
1549 caller_frame.is_entry_frame() ||
1550 caller_frame.is_upcall_stub_frame()) {
1563 // so bypassing it in c2i adapter is benign.
1564 return callee->get_c2i_no_clinit_check_entry();
1565 } else {
1566 return callee->get_c2i_entry();
1567 }
1568 }
1569
1570 // Must be compiled to compiled path which is safe to stackwalk
1571 methodHandle callee_method;
1572 JRT_BLOCK
1573 // Force resolving of caller (if we called from compiled frame)
1574 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1575 current->set_vm_result_metadata(callee_method());
1576 JRT_BLOCK_END
1577 // return compiled code entry point after potential safepoints
1578 return get_resolved_entry(current, callee_method);
1579 JRT_END
1580
1581 // Handle abstract method call
1582 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1583 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1584
1585 // Verbose error message for AbstractMethodError.
1586 // Get the called method from the invoke bytecode.
1587 vframeStream vfst(current, true);
1588 assert(!vfst.at_end(), "Java frame must exist");
1589 methodHandle caller(current, vfst.method());
1590 Bytecode_invoke invoke(caller, vfst.bci());
1591 DEBUG_ONLY( invoke.verify(); )
1592
1593 // Find the compiled caller frame.
1594 RegisterMap reg_map(current,
1595 RegisterMap::UpdateMap::include,
1596 RegisterMap::ProcessFrames::include,
1597 RegisterMap::WalkContinuation::skip);
1598 frame stubFrame = current->last_frame();
1599 assert(stubFrame.is_runtime_frame(), "must be");
1600 frame callerFrame = stubFrame.sender(®_map);
1601 assert(callerFrame.is_compiled_frame(), "must be");
1602
1603 // Install exception and return forward entry.
1604 address res = SharedRuntime::throw_AbstractMethodError_entry();
1611 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1612 }
1613 JRT_BLOCK_END
1614 return res;
1615 JRT_END
1616
1617 // return verified_code_entry if interp_only_mode is not set for the current thread;
1618 // otherwise return c2i entry.
1619 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1620 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1621 // In interp_only_mode we need to go to the interpreted entry
1622 // The c2i won't patch in this mode -- see fixup_callers_callsite
1623 return callee_method->get_c2i_entry();
1624 }
1625 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1626 return callee_method->verified_code_entry();
1627 }
1628
1629 // resolve a static call and patch code
1630 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1631 PerfTraceTime timer(_perf_resolve_static_total_time);
1632
1633 methodHandle callee_method;
1634 bool enter_special = false;
1635 JRT_BLOCK
1636 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1637 current->set_vm_result_metadata(callee_method());
1638 JRT_BLOCK_END
1639 // return compiled code entry point after potential safepoints
1640 return get_resolved_entry(current, callee_method);
1641 JRT_END
1642
// resolve virtual call and update inline cache to monomorphic
// Publishes the resolved callee via TLS and returns its entry point.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1655
1656
// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache). Patch code to resolved target.
// Publishes the resolved callee via TLS and returns its entry point.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1670
1671 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1672 JavaThread* current = THREAD;
1673 ResourceMark rm(current);
1674 CallInfo call_info;
1675 Bytecodes::Code bc;
1676
1677 // receiver is null for static calls. An exception is thrown for null
1678 // receivers for non-static calls
1679 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1680
1681 methodHandle callee_method(current, call_info.selected_method());
1682
1683 Atomic::inc(&_ic_miss_ctr);
1684
1685 #ifndef PRODUCT
1686 // Statistics & Tracing
1687 if (TraceCallFixup) {
1688 ResourceMark rm(current);
1689 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1690 callee_method->print_short_name(tty);
1691 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1692 }
1693
1694 if (ICMissHistogram) {
1695 MutexLocker m(VMStatistic_lock);
1696 RegisterMap reg_map(current,
1697 RegisterMap::UpdateMap::skip,
1698 RegisterMap::ProcessFrames::include,
1699 RegisterMap::WalkContinuation::skip);
1700 frame f = current->last_frame().real_sender(®_map);// skip runtime stub
1701 // produce statistics under the lock
1702 trace_ic_miss(f.pc());
1703 }
1704 #endif
1705
1788 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1789 cdc->set_to_clean();
1790 break;
1791 }
1792
1793 case relocInfo::virtual_call_type: {
1794 // compiled, dispatched call (which used to call an interpreted method)
1795 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1796 inline_cache->set_to_clean();
1797 break;
1798 }
1799 default:
1800 break;
1801 }
1802 }
1803 }
1804 }
1805
1806 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1807
1808 Atomic::inc(&_wrong_method_ctr);
1809
1810 #ifndef PRODUCT
1811 if (TraceCallFixup) {
1812 ResourceMark rm(current);
1813 tty->print("handle_wrong_method reresolving call to");
1814 callee_method->print_short_name(tty);
1815 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1816 }
1817 #endif
1818
1819 return callee_method;
1820 }
1821
1822 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1823 // The faulting unsafe accesses should be changed to throw the error
1824 // synchronously instead. Meanwhile the faulting instruction will be
1825 // skipped over (effectively turning it into a no-op) and an
1826 // asynchronous exception will be raised which the thread will
1827 // handle at a later point. If the instruction is a load it will
1828 // return garbage.
1829
1830 // Request an async exception.
2088 // This is only called when CheckJNICalls is true, and only
2089 // for virtual thread termination.
2090 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
2091 assert(CheckJNICalls, "Only call this when checking JNI usage");
2092 if (log_is_enabled(Debug, jni)) {
2093 JavaThread* current = JavaThread::current();
2094 int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2095 int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2096 log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2097 ") exiting with Objects still locked by JNI MonitorEnter.",
2098 vthread_id, carrier_id);
2099 }
2100 JRT_END
2101
#ifndef PRODUCT

// Non-product only: dumps the IC-miss histogram, the runtime timing
// counters, and the adapter library statistics to tty, bracketed by
// XML head/tail markers when xtty logging is active.
void SharedRuntime::print_statistics() {
  ttyLocker ttyl;
  if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");

  SharedRuntime::print_ic_miss_histogram_on(tty);
  SharedRuntime::print_counters_on(tty);
  AdapterHandlerLibrary::print_statistics_on(tty);

  if (xtty != nullptr) xtty->tail("statistics");
}
2114
2115 //void SharedRuntime::print_counters_on(outputStream* st) {
2116 // // Dump the JRT_ENTRY counters
2117 // if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2118 // if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2119 // if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2120 // if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2121 // if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2122 // if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2123 //
2124 // st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2125 // st->print_cr("%5u wrong method", _wrong_method_ctr);
2126 // st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2127 // st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2128 // st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2129 //
2130 // if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2131 // if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2132 // if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2133 // if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2134 // if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2135 // if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2136 // if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2137 // if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2138 // if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2139 // if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2140 // if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2141 // if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2142 // if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2143 // if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2144 // if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2145 // if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
// if (_unsafe_set_memory_ctr) st->print_cr("%5u unsafe set memory ops", _unsafe_set_memory_ctr);
2147 //}
2148
// Percentage of x relative to y. A denominator below 1 is clamped to 1 so the
// division is always well-defined (e.g. when no calls of that kind were seen).
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y > 1) ? y : 1;
  return 100.0 * (double)x / (double)denom;
}
2152
2153 class MethodArityHistogram {
2154 public:
2155 enum { MAX_ARITY = 256 };
2156 private:
2157 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2158 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2159 static uint64_t _total_compiled_calls;
2160 static uint64_t _max_compiled_calls_per_method;
2161 static int _max_arity; // max. arity seen
2162 static int _max_size; // max. arg size seen
2163
2164 static void add_method_to_histogram(nmethod* nm) {
2165 Method* method = (nm == nullptr) ? nullptr : nm->method();
2166 if (method != nullptr) {
2167 ArgumentCount args(method->signature());
2168 int arity = args.size() + (method->is_static() ? 0 : 1);
2213 // Take the Compile_lock to protect against changes in the CodeBlob structures
2214 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2215 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2216 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2217 _max_arity = _max_size = 0;
2218 _total_compiled_calls = 0;
2219 _max_compiled_calls_per_method = 0;
2220 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2221 CodeCache::nmethods_do(add_method_to_histogram);
2222 print_histogram();
2223 }
2224 };
2225
// Backing storage for MethodArityHistogram's accumulated statistics.
// Zero-initialized here; reset again before each CodeCache walk.
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2232
2233 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2234 tty->print_cr("Calls from compiled code:");
2235 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2236 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2237 int64_t mono_i = _nof_interface_calls;
2238 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2239 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2240 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2241 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2242 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2243 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2244 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2245 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2246 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2247 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2248 tty->cr();
2249 tty->print_cr("Note 1: counter updates are not MT-safe.");
2250 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2251 tty->print_cr(" %% in nested categories are relative to their category");
2252 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2253 tty->cr();
2523 adapter_fp_equals_compact_hashtable_entry> {};
2524 #endif // INCLUDE_CDS
2525
// A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries.
// C-heap allocated (mtCode) so it survives resource-mark scopes; guarded by
// AdapterHandlerLibrary_lock (see assert_lock_strong in lookup()).
using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
                  AnyObj::C_HEAP, mtCode,
                  AdapterFingerPrint::compute_hash,
                  AdapterFingerPrint::equals>;
static AdapterHandlerTable* _adapter_handler_table;
// Flat list of entries; initialized lazily (starts out null).
static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2533
2534 // Find a entry with the same fingerprint if it exists
2535 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(int total_args_passed, BasicType* sig_bt) {
2536 NOT_PRODUCT(_lookups++);
2537 assert_lock_strong(AdapterHandlerLibrary_lock);
2538 AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2539 AdapterHandlerEntry* entry = nullptr;
2540 #if INCLUDE_CDS
2541 // if we are building the archive then the archived adapter table is
2542 // not valid and we need to use the ones added to the runtime table
2543 if (!AOTCodeCache::is_dumping_adapter()) {
2544 // Search archived table first. It is read-only table so can be searched without lock
2545 entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2546 if (entry != nullptr) {
2547 #ifndef PRODUCT
2548 if (fp->is_compact()) {
2549 _compact++;
2550 }
2551 _archived_hits++;
2552 #endif
2553 }
2554 }
2555 #endif // INCLUDE_CDS
2556 if (entry == nullptr) {
2557 assert_lock_strong(AdapterHandlerLibrary_lock);
2558 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2559 if (entry_p != nullptr) {
2560 entry = *entry_p;
2561 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2562 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2563 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2564 #ifndef PRODUCT
2565 if (fp->is_compact()) _compact++;
2566 _runtime_hits++;
2567 #endif
2568 }
2569 }
2570 AdapterFingerPrint::deallocate(fp);
2571 return entry;
2572 }
2573
2574 #ifndef PRODUCT
2575 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2576 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2577 return sizeof(*key) + sizeof(*a);
2578 };
2579 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2580 ts.print(st, "AdapterHandlerTable");
2581 st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2582 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2583 int total_hits = _archived_hits + _runtime_hits;
2584 st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d) compact %d",
2585 _lookups, _equals, total_hits, _archived_hits, _runtime_hits, _compact);
2586 }
2587 #endif // !PRODUCT
2588
2589 // ---------------------------------------------------------------------------
2590 // Implementation of AdapterHandlerLibrary
2591 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2592 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2593 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2594 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2595 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2596 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2597 #if INCLUDE_CDS
2598 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2599 #endif // INCLUDE_CDS
2600 static const int AdapterHandlerLibrary_size = 16*K;
2601 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2602
2603 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2604 assert(_buffer != nullptr, "should be initialized");
2605 return _buffer;
2606 }
2607
2905 comp_args_on_stack,
2906 sig_bt,
2907 regs,
2908 handler);
2909 #ifdef ASSERT
2910 if (VerifyAdapterSharing) {
2911 handler->save_code(buf->code_begin(), buffer.insts_size());
2912 if (is_transient) {
2913 return true;
2914 }
2915 }
2916 #endif
2917
2918 adapter_blob = AdapterBlob::create(&buffer);
2919 if (adapter_blob == nullptr) {
2920 // CodeCache is full, disable compilation
2921 // Ought to log this but compile log is only per compile thread
2922 // and we're some non descript Java thread.
2923 return false;
2924 }
2925 if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
2926 // try to save generated code
2927 const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
2928 const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
2929 int entry_offset[AdapterHandlerEntry::ENTRIES_COUNT];
2930 assert(AdapterHandlerEntry::ENTRIES_COUNT == 4, "sanity");
2931 address i2c_entry = handler->get_i2c_entry();
2932 entry_offset[0] = 0; // i2c_entry offset
2933 entry_offset[1] = handler->get_c2i_entry() - i2c_entry;
2934 entry_offset[2] = handler->get_c2i_unverified_entry() - i2c_entry;
2935 entry_offset[3] = handler->get_c2i_no_clinit_check_entry() - i2c_entry;
2936 bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name, AdapterHandlerEntry::ENTRIES_COUNT, entry_offset);
2937 assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
2938 }
2939 handler->relocate(adapter_blob->content_begin());
2940 #ifndef PRODUCT
2941 // debugging support
2942 if (PrintAdapterHandlers || PrintStubCode) {
2943 print_adapter_handler_info(tty, handler, adapter_blob);
2944 }
2945 #endif
2946 return true;
2947 }
2948
2949 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& adapter_blob,
2950 int total_args_passed,
2951 BasicType* sig_bt,
2952 bool is_transient) {
2953 AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2954 AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
2955 if (!generate_adapter_code(adapter_blob, handler, total_args_passed, sig_bt, is_transient)) {
2956 AdapterHandlerEntry::deallocate(handler);
2957 return nullptr;
3028 AdapterBlob* blob = lookup_aot_cache(handler);
3029 #ifndef PRODUCT
3030 // debugging support
3031 if ((blob != nullptr) && (PrintAdapterHandlers || PrintStubCode)) {
3032 print_adapter_handler_info(tty, handler, blob);
3033 }
3034 #endif
3035 return blob;
3036 }
3037
3038 // This method is used during production run to link archived adapters (stored in AOT Cache)
3039 // to their code in AOT Code Cache
3040 void AdapterHandlerEntry::link() {
3041 AdapterBlob* adapter_blob = nullptr;
3042 ResourceMark rm;
3043 assert(_fingerprint != nullptr, "_fingerprint must not be null");
3044 bool generate_code = false;
3045 // Generate code only if AOTCodeCache is not available, or
3046 // caching adapters is disabled, or we fail to link
3047 // the AdapterHandlerEntry to its code in the AOTCodeCache
3048 if (AOTCodeCache::is_using_adapter()) {
3049 adapter_blob = AdapterHandlerLibrary::link_aot_adapter_handler(this);
3050 if (adapter_blob == nullptr) {
3051 log_warning(cds)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
3052 generate_code = true;
3053 }
3054 } else {
3055 generate_code = true;
3056 }
3057 if (generate_code) {
3058 int nargs;
3059 BasicType* bt = _fingerprint->as_basic_type(nargs);
3060 if (!AdapterHandlerLibrary::generate_adapter_code(adapter_blob, this, nargs, bt, /* is_transient */ false)) {
3061 // Don't throw exceptions during VM initialization because java.lang.* classes
3062 // might not have been initialized, causing problems when constructing the
3063 // Java exception object.
3064 vm_exit_during_initialization("Out of space in CodeCache for adapters");
3065 }
3066 }
3067 // Outside of the lock
3068 if (adapter_blob != nullptr) {
3515 assert(found, "Should have found handler");
3516 }
3517
3518 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3519 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3520 if (get_i2c_entry() != nullptr) {
3521 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3522 }
3523 if (get_c2i_entry() != nullptr) {
3524 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3525 }
3526 if (get_c2i_unverified_entry() != nullptr) {
3527 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3528 }
3529 if (get_c2i_no_clinit_check_entry() != nullptr) {
3530 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3531 }
3532 st->cr();
3533 }
3534
3535 bool AdapterHandlerLibrary::is_abstract_method_adapter(AdapterHandlerEntry* entry) {
3536 if (entry == _abstract_method_handler) {
3537 return true;
3538 }
3539 return false;
3540 }
3541
// Re-enable the current thread's reserved stack zone (used for
// @ReservedStackAccess support) and reset the activation watermark to the
// stack base. Leaf runtime call: no safepoint, no handle allocation.
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  StackOverflow* overflow_state = current->stack_overflow_state();
  // check_if_disabled=true: only re-arm a zone that was actually disabled.
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3548
3549 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3550 ResourceMark rm(current);
3551 frame activation;
3552 nmethod* nm = nullptr;
3553 int count = 1;
3554
|