
src/hotspot/share/runtime/sharedRuntime.cpp


  50 #include "metaprogramming/primitiveConversions.hpp"
  51 #include "oops/klass.hpp"
  52 #include "oops/method.inline.hpp"
  53 #include "oops/objArrayKlass.hpp"
  54 #include "oops/oop.inline.hpp"
  55 #include "prims/forte.hpp"
  56 #include "prims/jvmtiExport.hpp"
  57 #include "prims/jvmtiThreadState.hpp"
  58 #include "prims/methodHandles.hpp"
  59 #include "prims/nativeLookup.hpp"
  60 #include "runtime/arguments.hpp"
  61 #include "runtime/atomic.hpp"
  62 #include "runtime/basicLock.inline.hpp"
  63 #include "runtime/frame.inline.hpp"
  64 #include "runtime/handles.inline.hpp"
  65 #include "runtime/init.hpp"
  66 #include "runtime/interfaceSupport.inline.hpp"
  67 #include "runtime/java.hpp"
  68 #include "runtime/javaCalls.hpp"
  69 #include "runtime/jniHandles.inline.hpp"
  70 #include "runtime/perfData.hpp"
  71 #include "runtime/sharedRuntime.hpp"
  72 #include "runtime/stackWatermarkSet.hpp"
  73 #include "runtime/stubRoutines.hpp"
  74 #include "runtime/synchronizer.inline.hpp"
  75 #include "runtime/timerTrace.hpp"
  76 #include "runtime/vframe.inline.hpp"
  77 #include "runtime/vframeArray.hpp"
  78 #include "runtime/vm_version.hpp"

  79 #include "utilities/copy.hpp"
  80 #include "utilities/dtrace.hpp"
  81 #include "utilities/events.hpp"
  82 #include "utilities/globalDefinitions.hpp"
  83 #include "utilities/hashTable.hpp"
  84 #include "utilities/macros.hpp"
  85 #include "utilities/xmlstream.hpp"
  86 #ifdef COMPILER1
  87 #include "c1/c1_Runtime1.hpp"
  88 #endif
  89 #if INCLUDE_JFR
  90 #include "jfr/jfr.inline.hpp"
  91 #endif
  92 
  93 // Shared runtime stub routines reside in their own unique blob with a
  94 // single entry point
  95 
  96 
  97 #define SHARED_STUB_FIELD_DEFINE(name, type) \
  98   type*       SharedRuntime::BLOB_FIELD_NAME(name);
  99   SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
 100 #undef SHARED_STUB_FIELD_DEFINE
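The blob fields are stamped out by the SHARED_STUBS_DO X-macro rather than written by hand. A minimal sketch of one expansion, assuming BLOB_FIELD_NAME(name) pastes the conventional _<name>_blob identifier and that wrong_method is one of the (name, type) pairs supplied by SHARED_STUBS_DO:

    // Hypothetical (name, type) pair supplied by SHARED_STUBS_DO:
    //   SHARED_STUB_FIELD_DEFINE(wrong_method, RuntimeStub)
    // which the macro above turns into the static field definition:
    //   RuntimeStub* SharedRuntime::_wrong_method_blob;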
 101 
 102 nmethod*            SharedRuntime::_cont_doYield_stub;
 103 
 104 #if 0
 105 // TODO tweak global stub name generation to match this
 106 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
 107 const char *SharedRuntime::_stub_names[] = {
 108   SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
 109 };
 110 #endif
 111 
 112 //----------------------------generate_stubs-----------------------------------
 113 void SharedRuntime::generate_initial_stubs() {
 114   // Build this early so it's available for the interpreter.
 115   _throw_StackOverflowError_blob =
 116     generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
 117                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
 118 }
 119 
 120 void SharedRuntime::generate_stubs() {
 121   _wrong_method_blob =
 122     generate_resolve_blob(StubId::shared_wrong_method_id,
 123                           CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));

 153     generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
 154                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
 155 
 156 #if COMPILER2_OR_JVMCI
 157   // Vectors are generated only by C2 and JVMCI.
 158   bool support_wide = is_wide_vector(MaxVectorSize);
 159   if (support_wide) {
 160     _polling_page_vectors_safepoint_handler_blob =
 161       generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
 162                             CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 163   }
 164 #endif // COMPILER2_OR_JVMCI
 165   _polling_page_safepoint_handler_blob =
 166     generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
 167                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 168   _polling_page_return_handler_blob =
 169     generate_handler_blob(StubId::shared_polling_page_return_handler_id,
 170                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 171 
 172   generate_deopt_blob();
 173 }
 174 
 175 void SharedRuntime::init_adapter_library() {
 176   AdapterHandlerLibrary::initialize();
 177 }
 178 
 179 #if INCLUDE_JFR
 180 //------------------------------generate jfr runtime stubs ------
 181 void SharedRuntime::generate_jfr_stubs() {
 182   ResourceMark rm;
 183   const char* timer_msg = "SharedRuntime generate_jfr_stubs";
 184   TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
 185 
 186   _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
 187   _jfr_return_lease_blob = generate_jfr_return_lease();
 188 }
 189 
 190 #endif // INCLUDE_JFR
 191 
 192 #include <math.h>
 193 
 194 // Implementation of SharedRuntime
 195 
 196 #ifndef PRODUCT
 197 // For statistics
 198 uint SharedRuntime::_ic_miss_ctr = 0;
 199 uint SharedRuntime::_wrong_method_ctr = 0;
 200 uint SharedRuntime::_resolve_static_ctr = 0;
 201 uint SharedRuntime::_resolve_virtual_ctr = 0;
 202 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;


 203 uint SharedRuntime::_implicit_null_throws = 0;
 204 uint SharedRuntime::_implicit_div0_throws = 0;
 205 
 206 int64_t SharedRuntime::_nof_normal_calls = 0;
 207 int64_t SharedRuntime::_nof_inlined_calls = 0;
 208 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 209 int64_t SharedRuntime::_nof_static_calls = 0;
 210 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 211 int64_t SharedRuntime::_nof_interface_calls = 0;
 212 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 213 
 214 uint SharedRuntime::_new_instance_ctr=0;
 215 uint SharedRuntime::_new_array_ctr=0;
 216 uint SharedRuntime::_multi2_ctr=0;
 217 uint SharedRuntime::_multi3_ctr=0;
 218 uint SharedRuntime::_multi4_ctr=0;
 219 uint SharedRuntime::_multi5_ctr=0;
 220 uint SharedRuntime::_mon_enter_stub_ctr=0;
 221 uint SharedRuntime::_mon_exit_stub_ctr=0;
 222 uint SharedRuntime::_mon_enter_ctr=0;

 236 uint SharedRuntime::_unsafe_set_memory_ctr=0;
 237 
 238 int     SharedRuntime::_ICmiss_index                    = 0;
 239 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 240 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 241 
 242 
 243 void SharedRuntime::trace_ic_miss(address at) {
 244   for (int i = 0; i < _ICmiss_index; i++) {
 245     if (_ICmiss_at[i] == at) {
 246       _ICmiss_count[i]++;
 247       return;
 248     }
 249   }
 250   int index = _ICmiss_index++;
 251   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 252   _ICmiss_at[index] = at;
 253   _ICmiss_count[index] = 1;
 254 }
 255 
 256 void SharedRuntime::print_ic_miss_histogram() {
 257   if (ICMissHistogram) {
 258     tty->print_cr("IC Miss Histogram:");
 259     int tot_misses = 0;
 260     for (int i = 0; i < _ICmiss_index; i++) {
 261       tty->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 262       tot_misses += _ICmiss_count[i];
 263     }
 264     tty->print_cr("Total IC misses: %7d", tot_misses);
 265   }
 266 }
 267 #endif // PRODUCT
 268 
 269 
 270 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
 271   return x * y;
 272 JRT_END
 273 
 274 
 275 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 276   if (x == min_jlong && y == CONST64(-1)) {
 277     return x;
 278   } else {
 279     return x / y;
 280   }
 281 JRT_END
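The min_jlong special case here (and the matching zero result in lrem below) exists because the true quotient of Long.MIN_VALUE / -1 does not fit in 64 bits: the JLS defines the result as the dividend itself, while a plain machine divide would overflow and, on x86, trap. A minimal standalone sketch of the same guard, not part of this file:

    #include <cstdint>

    // Mirrors the JLS rule implemented by SharedRuntime::ldiv above.
    int64_t java_ldiv(int64_t dividend, int64_t divisor) {
      if (dividend == INT64_MIN && divisor == -1) {
        return dividend;          // JLS: Long.MIN_VALUE / -1 == Long.MIN_VALUE
      }
      return dividend / divisor;  // well defined for every other non-zero divisor
    }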
 282 
 283 
 284 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 285   if (x == min_jlong && y == CONST64(-1)) {
 286     return 0;
 287   } else {

 715   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 716   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 717   JNIHandles::destroy_local(vthread);
 718 JRT_END
 719 #endif // INCLUDE_JVMTI
 720 
  721 // The interpreter code that calls this tracing function is only generated
  722 // when unified logging (UL) is enabled at the right level for the redefine,
  723 // class and obsolete tags. Since obsolete methods are never compiled, we
  724 // don't have to modify the compilers to generate calls to this function.
 725 //
 726 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
 727     JavaThread* thread, Method* method))
 728   if (method->is_obsolete()) {
 729     // We are calling an obsolete method, but this is not necessarily
 730     // an error. Our method could have been redefined just after we
 731     // fetched the Method* from the constant pool.
 732     ResourceMark rm;
 733     log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
 734   }
 735   return 0;
 736 JRT_END
 737 
 738 // ret_pc points into caller; we are returning caller's exception handler
 739 // for given exception
  740 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
 741 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 742                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 743   assert(nm != nullptr, "must exist");
 744   ResourceMark rm;
 745 
 746 #if INCLUDE_JVMCI
 747   if (nm->is_compiled_by_jvmci()) {
 748     // lookup exception handler for this pc
 749     int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
 750     ExceptionHandlerTable table(nm);
 751     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 752     if (t != nullptr) {
 753       return nm->code_begin() + t->pco();
 754     } else {

1354 
1355   // determine call info & receiver
1356   // note: a) receiver is null for static calls
1357   //       b) an exception is thrown if receiver is null for non-static calls
1358   CallInfo call_info;
1359   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1360   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1361 
1362   NoSafepointVerifier nsv;
1363 
1364   methodHandle callee_method(current, call_info.selected_method());
1365 
1366   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1367          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1368          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1369          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1370          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1371 
1372   assert(!caller_nm->is_unloading(), "It should not be unloading");
1373 
1374 #ifndef PRODUCT
1375   // tracing/debugging/statistics
1376   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1377                  (is_virtual) ? (&_resolve_virtual_ctr) :
1378                                 (&_resolve_static_ctr);
1379   Atomic::inc(addr);
1380 

1381   if (TraceCallFixup) {
1382     ResourceMark rm(current);
1383     tty->print("resolving %s%s (%s) call to",
1384                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1385                Bytecodes::name(invoke_code));
1386     callee_method->print_short_name(tty);
1387     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1388                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1389   }
1390 #endif
1391 
1392   if (invoke_code == Bytecodes::_invokestatic) {
1393     assert(callee_method->method_holder()->is_initialized() ||
1394            callee_method->method_holder()->is_reentrant_initialization(current),
1395            "invalid class initialization state for invoke_static");
1396     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1397       // In order to keep class initialization check, do not patch call
1398       // site for static call when the class is not fully initialized.
1399       // Proper check is enforced by call site re-resolution on every invocation.
1400       //

1416 
1417   // Make sure the callee nmethod does not get deoptimized and removed before
1418   // we are done patching the code.
1419 
1420 
1421   CompiledICLocker ml(caller_nm);
1422   if (is_virtual && !is_optimized) {
1423     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1424     inline_cache->update(&call_info, receiver->klass());
1425   } else {
1426     // Callsite is a direct call - set it to the destination method
1427     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1428     callsite->set(callee_method);
1429   }
1430 
1431   return callee_method;
1432 }
1433 
1434 // Inline caches exist only in compiled code
1435 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))


1436 #ifdef ASSERT
1437   RegisterMap reg_map(current,
1438                       RegisterMap::UpdateMap::skip,
1439                       RegisterMap::ProcessFrames::include,
1440                       RegisterMap::WalkContinuation::skip);
1441   frame stub_frame = current->last_frame();
1442   assert(stub_frame.is_runtime_frame(), "sanity check");
1443   frame caller_frame = stub_frame.sender(&reg_map);
1444   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1445 #endif /* ASSERT */
1446 
1447   methodHandle callee_method;
1448   JRT_BLOCK
1449     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1450     // Return Method* through TLS
1451     current->set_vm_result_metadata(callee_method());
1452   JRT_BLOCK_END
1453   // return compiled code entry point after potential safepoints
1454   return get_resolved_entry(current, callee_method);
1455 JRT_END
1456 
1457 
1458 // Handle call site that has been made non-entrant
1459 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))


1460   // 6243940 We might end up in here if the callee is deoptimized
1461   // as we race to call it.  We don't want to take a safepoint if
1462   // the caller was interpreted because the caller frame will look
1463   // interpreted to the stack walkers and arguments are now
1464   // "compiled" so it is much better to make this transition
1465   // invisible to the stack walking code. The i2c path will
1466   // place the callee method in the callee_target. It is stashed
 1467   // there because if we tried to find the callee by normal means a safepoint
 1468   // would be possible and we could have trouble gc'ing the compiled args.
1469   RegisterMap reg_map(current,
1470                       RegisterMap::UpdateMap::skip,
1471                       RegisterMap::ProcessFrames::include,
1472                       RegisterMap::WalkContinuation::skip);
1473   frame stub_frame = current->last_frame();
1474   assert(stub_frame.is_runtime_frame(), "sanity check");
1475   frame caller_frame = stub_frame.sender(&reg_map);
1476 
1477   if (caller_frame.is_interpreted_frame() ||
1478       caller_frame.is_entry_frame() ||
1479       caller_frame.is_upcall_stub_frame()) {

1492       // so bypassing it in c2i adapter is benign.
1493       return callee->get_c2i_no_clinit_check_entry();
1494     } else {
1495       return callee->get_c2i_entry();
1496     }
1497   }
1498 
 1499   // Must be the compiled-to-compiled path, which is safe to stack walk
1500   methodHandle callee_method;
1501   JRT_BLOCK
1502     // Force resolving of caller (if we called from compiled frame)
1503     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1504     current->set_vm_result_metadata(callee_method());
1505   JRT_BLOCK_END
1506   // return compiled code entry point after potential safepoints
1507   return get_resolved_entry(current, callee_method);
1508 JRT_END
1509 
1510 // Handle abstract method call
1511 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))


1512   // Verbose error message for AbstractMethodError.
1513   // Get the called method from the invoke bytecode.
1514   vframeStream vfst(current, true);
1515   assert(!vfst.at_end(), "Java frame must exist");
1516   methodHandle caller(current, vfst.method());
1517   Bytecode_invoke invoke(caller, vfst.bci());
1518   DEBUG_ONLY( invoke.verify(); )
1519 
1520   // Find the compiled caller frame.
1521   RegisterMap reg_map(current,
1522                       RegisterMap::UpdateMap::include,
1523                       RegisterMap::ProcessFrames::include,
1524                       RegisterMap::WalkContinuation::skip);
1525   frame stubFrame = current->last_frame();
1526   assert(stubFrame.is_runtime_frame(), "must be");
1527   frame callerFrame = stubFrame.sender(&reg_map);
1528   assert(callerFrame.is_compiled_frame(), "must be");
1529 
1530   // Install exception and return forward entry.
1531   address res = SharedRuntime::throw_AbstractMethodError_entry();

1538       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1539     }
1540   JRT_BLOCK_END
1541   return res;
1542 JRT_END
1543 
1544 // return verified_code_entry if interp_only_mode is not set for the current thread;
1545 // otherwise return c2i entry.
1546 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1547   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1548     // In interp_only_mode we need to go to the interpreted entry
1549     // The c2i won't patch in this mode -- see fixup_callers_callsite
1550     return callee_method->get_c2i_entry();
1551   }
1552   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1553   return callee_method->verified_code_entry();
1554 }
1555 
1556 // resolve a static call and patch code
1557 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))


1558   methodHandle callee_method;
1559   bool enter_special = false;
1560   JRT_BLOCK
1561     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1562     current->set_vm_result_metadata(callee_method());
1563   JRT_BLOCK_END
1564   // return compiled code entry point after potential safepoints
1565   return get_resolved_entry(current, callee_method);
1566 JRT_END
1567 
1568 // resolve virtual call and update inline cache to monomorphic
1569 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))


1570   methodHandle callee_method;
1571   JRT_BLOCK
1572     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1573     current->set_vm_result_metadata(callee_method());
1574   JRT_BLOCK_END
1575   // return compiled code entry point after potential safepoints
1576   return get_resolved_entry(current, callee_method);
1577 JRT_END
1578 
1579 
1580 // Resolve a virtual call that can be statically bound (e.g., always
1581 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1582 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))


1583   methodHandle callee_method;
1584   JRT_BLOCK
1585     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1586     current->set_vm_result_metadata(callee_method());
1587   JRT_BLOCK_END
1588   // return compiled code entry point after potential safepoints
1589   return get_resolved_entry(current, callee_method);
1590 JRT_END
1591 
1592 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1593   JavaThread* current = THREAD;
1594   ResourceMark rm(current);
1595   CallInfo call_info;
1596   Bytecodes::Code bc;
1597 
1598   // receiver is null for static calls. An exception is thrown for null
1599   // receivers for non-static calls
1600   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1601 
1602   methodHandle callee_method(current, call_info.selected_method());
1603 
1604 #ifndef PRODUCT
1605   Atomic::inc(&_ic_miss_ctr);
1606 

1607   // Statistics & Tracing
1608   if (TraceCallFixup) {
1609     ResourceMark rm(current);
1610     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1611     callee_method->print_short_name(tty);
1612     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1613   }
1614 
1615   if (ICMissHistogram) {
1616     MutexLocker m(VMStatistic_lock);
1617     RegisterMap reg_map(current,
1618                         RegisterMap::UpdateMap::skip,
1619                         RegisterMap::ProcessFrames::include,
1620                         RegisterMap::WalkContinuation::skip);
1621     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1622     // produce statistics under the lock
1623     trace_ic_miss(f.pc());
1624   }
1625 #endif
1626 

1709             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1710             cdc->set_to_clean();
1711             break;
1712           }
1713 
1714           case relocInfo::virtual_call_type: {
1715             // compiled, dispatched call (which used to call an interpreted method)
1716             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1717             inline_cache->set_to_clean();
1718             break;
1719           }
1720           default:
1721             break;
1722         }
1723       }
1724     }
1725   }
1726 
1727   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1728 
1729 
1730 #ifndef PRODUCT
1731   Atomic::inc(&_wrong_method_ctr);
1732 

1733   if (TraceCallFixup) {
1734     ResourceMark rm(current);
1735     tty->print("handle_wrong_method reresolving call to");
1736     callee_method->print_short_name(tty);
1737     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1738   }
1739 #endif
1740 
1741   return callee_method;
1742 }
1743 
1744 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1745   // The faulting unsafe accesses should be changed to throw the error
1746   // synchronously instead. Meanwhile the faulting instruction will be
1747   // skipped over (effectively turning it into a no-op) and an
1748   // asynchronous exception will be raised which the thread will
1749   // handle at a later point. If the instruction is a load it will
1750   // return garbage.
1751 
1752   // Request an async exception.

2010 // This is only called when CheckJNICalls is true, and only
2011 // for virtual thread termination.
2012 JRT_LEAF(void,  SharedRuntime::log_jni_monitor_still_held())
2013   assert(CheckJNICalls, "Only call this when checking JNI usage");
2014   if (log_is_enabled(Debug, jni)) {
2015     JavaThread* current = JavaThread::current();
2016     int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2017     int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2018     log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2019                    ") exiting with Objects still locked by JNI MonitorEnter.",
2020                    vthread_id, carrier_id);
2021   }
2022 JRT_END
2023 
2024 #ifndef PRODUCT
2025 
2026 void SharedRuntime::print_statistics() {
2027   ttyLocker ttyl;
2028   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
2029 
2030   SharedRuntime::print_ic_miss_histogram();
2031 
2032   // Dump the JRT_ENTRY counters
2033   if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
2034   if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
2035   if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2036   if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2037   if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2038   if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2039 
2040   tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2041   tty->print_cr("%5u wrong method", _wrong_method_ctr);
2042   tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2043   tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2044   tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2045 
2046   if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2047   if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2048   if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2049   if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2050   if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2051   if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2052   if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2053   if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
2054   if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2055   if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2056   if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2057   if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2058   if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2059   if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2060   if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
2061   if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
 2062   if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memory ops", _unsafe_set_memory_ctr);
2063 
2064   AdapterHandlerLibrary::print_statistics();
2065 
2066   if (xtty != nullptr)  xtty->tail("statistics");
2067 }
2068 
2069 inline double percent(int64_t x, int64_t y) {
2070   return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2071 }
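percent() clamps the denominator with MAX2(y, 1), so an empty category never divides by zero; the trade-off is that with a zero denominator the value is simply 100 * x and is not a meaningful percentage. Two worked values:

    //   percent(250, 1000) == 25.0
    //   percent(5, 0)      == 500.0   // denominator clamped to 1; can exceed 100%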
2072 
2073 class MethodArityHistogram {
2074  public:
2075   enum { MAX_ARITY = 256 };
2076  private:
2077   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2078   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2079   static uint64_t _total_compiled_calls;
2080   static uint64_t _max_compiled_calls_per_method;
2081   static int _max_arity;                       // max. arity seen
2082   static int _max_size;                        // max. arg size seen
2083 
2084   static void add_method_to_histogram(nmethod* nm) {
2085     Method* method = (nm == nullptr) ? nullptr : nm->method();
2086     if (method != nullptr) {
2087       ArgumentCount args(method->signature());
2088       int arity   = args.size() + (method->is_static() ? 0 : 1);

2133     // Take the Compile_lock to protect against changes in the CodeBlob structures
2134     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2135     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2136     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2137     _max_arity = _max_size = 0;
2138     _total_compiled_calls = 0;
2139     _max_compiled_calls_per_method = 0;
2140     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2141     CodeCache::nmethods_do(add_method_to_histogram);
2142     print_histogram();
2143   }
2144 };
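Collection is driven entirely by construction: the constructor takes Compile_lock and CodeCache_lock, zeroes the arrays, walks every nmethod via CodeCache::nmethods_do and prints the result. A minimal usage sketch, illustrative only and not a call site in this file:

    {
      MethodArityHistogram histogram;   // gathers arity/size data from the code cache and prints it
    }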
2145 
2146 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2147 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2148 uint64_t MethodArityHistogram::_total_compiled_calls;
2149 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2150 int MethodArityHistogram::_max_arity;
2151 int MethodArityHistogram::_max_size;
2152 
2153 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2154   tty->print_cr("Calls from compiled code:");
2155   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2156   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2157   int64_t mono_i = _nof_interface_calls;
2158   tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2159   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2160   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2161   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2162   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2163   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2164   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2165   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2166   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2167   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2168   tty->cr();
2169   tty->print_cr("Note 1: counter updates are not MT-safe.");
2170   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2171   tty->print_cr("        %% in nested categories are relative to their category");
2172   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2173   tty->cr();

2477   }
2478 #endif // INCLUDE_CDS
2479   if (entry == nullptr) {
2480     assert_lock_strong(AdapterHandlerLibrary_lock);
2481     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2482     if (entry_p != nullptr) {
2483       entry = *entry_p;
2484       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2485              entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2486              fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2487   #ifndef PRODUCT
2488       _runtime_hits++;
2489   #endif
2490     }
2491   }
2492   AdapterFingerPrint::deallocate(fp);
2493   return entry;
2494 }
2495 
2496 #ifndef PRODUCT
2497 static void print_table_statistics() {
2498   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2499     return sizeof(*key) + sizeof(*a);
2500   };
2501   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2502   ts.print(tty, "AdapterHandlerTable");
2503   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2504                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2505   int total_hits = _archived_hits + _runtime_hits;
2506   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2507                 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2508 }
2509 #endif
2510 
2511 // ---------------------------------------------------------------------------
2512 // Implementation of AdapterHandlerLibrary
2513 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2514 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2515 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2516 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2517 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2518 #if INCLUDE_CDS
2519 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2520 #endif // INCLUDE_CDS
2521 static const int AdapterHandlerLibrary_size = 16*K;
2522 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2523 
2524 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2525   assert(_buffer != nullptr, "should be initialized");
2526   return _buffer;
2527 }
2528 
2529 static void post_adapter_creation(const AdapterBlob* new_adapter,

3452   assert(found, "Should have found handler");
3453 }
3454 
3455 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3456   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3457   if (get_i2c_entry() != nullptr) {
3458     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3459   }
3460   if (get_c2i_entry() != nullptr) {
3461     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3462   }
3463   if (get_c2i_unverified_entry() != nullptr) {
3464     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3465   }
3466   if (get_c2i_no_clinit_check_entry() != nullptr) {
3467     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3468   }
3469   st->cr();
3470 }
3471 
3472 #ifndef PRODUCT
3473 
3474 void AdapterHandlerLibrary::print_statistics() {
3475   print_table_statistics();
3476 }
3477 
3478 #endif /* PRODUCT */
3479 
3480 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3481   assert(current == JavaThread::current(), "pre-condition");
3482   StackOverflow* overflow_state = current->stack_overflow_state();
3483   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3484   overflow_state->set_reserved_stack_activation(current->stack_base());
3485 JRT_END
3486 
3487 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3488   ResourceMark rm(current);
3489   frame activation;
3490   nmethod* nm = nullptr;
3491   int count = 1;
3492 
3493   assert(fr.is_java_frame(), "Must start on Java frame");
3494 
3495   RegisterMap map(JavaThread::current(),
3496                   RegisterMap::UpdateMap::skip,
3497                   RegisterMap::ProcessFrames::skip,
3498                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3499   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {

  50 #include "metaprogramming/primitiveConversions.hpp"
  51 #include "oops/klass.hpp"
  52 #include "oops/method.inline.hpp"
  53 #include "oops/objArrayKlass.hpp"
  54 #include "oops/oop.inline.hpp"
  55 #include "prims/forte.hpp"
  56 #include "prims/jvmtiExport.hpp"
  57 #include "prims/jvmtiThreadState.hpp"
  58 #include "prims/methodHandles.hpp"
  59 #include "prims/nativeLookup.hpp"
  60 #include "runtime/arguments.hpp"
  61 #include "runtime/atomic.hpp"
  62 #include "runtime/basicLock.inline.hpp"
  63 #include "runtime/frame.inline.hpp"
  64 #include "runtime/handles.inline.hpp"
  65 #include "runtime/init.hpp"
  66 #include "runtime/interfaceSupport.inline.hpp"
  67 #include "runtime/java.hpp"
  68 #include "runtime/javaCalls.hpp"
  69 #include "runtime/jniHandles.inline.hpp"
  70 #include "runtime/perfData.inline.hpp"
  71 #include "runtime/sharedRuntime.hpp"
  72 #include "runtime/stackWatermarkSet.hpp"
  73 #include "runtime/stubRoutines.hpp"
  74 #include "runtime/synchronizer.inline.hpp"
  75 #include "runtime/timerTrace.hpp"
  76 #include "runtime/vframe.inline.hpp"
  77 #include "runtime/vframeArray.hpp"
  78 #include "runtime/vm_version.hpp"
  79 #include "services/management.hpp"
  80 #include "utilities/copy.hpp"
  81 #include "utilities/dtrace.hpp"
  82 #include "utilities/events.hpp"
  83 #include "utilities/globalDefinitions.hpp"
  84 #include "utilities/hashTable.hpp"
  85 #include "utilities/macros.hpp"
  86 #include "utilities/xmlstream.hpp"
  87 #ifdef COMPILER1
  88 #include "c1/c1_Runtime1.hpp"
  89 #endif
  90 #if INCLUDE_JFR
  91 #include "jfr/jfr.inline.hpp"
  92 #endif
  93 
  94 // Shared runtime stub routines reside in their own unique blob with a
  95 // single entry point
  96 
  97 
  98 #define SHARED_STUB_FIELD_DEFINE(name, type) \
  99   type*       SharedRuntime::BLOB_FIELD_NAME(name);
 100   SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
 101 #undef SHARED_STUB_FIELD_DEFINE
 102 
 103 nmethod*            SharedRuntime::_cont_doYield_stub;
 104 
 105 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
 106 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time     = nullptr;
 107 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time      = nullptr;
 108 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
 109 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time             = nullptr;
 110 
 111 #if 0
 112 // TODO tweak global stub name generation to match this
 113 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
 114 const char *SharedRuntime::_stub_names[] = {
 115   SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
 116 };
 117 #endif
 118 
 119 //----------------------------generate_stubs-----------------------------------
 120 void SharedRuntime::generate_initial_stubs() {
 121   // Build this early so it's available for the interpreter.
 122   _throw_StackOverflowError_blob =
 123     generate_throw_exception(StubId::shared_throw_StackOverflowError_id,
 124                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
 125 }
 126 
 127 void SharedRuntime::generate_stubs() {
 128   _wrong_method_blob =
 129     generate_resolve_blob(StubId::shared_wrong_method_id,
 130                           CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));

 160     generate_throw_exception(StubId::shared_throw_NullPointerException_at_call_id,
 161                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
 162 
 163 #if COMPILER2_OR_JVMCI
 164   // Vectors are generated only by C2 and JVMCI.
 165   bool support_wide = is_wide_vector(MaxVectorSize);
 166   if (support_wide) {
 167     _polling_page_vectors_safepoint_handler_blob =
 168       generate_handler_blob(StubId::shared_polling_page_vectors_safepoint_handler_id,
 169                             CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 170   }
 171 #endif // COMPILER2_OR_JVMCI
 172   _polling_page_safepoint_handler_blob =
 173     generate_handler_blob(StubId::shared_polling_page_safepoint_handler_id,
 174                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 175   _polling_page_return_handler_blob =
 176     generate_handler_blob(StubId::shared_polling_page_return_handler_id,
 177                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 178 
 179   generate_deopt_blob();
 180 
 181   if (UsePerfData) {
 182     EXCEPTION_MARK;
  183     NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resolve_opt_virtual_call");
  184     NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time,     SUN_CI, "resolve_virtual_call");
  185     NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time,      SUN_CI, "resolve_static_call");
  186     NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
  187     NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time,             SUN_CI, "ic_miss");
 188     if (HAS_PENDING_EXCEPTION) {
 189       vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
 190     }
 191   }
 192 }
 193 
 194 void SharedRuntime::init_adapter_library() {
 195   AdapterHandlerLibrary::initialize();
 196 }
 197 
 198 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
 199   st->print("  %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
 200   if (TraceThreadTime) {
 201     st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
 202   }
 203   st->print(" / %5d events", cnt);
 204   st->cr();
 205 }
 206 
 207 void SharedRuntime::print_counters_on(outputStream* st) {
 208   st->print_cr("SharedRuntime:");
 209   if (UsePerfData) {
 210     print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
 211     print_counter_on(st, "resolve_virtual_call:",     _perf_resolve_virtual_total_time,     _resolve_virtual_ctr);
 212     print_counter_on(st, "resolve_static_call:",      _perf_resolve_static_total_time,      _resolve_static_ctr);
 213     print_counter_on(st, "handle_wrong_method:",      _perf_handle_wrong_method_total_time, _wrong_method_ctr);
 214     print_counter_on(st, "ic_miss:",                  _perf_ic_miss_total_time,             _ic_miss_ctr);
 215 
 216     jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
 217                                                           _perf_resolve_virtual_total_time->elapsed_counter_value() +
 218                                                           _perf_resolve_static_total_time->elapsed_counter_value() +
 219                                                           _perf_handle_wrong_method_total_time->elapsed_counter_value() +
 220                                                           _perf_ic_miss_total_time->elapsed_counter_value());
 221     st->print("Total:                      " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
 222     if (TraceThreadTime) {
 223       jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
 224                                                            _perf_resolve_virtual_total_time->thread_counter_value() +
 225                                                            _perf_resolve_static_total_time->thread_counter_value() +
 226                                                            _perf_handle_wrong_method_total_time->thread_counter_value() +
 227                                                            _perf_ic_miss_total_time->thread_counter_value());
 228       st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);
 229 
 230     }
 231     st->cr();
 232   } else {
 233     st->print_cr("  no data (UsePerfData is turned off)");
 234   }
 235 }
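Each printed line pairs the elapsed-time total of one PerfTickCounters instance (plus its thread-time total when TraceThreadTime is set) with the matching SharedRuntime event counter. With illustrative numbers and TraceThreadTime off, a single print_counter_on line comes out roughly as:

    //   resolve_virtual_call:           1234us /   567 events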
 236 
 237 #if INCLUDE_JFR
 238 //------------------------------generate jfr runtime stubs ------
 239 void SharedRuntime::generate_jfr_stubs() {
 240   ResourceMark rm;
 241   const char* timer_msg = "SharedRuntime generate_jfr_stubs";
 242   TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
 243 
 244   _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
 245   _jfr_return_lease_blob = generate_jfr_return_lease();
 246 }
 247 
 248 #endif // INCLUDE_JFR
 249 
 250 #include <math.h>
 251 
 252 // Implementation of SharedRuntime
 253 

 254 // For statistics
 255 uint SharedRuntime::_ic_miss_ctr = 0;
 256 uint SharedRuntime::_wrong_method_ctr = 0;
 257 uint SharedRuntime::_resolve_static_ctr = 0;
 258 uint SharedRuntime::_resolve_virtual_ctr = 0;
 259 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
 260 
 261 #ifndef PRODUCT
 262 uint SharedRuntime::_implicit_null_throws = 0;
 263 uint SharedRuntime::_implicit_div0_throws = 0;
 264 
 265 int64_t SharedRuntime::_nof_normal_calls = 0;
 266 int64_t SharedRuntime::_nof_inlined_calls = 0;
 267 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 268 int64_t SharedRuntime::_nof_static_calls = 0;
 269 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 270 int64_t SharedRuntime::_nof_interface_calls = 0;
 271 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 272 
 273 uint SharedRuntime::_new_instance_ctr=0;
 274 uint SharedRuntime::_new_array_ctr=0;
 275 uint SharedRuntime::_multi2_ctr=0;
 276 uint SharedRuntime::_multi3_ctr=0;
 277 uint SharedRuntime::_multi4_ctr=0;
 278 uint SharedRuntime::_multi5_ctr=0;
 279 uint SharedRuntime::_mon_enter_stub_ctr=0;
 280 uint SharedRuntime::_mon_exit_stub_ctr=0;
 281 uint SharedRuntime::_mon_enter_ctr=0;

 295 uint SharedRuntime::_unsafe_set_memory_ctr=0;
 296 
 297 int     SharedRuntime::_ICmiss_index                    = 0;
 298 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 299 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 300 
 301 
 302 void SharedRuntime::trace_ic_miss(address at) {
 303   for (int i = 0; i < _ICmiss_index; i++) {
 304     if (_ICmiss_at[i] == at) {
 305       _ICmiss_count[i]++;
 306       return;
 307     }
 308   }
 309   int index = _ICmiss_index++;
 310   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 311   _ICmiss_at[index] = at;
 312   _ICmiss_count[index] = 1;
 313 }
 314 
 315 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
 316   if (ICMissHistogram) {
 317     st->print_cr("IC Miss Histogram:");
 318     int tot_misses = 0;
 319     for (int i = 0; i < _ICmiss_index; i++) {
 320       st->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 321       tot_misses += _ICmiss_count[i];
 322     }
 323     st->print_cr("Total IC misses: %7d", tot_misses);
 324   }
 325 }
 326 #endif // !PRODUCT
 327 
 328 
 329 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
 330   return x * y;
 331 JRT_END
 332 
 333 
 334 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 335   if (x == min_jlong && y == CONST64(-1)) {
 336     return x;
 337   } else {
 338     return x / y;
 339   }
 340 JRT_END
 341 
 342 
 343 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 344   if (x == min_jlong && y == CONST64(-1)) {
 345     return 0;
 346   } else {

 774   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 775   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 776   JNIHandles::destroy_local(vthread);
 777 JRT_END
 778 #endif // INCLUDE_JVMTI
 779 
  780 // The interpreter code that calls this tracing function is only generated
  781 // when unified logging (UL) is enabled at the right level for the redefine,
  782 // class and obsolete tags. Since obsolete methods are never compiled, we
  783 // don't have to modify the compilers to generate calls to this function.
 784 //
 785 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
 786     JavaThread* thread, Method* method))
 787   if (method->is_obsolete()) {
 788     // We are calling an obsolete method, but this is not necessarily
 789     // an error. Our method could have been redefined just after we
 790     // fetched the Method* from the constant pool.
 791     ResourceMark rm;
 792     log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
 793   }
 794 
 795   LogStreamHandle(Trace, interpreter, bytecode) log;
 796   if (log.is_enabled()) {
 797     ResourceMark rm;
 798     log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
 799               p2i(thread),
 800               (method->is_static() ? "static" : "virtual"),
 801               method->name_and_sig_as_C_string(),
 802               (method->is_native() ? " native" : ""),
 803               (thread->class_being_initialized() != nullptr ? " clinit" : ""),
 804               (method->method_holder()->is_initialized() ? "" : " being_initialized"));
 805   }
 806   return 0;
 807 JRT_END
 808 
 809 // ret_pc points into caller; we are returning caller's exception handler
 810 // for given exception
  811 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
 812 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 813                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 814   assert(nm != nullptr, "must exist");
 815   ResourceMark rm;
 816 
 817 #if INCLUDE_JVMCI
 818   if (nm->is_compiled_by_jvmci()) {
 819     // lookup exception handler for this pc
 820     int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
 821     ExceptionHandlerTable table(nm);
 822     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 823     if (t != nullptr) {
 824       return nm->code_begin() + t->pco();
 825     } else {

1425 
1426   // determine call info & receiver
1427   // note: a) receiver is null for static calls
1428   //       b) an exception is thrown if receiver is null for non-static calls
1429   CallInfo call_info;
1430   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1431   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1432 
1433   NoSafepointVerifier nsv;
1434 
1435   methodHandle callee_method(current, call_info.selected_method());
1436 
1437   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1438          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1439          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1440          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1441          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1442 
1443   assert(!caller_nm->is_unloading(), "It should not be unloading");
1444 

1445   // tracing/debugging/statistics
1446   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1447                  (is_virtual) ? (&_resolve_virtual_ctr) :
1448                                 (&_resolve_static_ctr);
1449   Atomic::inc(addr);
1450 
1451 #ifndef PRODUCT
1452   if (TraceCallFixup) {
1453     ResourceMark rm(current);
1454     tty->print("resolving %s%s (%s) call to",
1455                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1456                Bytecodes::name(invoke_code));
1457     callee_method->print_short_name(tty);
1458     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1459                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1460   }
1461 #endif
1462 
1463   if (invoke_code == Bytecodes::_invokestatic) {
1464     assert(callee_method->method_holder()->is_initialized() ||
1465            callee_method->method_holder()->is_reentrant_initialization(current),
1466            "invalid class initialization state for invoke_static");
1467     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1468       // In order to keep class initialization check, do not patch call
1469       // site for static call when the class is not fully initialized.
1470       // Proper check is enforced by call site re-resolution on every invocation.
1471       //

1487 
1488   // Make sure the callee nmethod does not get deoptimized and removed before
1489   // we are done patching the code.
1490 
1491 
1492   CompiledICLocker ml(caller_nm);
1493   if (is_virtual && !is_optimized) {
1494     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1495     inline_cache->update(&call_info, receiver->klass());
1496   } else {
1497     // Callsite is a direct call - set it to the destination method
1498     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1499     callsite->set(callee_method);
1500   }
1501 
1502   return callee_method;
1503 }
1504 
1505 // Inline caches exist only in compiled code
1506 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1507   PerfTraceTime timer(_perf_ic_miss_total_time);
1508 
1509 #ifdef ASSERT
1510   RegisterMap reg_map(current,
1511                       RegisterMap::UpdateMap::skip,
1512                       RegisterMap::ProcessFrames::include,
1513                       RegisterMap::WalkContinuation::skip);
1514   frame stub_frame = current->last_frame();
1515   assert(stub_frame.is_runtime_frame(), "sanity check");
1516   frame caller_frame = stub_frame.sender(&reg_map);
1517   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1518 #endif /* ASSERT */
1519 
1520   methodHandle callee_method;
1521   JRT_BLOCK
1522     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1523     // Return Method* through TLS
1524     current->set_vm_result_metadata(callee_method());
1525   JRT_BLOCK_END
1526   // return compiled code entry point after potential safepoints
1527   return get_resolved_entry(current, callee_method);
1528 JRT_END
1529 
1530 
1531 // Handle call site that has been made non-entrant
1532 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1533   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1534 
1535   // 6243940 We might end up in here if the callee is deoptimized
1536   // as we race to call it.  We don't want to take a safepoint if
1537   // the caller was interpreted because the caller frame will look
1538   // interpreted to the stack walkers and arguments are now
1539   // "compiled" so it is much better to make this transition
1540   // invisible to the stack walking code. The i2c path will
1541   // place the callee method in the callee_target. It is stashed
 1542   // there because if we tried to find the callee by normal means a safepoint
 1543   // would be possible and we could have trouble gc'ing the compiled args.
1544   RegisterMap reg_map(current,
1545                       RegisterMap::UpdateMap::skip,
1546                       RegisterMap::ProcessFrames::include,
1547                       RegisterMap::WalkContinuation::skip);
1548   frame stub_frame = current->last_frame();
1549   assert(stub_frame.is_runtime_frame(), "sanity check");
1550   frame caller_frame = stub_frame.sender(&reg_map);
1551 
1552   if (caller_frame.is_interpreted_frame() ||
1553       caller_frame.is_entry_frame() ||
1554       caller_frame.is_upcall_stub_frame()) {

1567       // so bypassing it in c2i adapter is benign.
1568       return callee->get_c2i_no_clinit_check_entry();
1569     } else {
1570       return callee->get_c2i_entry();
1571     }
1572   }
1573 
 1574   // Must be the compiled-to-compiled path, which is safe to stack walk
1575   methodHandle callee_method;
1576   JRT_BLOCK
1577     // Force resolving of caller (if we called from compiled frame)
1578     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1579     current->set_vm_result_metadata(callee_method());
1580   JRT_BLOCK_END
1581   // return compiled code entry point after potential safepoints
1582   return get_resolved_entry(current, callee_method);
1583 JRT_END
1584 
1585 // Handle abstract method call
1586 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1587   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1588 
1589   // Verbose error message for AbstractMethodError.
1590   // Get the called method from the invoke bytecode.
1591   vframeStream vfst(current, true);
1592   assert(!vfst.at_end(), "Java frame must exist");
1593   methodHandle caller(current, vfst.method());
1594   Bytecode_invoke invoke(caller, vfst.bci());
1595   DEBUG_ONLY( invoke.verify(); )
1596 
1597   // Find the compiled caller frame.
1598   RegisterMap reg_map(current,
1599                       RegisterMap::UpdateMap::include,
1600                       RegisterMap::ProcessFrames::include,
1601                       RegisterMap::WalkContinuation::skip);
1602   frame stubFrame = current->last_frame();
1603   assert(stubFrame.is_runtime_frame(), "must be");
1604   frame callerFrame = stubFrame.sender(&reg_map);
1605   assert(callerFrame.is_compiled_frame(), "must be");
1606 
1607   // Install exception and return forward entry.
1608   address res = SharedRuntime::throw_AbstractMethodError_entry();

1615       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1616     }
1617   JRT_BLOCK_END
1618   return res;
1619 JRT_END
1620 
1621 // return verified_code_entry if interp_only_mode is not set for the current thread;
1622 // otherwise return c2i entry.
1623 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1624   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1625     // In interp_only_mode we need to go to the interpreted entry
1626     // The c2i won't patch in this mode -- see fixup_callers_callsite
1627     return callee_method->get_c2i_entry();
1628   }
1629   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1630   return callee_method->verified_code_entry();
1631 }
1632 
1633 // resolve a static call and patch code
1634 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current))
1635   PerfTraceTime timer(_perf_resolve_static_total_time);
1636 
1637   methodHandle callee_method;
1638   bool enter_special = false;
1639   JRT_BLOCK
1640     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1641     current->set_vm_result_metadata(callee_method());
1642   JRT_BLOCK_END
1643   // return compiled code entry point after potential safepoints
1644   return get_resolved_entry(current, callee_method);
1645 JRT_END
1646 
1647 // resolve virtual call and update inline cache to monomorphic
1648 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1649   PerfTraceTime timer(_perf_resolve_virtual_total_time);
1650 
1651   methodHandle callee_method;
1652   JRT_BLOCK
1653     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1654     current->set_vm_result_metadata(callee_method());
1655   JRT_BLOCK_END
1656   // return compiled code entry point after potential safepoints
1657   return get_resolved_entry(current, callee_method);
1658 JRT_END
1659 
1660 
1661 // Resolve a virtual call that can be statically bound (e.g., always
1662 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1663 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1664   PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1665 
1666   methodHandle callee_method;
1667   JRT_BLOCK
1668     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1669     current->set_vm_result_metadata(callee_method());
1670   JRT_BLOCK_END
1671   // return compiled code entry point after potential safepoints
1672   return get_resolved_entry(current, callee_method);
1673 JRT_END
1674 
1675 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1676   JavaThread* current = THREAD;
1677   ResourceMark rm(current);
1678   CallInfo call_info;
1679   Bytecodes::Code bc;
1680 
1681   // receiver is null for static calls. An exception is thrown for null
1682   // receivers for non-static calls
1683   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1684 
1685   methodHandle callee_method(current, call_info.selected_method());
1686 

1687   Atomic::inc(&_ic_miss_ctr);
1688 
1689 #ifndef PRODUCT
1690   // Statistics & Tracing
1691   if (TraceCallFixup) {
1692     ResourceMark rm(current);
1693     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1694     callee_method->print_short_name(tty);
1695     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1696   }
1697 
1698   if (ICMissHistogram) {
1699     MutexLocker m(VMStatistic_lock);
1700     RegisterMap reg_map(current,
1701                         RegisterMap::UpdateMap::skip,
1702                         RegisterMap::ProcessFrames::include,
1703                         RegisterMap::WalkContinuation::skip);
1704     frame f = current->last_frame().real_sender(&reg_map); // skip runtime stub
1705     // produce statistics under the lock
1706     trace_ic_miss(f.pc());
1707   }
1708 #endif
1709 

1792             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1793             cdc->set_to_clean();
1794             break;
1795           }
1796 
1797           case relocInfo::virtual_call_type: {
1798             // compiled, dispatched call (which used to call an interpreted method)
1799             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1800             inline_cache->set_to_clean();
1801             break;
1802           }
1803           default:
1804             break;
1805         }
1806       }
1807     }
1808   }
1809 
1810   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1811 


1812   Atomic::inc(&_wrong_method_ctr);
1813 
1814 #ifndef PRODUCT
1815   if (TraceCallFixup) {
1816     ResourceMark rm(current);
1817     tty->print("handle_wrong_method reresolving call to");
1818     callee_method->print_short_name(tty);
1819     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1820   }
1821 #endif
1822 
1823   return callee_method;
1824 }
1825 
1826 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1827   // Ideally the faulting unsafe access would throw the error
1828   // synchronously. For now the faulting instruction is skipped
1829   // over (effectively turning it into a no-op) and an
1830   // asynchronous exception is raised, which the thread will
1831   // handle at a later point. If the instruction is a load it
1832   // will return garbage.
1833 
1834   // Request an async exception.

2092 // This is only called when CheckJNICalls is true, and only
2093 // for virtual thread termination.
2094 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
2095   assert(CheckJNICalls, "Only call this when checking JNI usage");
2096   if (log_is_enabled(Debug, jni)) {
2097     JavaThread* current = JavaThread::current();
2098     int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2099     int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2100     log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2101                    ") exiting with Objects still locked by JNI MonitorEnter.",
2102                    vthread_id, carrier_id);
2103   }
2104 JRT_END
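// [Editorial note -- illustration only, not part of the original file. With
//  -Xlog:jni=debug enabled, the message above renders roughly as (the ids are
//  of course arbitrary):
//    VirtualThread (tid: 73, carrier id: 11) exiting with Objects still locked by JNI MonitorEnter.]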
2105 
2106 #ifndef PRODUCT
2107 
2108 void SharedRuntime::print_statistics() {
2109   ttyLocker ttyl;
2110   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
2111 
2112   SharedRuntime::print_ic_miss_histogram_on(tty);
2113   SharedRuntime::print_counters_on(tty);
2114   AdapterHandlerLibrary::print_statistics_on(tty);
2115 
2116   if (xtty != nullptr)  xtty->tail("statistics");
2117 }
2118 
2119 //void SharedRuntime::print_counters_on(outputStream* st) {
2120 //  // Dump the JRT_ENTRY counters
2121 //  if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2122 //  if (_new_array_ctr)    st->print_cr("%5u new array requires GC", _new_array_ctr);
2123 //  if (_multi2_ctr)       st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2124 //  if (_multi3_ctr)       st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2125 //  if (_multi4_ctr)       st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2126 //  if (_multi5_ctr)       st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2127 //
2128 //  st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2129 //  st->print_cr("%5u wrong method", _wrong_method_ctr);
2130 //  st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2131 //  st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2132 //  st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2133 //
2134 //  if (_mon_enter_stub_ctr)       st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2135 //  if (_mon_exit_stub_ctr)        st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2136 //  if (_mon_enter_ctr)            st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2137 //  if (_mon_exit_ctr)             st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2138 //  if (_partial_subtype_ctr)      st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2139 //  if (_jbyte_array_copy_ctr)     st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2140 //  if (_jshort_array_copy_ctr)    st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2141 //  if (_jint_array_copy_ctr)      st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2142 //  if (_jlong_array_copy_ctr)     st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2143 //  if (_oop_array_copy_ctr)       st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2144 //  if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2145 //  if (_unsafe_array_copy_ctr)    st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2146 //  if (_generic_array_copy_ctr)   st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2147 //  if (_slow_array_copy_ctr)      st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2148 //  if (_find_handler_ctr)         st->print_cr("%5u find exception handler", _find_handler_ctr);
2149 //  if (_rethrow_ctr)              st->print_cr("%5u rethrow handler", _rethrow_ctr);
2150 //  if (_unsafe_set_memory_ctr)    st->print_cr("%5u unsafe set memory ops", _unsafe_set_memory_ctr);
2151 //}
2152 
2153 inline double percent(int64_t x, int64_t y) {
2154   return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2155 }
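// [Editorial note -- worked example, not part of the original file:
//  percent(42, 400) == 10.5, and the MAX2 guard means percent(x, 0) divides by 1
//  instead of 0, so e.g. percent(5, 0) == 500.0 rather than a division by zero.]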
2156 
2157 class MethodArityHistogram {
2158  public:
2159   enum { MAX_ARITY = 256 };
2160  private:
2161   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2162   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2163   static uint64_t _total_compiled_calls;
2164   static uint64_t _max_compiled_calls_per_method;
2165   static int _max_arity;                       // max. arity seen
2166   static int _max_size;                        // max. arg size seen
2167 
2168   static void add_method_to_histogram(nmethod* nm) {
2169     Method* method = (nm == nullptr) ? nullptr : nm->method();
2170     if (method != nullptr) {
2171       ArgumentCount args(method->signature());
2172       int arity   = args.size() + (method->is_static() ? 0 : 1);
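      // [Editorial note -- worked example, not part of the original file: for a
      //  non-static method with signature (JLjava/lang/Object;)I, ArgumentCount
      //  reports 2 parameters, so arity is 2 + 1 (the receiver) = 3.]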

2217     // Take the Compile_lock to protect against changes in the CodeBlob structures
2218     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2219     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2220     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2221     _max_arity = _max_size = 0;
2222     _total_compiled_calls = 0;
2223     _max_compiled_calls_per_method = 0;
2224     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2225     CodeCache::nmethods_do(add_method_to_histogram);
2226     print_histogram();
2227   }
2228 };
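// [Editorial sketch -- not part of the original file. Assuming the collection and
//  printing above run from the class's constructor (not shown in this excerpt),
//  producing the histogram is just a matter of instantiating the class:]
#if 0
  MethodArityHistogram h;   // takes the locks, walks the code cache, prints the histogram
#endif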
2229 
2230 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2231 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2232 uint64_t MethodArityHistogram::_total_compiled_calls;
2233 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2234 int MethodArityHistogram::_max_arity;
2235 int MethodArityHistogram::_max_size;
2236 
2237 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2238   st->print_cr("Calls from compiled code:");
2239   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2240   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2241   int64_t mono_i = _nof_interface_calls;
2242   st->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2243   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2244   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2245   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2246   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2247   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2248   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2249   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2250   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2251   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2252   st->cr();
2253   st->print_cr("Note 1: counter updates are not MT-safe.");
2254   st->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2255   st->print_cr("        %% in nested categories are relative to their category");
2256   st->print_cr("        (and thus add up to more than 100%% with inlining)");
2257   st->cr();
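  // [Editorial note -- worked example of Note 2, not part of the original file:
  //  with _nof_normal_calls == 1000, _nof_megamorphic_calls == 20 and
  //  _nof_inlined_calls == 600, the nested lines read 60% inlined, 98% monomorphic
  //  and 2% megamorphic, each relative to the 1000 virtual calls, so they
  //  intentionally sum to more than 100%.]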

2561   }
2562 #endif // INCLUDE_CDS
2563   if (entry == nullptr) {
2564     assert_lock_strong(AdapterHandlerLibrary_lock);
2565     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2566     if (entry_p != nullptr) {
2567       entry = *entry_p;
2568       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2569              entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2570              fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2571   #ifndef PRODUCT
2572       _runtime_hits++;
2573   #endif
2574     }
2575   }
2576   AdapterFingerPrint::deallocate(fp);
2577   return entry;
2578 }
2579 
2580 #ifndef PRODUCT
2581 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2582   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2583     return sizeof(*key) + sizeof(*a);
2584   };
2585   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2586   ts.print(st, "AdapterHandlerTable");
2587   st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2588                _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2589   int total_hits = _archived_hits + _runtime_hits;
2590   st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2591                _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2592 }
2593 #endif // !PRODUCT
2594 
2595 // ---------------------------------------------------------------------------
2596 // Implementation of AdapterHandlerLibrary
2597 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2598 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2599 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2600 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2601 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2602 #if INCLUDE_CDS
2603 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2604 #endif // INCLUDE_CDS
2605 static const int AdapterHandlerLibrary_size = 16*K;
2606 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2607 
2608 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2609   assert(_buffer != nullptr, "should be initialized");
2610   return _buffer;
2611 }
2612 
2613 static void post_adapter_creation(const AdapterBlob* new_adapter,

3536   assert(found, "Should have found handler");
3537 }
3538 
3539 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3540   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3541   if (get_i2c_entry() != nullptr) {
3542     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3543   }
3544   if (get_c2i_entry() != nullptr) {
3545     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3546   }
3547   if (get_c2i_unverified_entry() != nullptr) {
3548     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3549   }
3550   if (get_c2i_no_clinit_check_entry() != nullptr) {
3551     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3552   }
3553   st->cr();
3554 }
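// [Editorial note -- illustration only, not part of the original file. For a fully
//  populated entry the line printed above has the shape:
//    AHE@<addr>: <fingerprint> i2c: <addr> c2i: <addr> c2iUV: <addr> c2iNCI: <addr>]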
3555 
3556 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3557   assert(current == JavaThread::current(), "pre-condition");
3558   StackOverflow* overflow_state = current->stack_overflow_state();
3559   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3560   overflow_state->set_reserved_stack_activation(current->stack_base());
3561 JRT_END
3562 
3563 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3564   ResourceMark rm(current);
3565   frame activation;
3566   nmethod* nm = nullptr;
3567   int count = 1;
3568 
3569   assert(fr.is_java_frame(), "Must start on Java frame");
3570 
3571   RegisterMap map(JavaThread::current(),
3572                   RegisterMap::UpdateMap::skip,
3573                   RegisterMap::ProcessFrames::skip,
3574                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3575   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {