< prev index next >

src/hotspot/share/runtime/sharedRuntime.cpp

Print this page

  49 #include "metaprogramming/primitiveConversions.hpp"
  50 #include "oops/klass.hpp"
  51 #include "oops/method.inline.hpp"
  52 #include "oops/objArrayKlass.hpp"
  53 #include "oops/oop.inline.hpp"
  54 #include "prims/forte.hpp"
  55 #include "prims/jvmtiExport.hpp"
  56 #include "prims/jvmtiThreadState.hpp"
  57 #include "prims/methodHandles.hpp"
  58 #include "prims/nativeLookup.hpp"
  59 #include "runtime/arguments.hpp"
  60 #include "runtime/atomic.hpp"
  61 #include "runtime/basicLock.inline.hpp"
  62 #include "runtime/frame.inline.hpp"
  63 #include "runtime/handles.inline.hpp"
  64 #include "runtime/init.hpp"
  65 #include "runtime/interfaceSupport.inline.hpp"
  66 #include "runtime/java.hpp"
  67 #include "runtime/javaCalls.hpp"
  68 #include "runtime/jniHandles.inline.hpp"
  69 #include "runtime/perfData.hpp"
  70 #include "runtime/sharedRuntime.hpp"
  71 #include "runtime/stackWatermarkSet.hpp"
  72 #include "runtime/stubRoutines.hpp"
  73 #include "runtime/synchronizer.inline.hpp"
  74 #include "runtime/timerTrace.hpp"
  75 #include "runtime/vframe.inline.hpp"
  76 #include "runtime/vframeArray.hpp"
  77 #include "runtime/vm_version.hpp"

  78 #include "utilities/copy.hpp"
  79 #include "utilities/dtrace.hpp"
  80 #include "utilities/events.hpp"
  81 #include "utilities/globalDefinitions.hpp"
  82 #include "utilities/resourceHash.hpp"
  83 #include "utilities/macros.hpp"
  84 #include "utilities/xmlstream.hpp"
  85 #ifdef COMPILER1
  86 #include "c1/c1_Runtime1.hpp"
  87 #endif
  88 #if INCLUDE_JFR
  89 #include "jfr/jfr.hpp"
  90 #endif
  91 
  92 // Shared runtime stub routines reside in their own unique blob with a
  93 // single entry point
  94 
  95 
// Define the storage for each shared-stub blob field declared by
// SHARED_STUBS_DO; the helper macro is #undef'd immediately after use.
  96 #define SHARED_STUB_FIELD_DEFINE(name, type) \
  97   type        SharedRuntime::BLOB_FIELD_NAME(name);
  98   SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
  99 #undef SHARED_STUB_FIELD_DEFINE
 100 
// Continuation doYield stub; defined separately, not part of SHARED_STUBS_DO.
 101 nmethod*            SharedRuntime::_cont_doYield_stub;
 102 






 103 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
 104 const char *SharedRuntime::_stub_names[] = {
 105   SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
 106 };
 107 
 108 //----------------------------generate_stubs-----------------------------------
 109 void SharedRuntime::generate_initial_stubs() {
 110   // Build this early so it's available for the interpreter.
 111   _throw_StackOverflowError_blob =
 112     generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
 113                              CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
 114 }
 115 
 116 void SharedRuntime::generate_stubs() {
  // Resolution stubs: targets for call sites whose callee must be
  // (re)resolved; each blob wraps the matching SharedRuntime handler.
 117   _wrong_method_blob =
 118     generate_resolve_blob(SharedStubId::wrong_method_id,
 119                           CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
 120   _wrong_method_abstract_blob =
 121     generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
 122                           CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));

 151 
 152   AdapterHandlerLibrary::initialize();
 153 
 154 #if COMPILER2_OR_JVMCI
 155   // Vectors are generated only by C2 and JVMCI.
 156   bool support_wide = is_wide_vector(MaxVectorSize);
 157   if (support_wide) {
  // Safepoint polling handler variant that also preserves wide vector state.
 158     _polling_page_vectors_safepoint_handler_blob =
 159       generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
 160                             CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 161   }
 162 #endif // COMPILER2_OR_JVMCI
  // Safepoint polling handlers; all dispatch to handle_polling_page_exception.
 163   _polling_page_safepoint_handler_blob =
 164     generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
 165                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 166   _polling_page_return_handler_blob =
 167     generate_handler_blob(SharedStubId::polling_page_return_handler_id,
 168                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 169 
 170   generate_deopt_blob();




















































 171 }
 172 
 173 #if INCLUDE_JFR
 174 //------------------------------generate jfr runtime stubs ------
 175 void SharedRuntime::generate_jfr_stubs() {
 176   ResourceMark rm;
 177   const char* timer_msg = "SharedRuntime generate_jfr_stubs";
 178   TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
 179 
 180   _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
 181   _jfr_return_lease_blob = generate_jfr_return_lease();
 182 }
 183 
 184 #endif // INCLUDE_JFR
 185 
 186 #include <math.h>
 187 
 188 // Implementation of SharedRuntime
 189 
 190 #ifndef PRODUCT
 191 // For statistics
// Call-site resolution and inline-cache miss counters.
 192 uint SharedRuntime::_ic_miss_ctr = 0;
 193 uint SharedRuntime::_wrong_method_ctr = 0;
 194 uint SharedRuntime::_resolve_static_ctr = 0;
 195 uint SharedRuntime::_resolve_virtual_ctr = 0;
 196 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;


// Implicit exception counters (null checks and integer division by zero).
 197 uint SharedRuntime::_implicit_null_throws = 0;
 198 uint SharedRuntime::_implicit_div0_throws = 0;
 199 
// 64-bit call profile counters, split by call kind.
 200 int64_t SharedRuntime::_nof_normal_calls = 0;
 201 int64_t SharedRuntime::_nof_inlined_calls = 0;
 202 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 203 int64_t SharedRuntime::_nof_static_calls = 0;
 204 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 205 int64_t SharedRuntime::_nof_interface_calls = 0;
 206 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 207 
// JRT_ENTRY counters (dumped by print_statistics()).
 208 uint SharedRuntime::_new_instance_ctr=0;
 209 uint SharedRuntime::_new_array_ctr=0;
 210 uint SharedRuntime::_multi2_ctr=0;
 211 uint SharedRuntime::_multi3_ctr=0;
 212 uint SharedRuntime::_multi4_ctr=0;
 213 uint SharedRuntime::_multi5_ctr=0;
 214 uint SharedRuntime::_mon_enter_stub_ctr=0;
 215 uint SharedRuntime::_mon_exit_stub_ctr=0;
 216 uint SharedRuntime::_mon_enter_ctr=0;

 230 uint SharedRuntime::_unsafe_set_memory_ctr=0;
 231 
// Inline-cache miss histogram: parallel arrays of call-site pc and count,
// bounded by maxICmiss_count (see trace_ic_miss below).
 232 int     SharedRuntime::_ICmiss_index                    = 0;
 233 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 234 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 235 
 236 
 237 void SharedRuntime::trace_ic_miss(address at) {
 238   for (int i = 0; i < _ICmiss_index; i++) {
 239     if (_ICmiss_at[i] == at) {
 240       _ICmiss_count[i]++;
 241       return;
 242     }
 243   }
 244   int index = _ICmiss_index++;
 245   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 246   _ICmiss_at[index] = at;
 247   _ICmiss_count[index] = 1;
 248 }
 249 
 250 void SharedRuntime::print_ic_miss_histogram() {
 251   if (ICMissHistogram) {
 252     tty->print_cr("IC Miss Histogram:");
 253     int tot_misses = 0;
 254     for (int i = 0; i < _ICmiss_index; i++) {
 255       tty->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 256       tot_misses += _ICmiss_count[i];
 257     }
 258     tty->print_cr("Total IC misses: %7d", tot_misses);
 259   }
 260 }
 261 #endif // PRODUCT
 262 
 263 
 264 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
 265   return x * y;
 266 JRT_END
 267 
 268 
 269 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 270   if (x == min_jlong && y == CONST64(-1)) {
 271     return x;
 272   } else {
 273     return x / y;
 274   }
 275 JRT_END
 276 
 277 
 278 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 279   if (x == min_jlong && y == CONST64(-1)) {
 280     return 0;
 281   } else {

 708   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 709   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 710   JNIHandles::destroy_local(vthread);
 711 JRT_END
 712 #endif // INCLUDE_JVMTI
 713 
 714 // The interpreter code to call this tracing function is only
 715 // called/generated when UL is on for redefine, class and has the right level
 716 // and tags. Since obsolete methods are never compiled, we don't have
 717 // to modify the compilers to generate calls to this function.
 718 //
 719 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
 720     JavaThread* thread, Method* method))
 721   if (method->is_obsolete()) {
 722     // We are calling an obsolete method, but this is not necessarily
 723     // an error. Our method could have been redefined just after we
 724     // fetched the Method* from the constant pool.
 725     ResourceMark rm;
 726     log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
 727   }












  // Nothing else to do; unconditionally return 0.
 728   return 0;
 729 JRT_END
 730 
 731 // ret_pc points into caller; we are returning caller's exception handler
 732 // for given exception
 733 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
 734 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 735                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 736   assert(nm != nullptr, "must exist");
 737   ResourceMark rm;
 738 
 739 #if INCLUDE_JVMCI
 740   if (nm->is_compiled_by_jvmci()) {
 741     // lookup exception handler for this pc
 742     int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
 743     ExceptionHandlerTable table(nm);
 744     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 745     if (t != nullptr) {
 746       return nm->code_begin() + t->pco();
 747     } else {

1347 
1348   // determine call info & receiver
1349   // note: a) receiver is null for static calls
1350   //       b) an exception is thrown if receiver is null for non-static calls
1351   CallInfo call_info;
1352   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1353   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1354 
1355   NoSafepointVerifier nsv;
1356 
1357   methodHandle callee_method(current, call_info.selected_method());
1358 
1359   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1360          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1361          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1362          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1363          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1364 
1365   assert(!caller_nm->is_unloading(), "It should not be unloading");
1366 
1367 #ifndef PRODUCT
1368   // tracing/debugging/statistics
1369   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1370                  (is_virtual) ? (&_resolve_virtual_ctr) :
1371                                 (&_resolve_static_ctr);
1372   Atomic::inc(addr);
1373 

1374   if (TraceCallFixup) {
1375     ResourceMark rm(current);
1376     tty->print("resolving %s%s (%s) call to",
1377                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1378                Bytecodes::name(invoke_code));
1379     callee_method->print_short_name(tty);
1380     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1381                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1382   }
1383 #endif
1384 
1385   if (invoke_code == Bytecodes::_invokestatic) {
1386     assert(callee_method->method_holder()->is_initialized() ||
1387            callee_method->method_holder()->is_reentrant_initialization(current),
1388            "invalid class initialization state for invoke_static");
1389     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1390       // In order to keep class initialization check, do not patch call
1391       // site for static call when the class is not fully initialized.
1392       // Proper check is enforced by call site re-resolution on every invocation.
1393       //

1409 
1410   // Make sure the callee nmethod does not get deoptimized and removed before
1411   // we are done patching the code.
1412 
1413 
1414   CompiledICLocker ml(caller_nm);
1415   if (is_virtual && !is_optimized) {
1416     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1417     inline_cache->update(&call_info, receiver->klass());
1418   } else {
1419     // Callsite is a direct call - set it to the destination method
1420     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1421     callsite->set(callee_method);
1422   }
1423 
1424   return callee_method;
1425 }
1426 
1427 // Inline caches exist only in compiled code
1428 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))


  // Debug-only sanity check: the frame that called this runtime stub must be
  // compiled code (not interpreted, VM-entry, or upcall stub).
1429 #ifdef ASSERT
1430   RegisterMap reg_map(current,
1431                       RegisterMap::UpdateMap::skip,
1432                       RegisterMap::ProcessFrames::include,
1433                       RegisterMap::WalkContinuation::skip);
1434   frame stub_frame = current->last_frame();
1435   assert(stub_frame.is_runtime_frame(), "sanity check");
1436   frame caller_frame = stub_frame.sender(&reg_map);
1437   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1438 #endif /* ASSERT */
1439 
1440   methodHandle callee_method;
1441   JRT_BLOCK
1442     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1443     // Return Method* through TLS
1444     current->set_vm_result_2(callee_method());
1445   JRT_BLOCK_END
1446   // return compiled code entry point after potential safepoints
1447   return get_resolved_entry(current, callee_method);
1448 JRT_END
1449 
1450 
1451 // Handle call site that has been made non-entrant
1452 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))


1453   // 6243940 We might end up in here if the callee is deoptimized
1454   // as we race to call it.  We don't want to take a safepoint if
1455   // the caller was interpreted because the caller frame will look
1456   // interpreted to the stack walkers and arguments are now
1457   // "compiled" so it is much better to make this transition
1458   // invisible to the stack walking code. The i2c path will
1459   // place the callee method in the callee_target. It is stashed
1460   // there because if we try and find the callee by normal means a
1461   // safepoint is possible and have trouble gc'ing the compiled args.
1462   RegisterMap reg_map(current,
1463                       RegisterMap::UpdateMap::skip,
1464                       RegisterMap::ProcessFrames::include,
1465                       RegisterMap::WalkContinuation::skip);
1466   frame stub_frame = current->last_frame();
1467   assert(stub_frame.is_runtime_frame(), "sanity check");
1468   frame caller_frame = stub_frame.sender(&reg_map);
1469 
  // Non-compiled caller: dispatch through a c2i adapter entry without
  // taking a safepoint (see comment above).
1470   if (caller_frame.is_interpreted_frame() ||
1471       caller_frame.is_entry_frame() ||
1472       caller_frame.is_upcall_stub_frame()) {

1485       // so bypassing it in c2i adapter is benign.
1486       return callee->get_c2i_no_clinit_check_entry();
1487     } else {
1488       return callee->get_c2i_entry();
1489     }
1490   }
1491 
1492   // Must be compiled to compiled path which is safe to stackwalk
1493   methodHandle callee_method;
1494   JRT_BLOCK
1495     // Force resolving of caller (if we called from compiled frame)
1496     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1497     current->set_vm_result_2(callee_method());
1498   JRT_BLOCK_END
1499   // return compiled code entry point after potential safepoints
1500   return get_resolved_entry(current, callee_method);
1501 JRT_END
1502 
1503 // Handle abstract method call
1504 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))


  // Produce the verbose AbstractMethodError and return the exception-throw
  // stub entry so the caller forwards into exception handling.
1505   // Verbose error message for AbstractMethodError.
1506   // Get the called method from the invoke bytecode.
1507   vframeStream vfst(current, true);
1508   assert(!vfst.at_end(), "Java frame must exist");
1509   methodHandle caller(current, vfst.method());
1510   Bytecode_invoke invoke(caller, vfst.bci());
1511   DEBUG_ONLY( invoke.verify(); )
1512 
1513   // Find the compiled caller frame.
1514   RegisterMap reg_map(current,
1515                       RegisterMap::UpdateMap::include,
1516                       RegisterMap::ProcessFrames::include,
1517                       RegisterMap::WalkContinuation::skip);
1518   frame stubFrame = current->last_frame();
1519   assert(stubFrame.is_runtime_frame(), "must be");
1520   frame callerFrame = stubFrame.sender(&reg_map);
1521   assert(callerFrame.is_compiled_frame(), "must be");
1522 
1523   // Install exception and return forward entry.
1524   address res = SharedRuntime::throw_AbstractMethodError_entry();

1531       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1532     }
1533   JRT_BLOCK_END
1534   return res;
1535 JRT_END
1536 
1537 // return verified_code_entry if interp_only_mode is not set for the current thread;
1538 // otherwise return c2i entry.
1539 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1540   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1541     // In interp_only_mode we need to go to the interpreted entry
1542     // The c2i won't patch in this mode -- see fixup_callers_callsite
1543     return callee_method->get_c2i_entry();
1544   }
1545   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1546   return callee_method->verified_code_entry();
1547 }
1548 
1549 // resolve a static call and patch code
1550 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))


1551   methodHandle callee_method;
1552   bool enter_special = false;
1553   JRT_BLOCK
1554     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1555     current->set_vm_result_2(callee_method());
1556   JRT_BLOCK_END
1557   // return compiled code entry point after potential safepoints
1558   return get_resolved_entry(current, callee_method);
1559 JRT_END
1560 
1561 // resolve virtual call and update inline cache to monomorphic
1562 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))


1563   methodHandle callee_method;
1564   JRT_BLOCK
  // is_virtual == true, is_optimized == false.
1565     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
  // Return Method* through TLS.
1566     current->set_vm_result_2(callee_method());
1567   JRT_BLOCK_END
1568   // return compiled code entry point after potential safepoints
1569   return get_resolved_entry(current, callee_method);
1570 JRT_END
1571 
1572 
1573 // Resolve a virtual call that can be statically bound (e.g., always
1574 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1575 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))


1576   methodHandle callee_method;
1577   JRT_BLOCK
  // is_virtual == true, is_optimized == true.
1578     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
  // Return Method* through TLS.
1579     current->set_vm_result_2(callee_method());
1580   JRT_BLOCK_END
1581   // return compiled code entry point after potential safepoints
1582   return get_resolved_entry(current, callee_method);
1583 JRT_END
1584 
1585 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1586   JavaThread* current = THREAD;
1587   ResourceMark rm(current);
1588   CallInfo call_info;
1589   Bytecodes::Code bc;
1590 
1591   // receiver is null for static calls. An exception is thrown for null
1592   // receivers for non-static calls
1593   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1594 
1595   methodHandle callee_method(current, call_info.selected_method());
1596 
1597 #ifndef PRODUCT
1598   Atomic::inc(&_ic_miss_ctr);
1599 

1600   // Statistics & Tracing
1601   if (TraceCallFixup) {
1602     ResourceMark rm(current);
1603     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1604     callee_method->print_short_name(tty);
1605     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1606   }
1607 
1608   if (ICMissHistogram) {
1609     MutexLocker m(VMStatistic_lock);
1610     RegisterMap reg_map(current,
1611                         RegisterMap::UpdateMap::skip,
1612                         RegisterMap::ProcessFrames::include,
1613                         RegisterMap::WalkContinuation::skip);
1614     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1615     // produce statistics under the lock
1616     trace_ic_miss(f.pc());
1617   }
1618 #endif
1619 

1702             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1703             cdc->set_to_clean();
1704             break;
1705           }
1706 
1707           case relocInfo::virtual_call_type: {
1708             // compiled, dispatched call (which used to call an interpreted method)
1709             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1710             inline_cache->set_to_clean();
1711             break;
1712           }
1713           default:
1714             break;
1715         }
1716       }
1717     }
1718   }
1719 
1720   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1721 
1722 
1723 #ifndef PRODUCT
1724   Atomic::inc(&_wrong_method_ctr);
1725 

1726   if (TraceCallFixup) {
1727     ResourceMark rm(current);
1728     tty->print("handle_wrong_method reresolving call to");
1729     callee_method->print_short_name(tty);
1730     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1731   }
1732 #endif
1733 
1734   return callee_method;
1735 }
1736 
1737 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1738   // The faulting unsafe accesses should be changed to throw the error
1739   // synchronously instead. Meanwhile the faulting instruction will be
1740   // skipped over (effectively turning it into a no-op) and an
1741   // asynchronous exception will be raised which the thread will
1742   // handle at a later point. If the instruction is a load it will
1743   // return garbage.
1744 
1745   // Request an async exception.

2003 // This is only called when CheckJNICalls is true, and only
2004 // for virtual thread termination.
2005 JRT_LEAF(void,  SharedRuntime::log_jni_monitor_still_held())
2006   assert(CheckJNICalls, "Only call this when checking JNI usage");
2007   if (log_is_enabled(Debug, jni)) {
2008     JavaThread* current = JavaThread::current();
2009     int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2010     int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2011     log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2012                    ") exiting with Objects still locked by JNI MonitorEnter.",
2013                    vthread_id, carrier_id);
2014   }
2015 JRT_END
2016 
2017 #ifndef PRODUCT
2018 
// Dump SharedRuntime's collected counters to tty (non-product builds),
// wrapped in an XML element when xtty logging is active.
2019 void SharedRuntime::print_statistics() {
2020   ttyLocker ttyl;
2021   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
2022 
2023   SharedRuntime::print_ic_miss_histogram();
2024 
2025   // Dump the JRT_ENTRY counters
2026   if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
2027   if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
2028   if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2029   if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2030   if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2031   if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2032 
  // Resolution/IC-miss counters are printed unconditionally.
2033   tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2034   tty->print_cr("%5u wrong method", _wrong_method_ctr);
2035   tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2036   tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2037   tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2038 
2039   if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2040   if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2041   if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2042   if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2043   if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2044   if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2045   if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2046   if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
2047   if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2048   if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2049   if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2050   if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2051   if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2052   if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2053   if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
2054   if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
2055   if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2056 
2057   AdapterHandlerLibrary::print_statistics();
2058 
2059   if (xtty != nullptr)  xtty->tail("statistics");
2060 }
2061 


































// Express x as a percentage of y. Denominators below one are clamped to one
// so callers never divide by zero (same effect as MAX2(y, (int64_t)1)).
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y > (int64_t)1) ? y : (int64_t)1;
  return 100.0 * (double)x / (double)denom;
}
2065 
// Diagnostic (non-product) histogram of method arities and argument sizes
// collected over all nmethods in the code cache.
2066 class MethodArityHistogram {
2067  public:
2068   enum { MAX_ARITY = 256 };
2069  private:
2070   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2071   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2072   static uint64_t _total_compiled_calls;
2073   static uint64_t _max_compiled_calls_per_method;
2074   static int _max_arity;                       // max. arity seen
2075   static int _max_size;                        // max. arg size seen
2076 
  // Fold one nmethod into the histograms (invoked via CodeCache::nmethods_do).
2077   static void add_method_to_histogram(nmethod* nm) {
2078     Method* method = (nm == nullptr) ? nullptr : nm->method();
2079     if (method != nullptr) {
2080       ArgumentCount args(method->signature());
  // Receiver counts toward arity for non-static methods.
2081       int arity   = args.size() + (method->is_static() ? 0 : 1);

2126     // Take the Compile_lock to protect against changes in the CodeBlob structures
2127     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2128     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2129     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  // Reset all accumulators before walking the code cache.
2130     _max_arity = _max_size = 0;
2131     _total_compiled_calls = 0;
2132     _max_compiled_calls_per_method = 0;
2133     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2134     CodeCache::nmethods_do(add_method_to_histogram);
2135     print_histogram();
2136   }
2137 };
2138 
// Storage for MethodArityHistogram's static counters (zero-initialized).
2139 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2140 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2141 uint64_t MethodArityHistogram::_total_compiled_calls;
2142 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2143 int MethodArityHistogram::_max_arity;
2144 int MethodArityHistogram::_max_size;
2145 
2146 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2147   tty->print_cr("Calls from compiled code:");
2148   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2149   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2150   int64_t mono_i = _nof_interface_calls;
2151   tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2152   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2153   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2154   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2155   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2156   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2157   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2158   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2159   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2160   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2161   tty->cr();
2162   tty->print_cr("Note 1: counter updates are not MT-safe.");
2163   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2164   tty->print_cr("        %% in nested categories are relative to their category");
2165   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2166   tty->cr();

2379                   AdapterFingerPrint::equals>;
2380 static AdapterHandlerTable* _adapter_handler_table;
2381 
2382 // Find an entry with the same fingerprint if it exists
2383 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2384   NOT_PRODUCT(_lookups++);
2385   assert_lock_strong(AdapterHandlerLibrary_lock);
2386   AdapterFingerPrint fp(total_args_passed, sig_bt);
2387   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2388   if (entry != nullptr) {
2389 #ifndef PRODUCT
2390     if (fp.is_compact()) _compact++;
2391     _hits++;
2392 #endif
2393     return *entry;
2394   }
2395   return nullptr;
2396 }
2397 
2398 #ifndef PRODUCT
2399 static void print_table_statistics() {
2400   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2401     return sizeof(*key) + sizeof(*a);
2402   };
2403   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2404   ts.print(tty, "AdapterHandlerTable");
2405   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2406                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2407   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2408                 _lookups, _equals, _hits, _compact);
2409 }
2410 #endif
2411 
2412 // ---------------------------------------------------------------------------
2413 // Implementation of AdapterHandlerLibrary
// Pre-built handlers for common signatures; populated during initialization.
2414 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2415 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2416 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2417 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2418 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2419 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
// Size of the shared scratch buffer used when generating adapters.
2420 const int AdapterHandlerLibrary_size = 16*K;
2421 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2422 
// Accessor for the shared BufferBlob used while generating adapter code.
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  return _buffer;
}
2426 
2427 static void post_adapter_creation(const AdapterBlob* new_adapter,
2428                                   const AdapterHandlerEntry* entry) {
2429   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2430     char blob_id[256];

3078   assert(found, "Should have found handler");
3079 }
3080 
3081 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3082   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3083   if (get_i2c_entry() != nullptr) {
3084     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3085   }
3086   if (get_c2i_entry() != nullptr) {
3087     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3088   }
3089   if (get_c2i_unverified_entry() != nullptr) {
3090     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3091   }
3092   if (get_c2i_no_clinit_check_entry() != nullptr) {
3093     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3094   }
3095   st->cr();
3096 }
3097 
#ifndef PRODUCT

// Dump adapter-handler-table statistics (sizes, lookups, hits) to tty.
// Not built in PRODUCT.
void AdapterHandlerLibrary::print_statistics() {
  print_table_statistics();
}

#endif /* PRODUCT */
3105 
// Re-enable the current thread's reserved stack zone and reset the
// reserved-stack activation to the stack base.
JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
  assert(current == JavaThread::current(), "pre-condition");
  StackOverflow* overflow_state = current->stack_overflow_state();
  overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
  overflow_state->set_reserved_stack_activation(current->stack_base());
JRT_END
3112 
3113 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3114   ResourceMark rm(current);
3115   frame activation;
3116   nmethod* nm = nullptr;
3117   int count = 1;
3118 
3119   assert(fr.is_java_frame(), "Must start on Java frame");
3120 
3121   RegisterMap map(JavaThread::current(),
3122                   RegisterMap::UpdateMap::skip,
3123                   RegisterMap::ProcessFrames::skip,
3124                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3125   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {

  49 #include "metaprogramming/primitiveConversions.hpp"
  50 #include "oops/klass.hpp"
  51 #include "oops/method.inline.hpp"
  52 #include "oops/objArrayKlass.hpp"
  53 #include "oops/oop.inline.hpp"
  54 #include "prims/forte.hpp"
  55 #include "prims/jvmtiExport.hpp"
  56 #include "prims/jvmtiThreadState.hpp"
  57 #include "prims/methodHandles.hpp"
  58 #include "prims/nativeLookup.hpp"
  59 #include "runtime/arguments.hpp"
  60 #include "runtime/atomic.hpp"
  61 #include "runtime/basicLock.inline.hpp"
  62 #include "runtime/frame.inline.hpp"
  63 #include "runtime/handles.inline.hpp"
  64 #include "runtime/init.hpp"
  65 #include "runtime/interfaceSupport.inline.hpp"
  66 #include "runtime/java.hpp"
  67 #include "runtime/javaCalls.hpp"
  68 #include "runtime/jniHandles.inline.hpp"
  69 #include "runtime/perfData.inline.hpp"
  70 #include "runtime/sharedRuntime.hpp"
  71 #include "runtime/stackWatermarkSet.hpp"
  72 #include "runtime/stubRoutines.hpp"
  73 #include "runtime/synchronizer.inline.hpp"
  74 #include "runtime/timerTrace.hpp"
  75 #include "runtime/vframe.inline.hpp"
  76 #include "runtime/vframeArray.hpp"
  77 #include "runtime/vm_version.hpp"
  78 #include "services/management.hpp"
  79 #include "utilities/copy.hpp"
  80 #include "utilities/dtrace.hpp"
  81 #include "utilities/events.hpp"
  82 #include "utilities/globalDefinitions.hpp"
  83 #include "utilities/resourceHash.hpp"
  84 #include "utilities/macros.hpp"
  85 #include "utilities/xmlstream.hpp"
  86 #ifdef COMPILER1
  87 #include "c1/c1_Runtime1.hpp"
  88 #endif
  89 #if INCLUDE_JFR
  90 #include "jfr/jfr.hpp"
  91 #endif
  92 
  93 // Shared runtime stub routines reside in their own unique blob with a
  94 // single entry point
  95 
  96 
  97 #define SHARED_STUB_FIELD_DEFINE(name, type) \
  98   type        SharedRuntime::BLOB_FIELD_NAME(name);
  99   SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
 100 #undef SHARED_STUB_FIELD_DEFINE
 101 
 102 nmethod*            SharedRuntime::_cont_doYield_stub;
 103 
 104 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
 105 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time     = nullptr;
 106 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time      = nullptr;
 107 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
 108 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time             = nullptr;
 109 
 110 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
 111 const char *SharedRuntime::_stub_names[] = {
 112   SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
 113 };
 114 
 115 //----------------------------generate_stubs-----------------------------------
// Generate only the StackOverflowError throw stub; the remaining shared
// stubs are produced later in generate_stubs().
void SharedRuntime::generate_initial_stubs() {
  // Build this early so it's available for the interpreter.
  _throw_StackOverflowError_blob =
    generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
                             CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
}
 122 
 123 void SharedRuntime::generate_stubs() {
 124   _wrong_method_blob =
 125     generate_resolve_blob(SharedStubId::wrong_method_id,
 126                           CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
 127   _wrong_method_abstract_blob =
 128     generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
 129                           CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));

 158 
 159   AdapterHandlerLibrary::initialize();
 160 
 161 #if COMPILER2_OR_JVMCI
 162   // Vectors are generated only by C2 and JVMCI.
 163   bool support_wide = is_wide_vector(MaxVectorSize);
 164   if (support_wide) {
 165     _polling_page_vectors_safepoint_handler_blob =
 166       generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
 167                             CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 168   }
 169 #endif // COMPILER2_OR_JVMCI
 170   _polling_page_safepoint_handler_blob =
 171     generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
 172                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 173   _polling_page_return_handler_blob =
 174     generate_handler_blob(SharedStubId::polling_page_return_handler_id,
 175                           CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
 176 
 177   generate_deopt_blob();
 178 
 179   if (UsePerfData) {
 180     EXCEPTION_MARK;
 181     NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resovle_opt_virtual_call");
 182     NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time,     SUN_CI, "resovle_virtual_call");
 183     NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time,      SUN_CI, "resovle_static_call");
 184     NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
 185     NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time ,            SUN_CI, "ic_miss");
 186     if (HAS_PENDING_EXCEPTION) {
 187       vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
 188     }
 189   }
 190 }
 191 
// Print the shared-runtime call-resolution counters on 'st'.
// Each line reports elapsed and thread tick totals (converted to
// microseconds) plus the number of recorded events. Requires -XX:+UsePerfData;
// otherwise a placeholder line is printed.
void SharedRuntime::print_counters_on(outputStream* st) {
  st->print_cr("SharedRuntime:");
  if (UsePerfData) {
    st->print_cr("  resolve_opt_virtual_call: " JLONG_FORMAT_W(6) "us (elapsed) " JLONG_FORMAT_W(6) "us (thread) / %5d events",
                 _perf_resolve_opt_virtual_total_time->elapsed_counter_value_us(),
                 _perf_resolve_opt_virtual_total_time->thread_counter_value_us(),
                 _resolve_opt_virtual_ctr);
    st->print_cr("  resolve_virtual_call:     " JLONG_FORMAT_W(6) "us (elapsed) " JLONG_FORMAT_W(6) "us (thread) / %5d events",
                 _perf_resolve_virtual_total_time->elapsed_counter_value_us(),
                 _perf_resolve_virtual_total_time->thread_counter_value_us(),
                 _resolve_virtual_ctr);
    st->print_cr("  resolve_static_call:      " JLONG_FORMAT_W(6) "us (elapsed) " JLONG_FORMAT_W(6) "us (thread) / %5d events",
                 _perf_resolve_static_total_time->elapsed_counter_value_us(),
                 _perf_resolve_static_total_time->thread_counter_value_us(),
                 _resolve_static_ctr);
    st->print_cr("  handle_wrong_method:      " JLONG_FORMAT_W(6) "us (elapsed) " JLONG_FORMAT_W(6) "us (thread) / %5d events",
                 _perf_handle_wrong_method_total_time->elapsed_counter_value_us(),
                 _perf_handle_wrong_method_total_time->thread_counter_value_us(),
                 _wrong_method_ctr);
    st->print_cr("  ic_miss:                  " JLONG_FORMAT_W(6) "us (elapsed) " JLONG_FORMAT_W(6) "us (thread) / %5d events",
                 _perf_ic_miss_total_time->elapsed_counter_value_us(),
                 _perf_ic_miss_total_time->thread_counter_value_us(),
                 _ic_miss_ctr);

    // Combined totals across all five counters, converted from ticks to us.
    jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_virtual_total_time->elapsed_counter_value() +
                                                          _perf_resolve_static_total_time->elapsed_counter_value() +
                                                          _perf_handle_wrong_method_total_time->elapsed_counter_value() +
                                                          _perf_ic_miss_total_time->elapsed_counter_value());
    jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
                                                          _perf_resolve_virtual_total_time->thread_counter_value() +
                                                          _perf_resolve_static_total_time->thread_counter_value() +
                                                          _perf_handle_wrong_method_total_time->thread_counter_value() +
                                                          _perf_ic_miss_total_time->thread_counter_value());
    st->print_cr("Total:                      " JLONG_FORMAT_W(5) "us (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_elapsed_time_us, total_thread_time_us);
  } else {
    st->print_cr("  no data (UsePerfData is turned off)");
  }
}
 231 
#if INCLUDE_JFR
//------------------------------generate jfr runtime stubs ------
// Generate the JFR checkpoint-write and lease-return stub blobs.
// Generation time is reported under -Xlog:startuptime.
void SharedRuntime::generate_jfr_stubs() {
  ResourceMark rm;
  const char* timer_msg = "SharedRuntime generate_jfr_stubs";
  TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));

  _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
  _jfr_return_lease_blob = generate_jfr_return_lease();
}

#endif // INCLUDE_JFR
 244 
 245 #include <math.h>
 246 
 247 // Implementation of SharedRuntime
 248 

 249 // For statistics
 250 uint SharedRuntime::_ic_miss_ctr = 0;
 251 uint SharedRuntime::_wrong_method_ctr = 0;
 252 uint SharedRuntime::_resolve_static_ctr = 0;
 253 uint SharedRuntime::_resolve_virtual_ctr = 0;
 254 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
 255 
 256 #ifndef PRODUCT
 257 uint SharedRuntime::_implicit_null_throws = 0;
 258 uint SharedRuntime::_implicit_div0_throws = 0;
 259 
 260 int64_t SharedRuntime::_nof_normal_calls = 0;
 261 int64_t SharedRuntime::_nof_inlined_calls = 0;
 262 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 263 int64_t SharedRuntime::_nof_static_calls = 0;
 264 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 265 int64_t SharedRuntime::_nof_interface_calls = 0;
 266 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 267 
 268 uint SharedRuntime::_new_instance_ctr=0;
 269 uint SharedRuntime::_new_array_ctr=0;
 270 uint SharedRuntime::_multi2_ctr=0;
 271 uint SharedRuntime::_multi3_ctr=0;
 272 uint SharedRuntime::_multi4_ctr=0;
 273 uint SharedRuntime::_multi5_ctr=0;
 274 uint SharedRuntime::_mon_enter_stub_ctr=0;
 275 uint SharedRuntime::_mon_exit_stub_ctr=0;
 276 uint SharedRuntime::_mon_enter_ctr=0;

 290 uint SharedRuntime::_unsafe_set_memory_ctr=0;
 291 
 292 int     SharedRuntime::_ICmiss_index                    = 0;
 293 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 294 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 295 
 296 
 297 void SharedRuntime::trace_ic_miss(address at) {
 298   for (int i = 0; i < _ICmiss_index; i++) {
 299     if (_ICmiss_at[i] == at) {
 300       _ICmiss_count[i]++;
 301       return;
 302     }
 303   }
 304   int index = _ICmiss_index++;
 305   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 306   _ICmiss_at[index] = at;
 307   _ICmiss_count[index] = 1;
 308 }
 309 
 310 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
 311   if (ICMissHistogram) {
 312     st->print_cr("IC Miss Histogram:");
 313     int tot_misses = 0;
 314     for (int i = 0; i < _ICmiss_index; i++) {
 315       st->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 316       tot_misses += _ICmiss_count[i];
 317     }
 318     st->print_cr("Total IC misses: %7d", tot_misses);
 319   }
 320 }
 321 #endif // !PRODUCT
 322 
 323 
// 64-bit multiply helper for platforms without a native long multiply.
// Note the parameter list arrives as (y, x); multiplication is commutative
// so the order is irrelevant here.
JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
  return x * y;
JRT_END
 327 
 328 
 329 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 330   if (x == min_jlong && y == CONST64(-1)) {
 331     return x;
 332   } else {
 333     return x / y;
 334   }
 335 JRT_END
 336 
 337 
 338 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 339   if (x == min_jlong && y == CONST64(-1)) {
 340     return 0;
 341   } else {

 768   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 769   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 770   JNIHandles::destroy_local(vthread);
 771 JRT_END
 772 #endif // INCLUDE_JVMTI
 773 
 774 // The interpreter code to call this tracing function is only
 775 // called/generated when UL is on for redefine, class and has the right level
 776 // and tags. Since obsolete methods are never compiled, we don't have
 777 // to modify the compilers to generate calls to this function.
 778 //
// Trace a method entry from the interpreter for RedefineClasses logging.
// Logs a note when the called method is obsolete and, at Trace level for
// interpreter+bytecode, a one-line description of the entered method.
// Always returns 0; the return value carries no information.
JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
    JavaThread* thread, Method* method))
  if (method->is_obsolete()) {
    // We are calling an obsolete method, but this is not necessarily
    // an error. Our method could have been redefined just after we
    // fetched the Method* from the constant pool.
    ResourceMark rm;
    log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
  }

  LogStreamHandle(Trace, interpreter, bytecode) log;
  if (log.is_enabled()) {
    // ResourceMark scopes the C-string produced by name_and_sig_as_C_string.
    ResourceMark rm;
    log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
              p2i(thread),
              (method->is_static() ? "static" : "virtual"),
              method->name_and_sig_as_C_string(),
              (method->is_native() ? " native" : ""),
              (thread->class_being_initialized() != nullptr ? " clinit" : ""),
              (method->method_holder()->is_initialized() ? "" : " being_initialized"));
  }
  return 0;
JRT_END
 802 
 803 // ret_pc points into caller; we are returning caller's exception handler
 804 // for given exception
 805 // Note that the implementation of this method assumes it's only called when an exception has actually occured
 806 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 807                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 808   assert(nm != nullptr, "must exist");
 809   ResourceMark rm;
 810 
 811 #if INCLUDE_JVMCI
 812   if (nm->is_compiled_by_jvmci()) {
 813     // lookup exception handler for this pc
 814     int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
 815     ExceptionHandlerTable table(nm);
 816     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 817     if (t != nullptr) {
 818       return nm->code_begin() + t->pco();
 819     } else {

1419 
1420   // determine call info & receiver
1421   // note: a) receiver is null for static calls
1422   //       b) an exception is thrown if receiver is null for non-static calls
1423   CallInfo call_info;
1424   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1425   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1426 
1427   NoSafepointVerifier nsv;
1428 
1429   methodHandle callee_method(current, call_info.selected_method());
1430 
1431   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1432          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1433          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1434          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1435          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1436 
1437   assert(!caller_nm->is_unloading(), "It should not be unloading");
1438 

1439   // tracing/debugging/statistics
1440   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1441                  (is_virtual) ? (&_resolve_virtual_ctr) :
1442                                 (&_resolve_static_ctr);
1443   Atomic::inc(addr);
1444 
1445 #ifndef PRODUCT
1446   if (TraceCallFixup) {
1447     ResourceMark rm(current);
1448     tty->print("resolving %s%s (%s) call to",
1449                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1450                Bytecodes::name(invoke_code));
1451     callee_method->print_short_name(tty);
1452     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1453                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1454   }
1455 #endif
1456 
1457   if (invoke_code == Bytecodes::_invokestatic) {
1458     assert(callee_method->method_holder()->is_initialized() ||
1459            callee_method->method_holder()->is_reentrant_initialization(current),
1460            "invalid class initialization state for invoke_static");
1461     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1462       // In order to keep class initialization check, do not patch call
1463       // site for static call when the class is not fully initialized.
1464       // Proper check is enforced by call site re-resolution on every invocation.
1465       //

1481 
1482   // Make sure the callee nmethod does not get deoptimized and removed before
1483   // we are done patching the code.
1484 
1485 
1486   CompiledICLocker ml(caller_nm);
1487   if (is_virtual && !is_optimized) {
1488     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1489     inline_cache->update(&call_info, receiver->klass());
1490   } else {
1491     // Callsite is a direct call - set it to the destination method
1492     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1493     callsite->set(callee_method);
1494   }
1495 
1496   return callee_method;
1497 }
1498 
// Inline caches exist only in compiled code
// Resolve an inline-cache miss: find the actual callee, stash the Method*
// in TLS for the stub, and return the resolved code entry point.
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
  PerfTraceTime timer(_perf_ic_miss_total_time);

#ifdef ASSERT
  // Sanity check: an IC miss can only come from a compiled caller frame,
  // never from an interpreted, entry, or upcall-stub frame.
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stub_frame = current->last_frame();
  assert(stub_frame.is_runtime_frame(), "sanity check");
  frame caller_frame = stub_frame.sender(&reg_map);
  assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
#endif /* ASSERT */

  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
    // Return Method* through TLS
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1523 
1524 
1525 // Handle call site that has been made non-entrant
1526 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1527   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1528 
1529   // 6243940 We might end up in here if the callee is deoptimized
1530   // as we race to call it.  We don't want to take a safepoint if
1531   // the caller was interpreted because the caller frame will look
1532   // interpreted to the stack walkers and arguments are now
1533   // "compiled" so it is much better to make this transition
1534   // invisible to the stack walking code. The i2c path will
1535   // place the callee method in the callee_target. It is stashed
1536   // there because if we try and find the callee by normal means a
1537   // safepoint is possible and have trouble gc'ing the compiled args.
1538   RegisterMap reg_map(current,
1539                       RegisterMap::UpdateMap::skip,
1540                       RegisterMap::ProcessFrames::include,
1541                       RegisterMap::WalkContinuation::skip);
1542   frame stub_frame = current->last_frame();
1543   assert(stub_frame.is_runtime_frame(), "sanity check");
1544   frame caller_frame = stub_frame.sender(&reg_map);
1545 
1546   if (caller_frame.is_interpreted_frame() ||
1547       caller_frame.is_entry_frame() ||
1548       caller_frame.is_upcall_stub_frame()) {

1561       // so bypassing it in c2i adapter is benign.
1562       return callee->get_c2i_no_clinit_check_entry();
1563     } else {
1564       return callee->get_c2i_entry();
1565     }
1566   }
1567 
1568   // Must be compiled to compiled path which is safe to stackwalk
1569   methodHandle callee_method;
1570   JRT_BLOCK
1571     // Force resolving of caller (if we called from compiled frame)
1572     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1573     current->set_vm_result_2(callee_method());
1574   JRT_BLOCK_END
1575   // return compiled code entry point after potential safepoints
1576   return get_resolved_entry(current, callee_method);
1577 JRT_END
1578 
1579 // Handle abstract method call
1580 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1581   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1582 
1583   // Verbose error message for AbstractMethodError.
1584   // Get the called method from the invoke bytecode.
1585   vframeStream vfst(current, true);
1586   assert(!vfst.at_end(), "Java frame must exist");
1587   methodHandle caller(current, vfst.method());
1588   Bytecode_invoke invoke(caller, vfst.bci());
1589   DEBUG_ONLY( invoke.verify(); )
1590 
1591   // Find the compiled caller frame.
1592   RegisterMap reg_map(current,
1593                       RegisterMap::UpdateMap::include,
1594                       RegisterMap::ProcessFrames::include,
1595                       RegisterMap::WalkContinuation::skip);
1596   frame stubFrame = current->last_frame();
1597   assert(stubFrame.is_runtime_frame(), "must be");
1598   frame callerFrame = stubFrame.sender(&reg_map);
1599   assert(callerFrame.is_compiled_frame(), "must be");
1600 
1601   // Install exception and return forward entry.
1602   address res = SharedRuntime::throw_AbstractMethodError_entry();

1609       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1610     }
1611   JRT_BLOCK_END
1612   return res;
1613 JRT_END
1614 
1615 // return verified_code_entry if interp_only_mode is not set for the current thread;
1616 // otherwise return c2i entry.
1617 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1618   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1619     // In interp_only_mode we need to go to the interpreted entry
1620     // The c2i won't patch in this mode -- see fixup_callers_callsite
1621     return callee_method->get_c2i_entry();
1622   }
1623   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1624   return callee_method->verified_code_entry();
1625 }
1626 
1627 // resolve a static call and patch code
1628 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1629   PerfTraceTime timer(_perf_resolve_static_total_time);
1630 
1631   methodHandle callee_method;
1632   bool enter_special = false;
1633   JRT_BLOCK
1634     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1635     current->set_vm_result_2(callee_method());
1636   JRT_BLOCK_END
1637   // return compiled code entry point after potential safepoints
1638   return get_resolved_entry(current, callee_method);
1639 JRT_END
1640 
// resolve virtual call and update inline cache to monomorphic
// Resolves the callee, publishes the Method* through TLS for the stub,
// and returns the code entry point to continue at.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual == true, is_optimized == false
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    // Return Method* through TLS
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1653 
1654 
// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache).  Patch code to resolved target.
// Resolves the callee, publishes the Method* through TLS for the stub,
// and returns the code entry point to continue at.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);

  methodHandle callee_method;
  JRT_BLOCK
    // is_virtual == true, is_optimized == true
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    // Return Method* through TLS
    current->set_vm_result_2(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END
1668 
1669 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1670   JavaThread* current = THREAD;
1671   ResourceMark rm(current);
1672   CallInfo call_info;
1673   Bytecodes::Code bc;
1674 
1675   // receiver is null for static calls. An exception is thrown for null
1676   // receivers for non-static calls
1677   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1678 
1679   methodHandle callee_method(current, call_info.selected_method());
1680 

1681   Atomic::inc(&_ic_miss_ctr);
1682 
1683 #ifndef PRODUCT
1684   // Statistics & Tracing
1685   if (TraceCallFixup) {
1686     ResourceMark rm(current);
1687     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1688     callee_method->print_short_name(tty);
1689     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1690   }
1691 
1692   if (ICMissHistogram) {
1693     MutexLocker m(VMStatistic_lock);
1694     RegisterMap reg_map(current,
1695                         RegisterMap::UpdateMap::skip,
1696                         RegisterMap::ProcessFrames::include,
1697                         RegisterMap::WalkContinuation::skip);
1698     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1699     // produce statistics under the lock
1700     trace_ic_miss(f.pc());
1701   }
1702 #endif
1703 

1786             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1787             cdc->set_to_clean();
1788             break;
1789           }
1790 
1791           case relocInfo::virtual_call_type: {
1792             // compiled, dispatched call (which used to call an interpreted method)
1793             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1794             inline_cache->set_to_clean();
1795             break;
1796           }
1797           default:
1798             break;
1799         }
1800       }
1801     }
1802   }
1803 
1804   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1805 


1806   Atomic::inc(&_wrong_method_ctr);
1807 
1808 #ifndef PRODUCT
1809   if (TraceCallFixup) {
1810     ResourceMark rm(current);
1811     tty->print("handle_wrong_method reresolving call to");
1812     callee_method->print_short_name(tty);
1813     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1814   }
1815 #endif
1816 
1817   return callee_method;
1818 }
1819 
1820 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1821   // The faulting unsafe accesses should be changed to throw the error
1822   // synchronously instead. Meanwhile the faulting instruction will be
1823   // skipped over (effectively turning it into a no-op) and an
1824   // asynchronous exception will be raised which the thread will
1825   // handle at a later point. If the instruction is a load it will
1826   // return garbage.
1827 
1828   // Request an async exception.

2086 // This is only called when CheckJNICalls is true, and only
2087 // for virtual thread termination.
2088 JRT_LEAF(void,  SharedRuntime::log_jni_monitor_still_held())
2089   assert(CheckJNICalls, "Only call this when checking JNI usage");
2090   if (log_is_enabled(Debug, jni)) {
2091     JavaThread* current = JavaThread::current();
2092     int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2093     int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2094     log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2095                    ") exiting with Objects still locked by JNI MonitorEnter.",
2096                    vthread_id, carrier_id);
2097   }
2098 JRT_END
2099 
2100 #ifndef PRODUCT
2101 
2102 void SharedRuntime::print_statistics() {
2103   ttyLocker ttyl;
2104   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
2105 
2106   SharedRuntime::print_ic_miss_histogram_on(tty);
2107   SharedRuntime::print_counters_on(tty);
2108   AdapterHandlerLibrary::print_statistics_on(tty);
































2109 
2110   if (xtty != nullptr)  xtty->tail("statistics");
2111 }
2112 
2113 //void SharedRuntime::print_counters_on(outputStream* st) {
2114 //  // Dump the JRT_ENTRY counters
2115 //  if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2116 //  if (_new_array_ctr)    st->print_cr("%5u new array requires GC", _new_array_ctr);
2117 //  if (_multi2_ctr)       st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2118 //  if (_multi3_ctr)       st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2119 //  if (_multi4_ctr)       st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2120 //  if (_multi5_ctr)       st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2121 //
2122 //  st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2123 //  st->print_cr("%5u wrong method", _wrong_method_ctr);
2124 //  st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2125 //  st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2126 //  st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2127 //
2128 //  if (_mon_enter_stub_ctr)       st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2129 //  if (_mon_exit_stub_ctr)        st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2130 //  if (_mon_enter_ctr)            st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2131 //  if (_mon_exit_ctr)             st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2132 //  if (_partial_subtype_ctr)      st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2133 //  if (_jbyte_array_copy_ctr)     st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2134 //  if (_jshort_array_copy_ctr)    st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2135 //  if (_jint_array_copy_ctr)      st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2136 //  if (_jlong_array_copy_ctr)     st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2137 //  if (_oop_array_copy_ctr)       st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2138 //  if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2139 //  if (_unsafe_array_copy_ctr)    st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2140 //  if (_generic_array_copy_ctr)   st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2141 //  if (_slow_array_copy_ctr)      st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2142 //  if (_find_handler_ctr)         st->print_cr("%5u find exception handler", _find_handler_ctr);
2143 //  if (_rethrow_ctr)              st->print_cr("%5u rethrow handler", _rethrow_ctr);
2144 //  if (_unsafe_set_memory_ctr)    st->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2145 //}
2146 
// Return x as a percentage of y; a y of zero (or negative) is clamped to 1
// so the division is always well-defined.
inline double percent(int64_t x, int64_t y) {
  const int64_t denom = (y < 1) ? (int64_t)1 : y;
  return 100.0 * (double)x / (double)denom;
}
2150 
2151 class MethodArityHistogram {
2152  public:
2153   enum { MAX_ARITY = 256 };
2154  private:
2155   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2156   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2157   static uint64_t _total_compiled_calls;
2158   static uint64_t _max_compiled_calls_per_method;
2159   static int _max_arity;                       // max. arity seen
2160   static int _max_size;                        // max. arg size seen
2161 
2162   static void add_method_to_histogram(nmethod* nm) {
2163     Method* method = (nm == nullptr) ? nullptr : nm->method();
2164     if (method != nullptr) {
2165       ArgumentCount args(method->signature());
2166       int arity   = args.size() + (method->is_static() ? 0 : 1);

2211     // Take the Compile_lock to protect against changes in the CodeBlob structures
2212     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2213     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2214     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2215     _max_arity = _max_size = 0;
2216     _total_compiled_calls = 0;
2217     _max_compiled_calls_per_method = 0;
2218     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2219     CodeCache::nmethods_do(add_method_to_histogram);
2220     print_histogram();
2221   }
2222 };
2223 
// Backing storage for MethodArityHistogram's statically accumulated data
// (zero-initialized; reset again in the constructor before each collection).
uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
uint64_t MethodArityHistogram::_total_compiled_calls;
uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
int MethodArityHistogram::_max_arity;
int MethodArityHistogram::_max_size;
2230 
2231 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2232   tty->print_cr("Calls from compiled code:");
2233   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2234   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2235   int64_t mono_i = _nof_interface_calls;
2236   tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2237   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2238   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2239   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2240   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2241   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2242   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2243   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2244   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2245   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2246   tty->cr();
2247   tty->print_cr("Note 1: counter updates are not MT-safe.");
2248   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2249   tty->print_cr("        %% in nested categories are relative to their category");
2250   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2251   tty->cr();

2464                   AdapterFingerPrint::equals>;
2465 static AdapterHandlerTable* _adapter_handler_table;
2466 
2467 // Find a entry with the same fingerprint if it exists
2468 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2469   NOT_PRODUCT(_lookups++);
2470   assert_lock_strong(AdapterHandlerLibrary_lock);
2471   AdapterFingerPrint fp(total_args_passed, sig_bt);
2472   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2473   if (entry != nullptr) {
2474 #ifndef PRODUCT
2475     if (fp.is_compact()) _compact++;
2476     _hits++;
2477 #endif
2478     return *entry;
2479   }
2480   return nullptr;
2481 }
2482 
2483 #ifndef PRODUCT
2484 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2485   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2486     return sizeof(*key) + sizeof(*a);
2487   };
2488   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2489   ts.print(st, "AdapterHandlerTable");
2490   st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2491                _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2492   st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2493                _lookups, _equals, _hits, _compact);
2494 }
2495 #endif // !PRODUCT
2496 
// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
// Pre-resolved shared handlers for common signatures (set up elsewhere,
// all start out null).
AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
const int AdapterHandlerLibrary_size = 16*K;  // size of the shared adapter generation buffer
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;

// Accessor for the shared adapter-generation buffer blob (may be null until
// it is created).
BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  return _buffer;
}
2511 
2512 static void post_adapter_creation(const AdapterBlob* new_adapter,
2513                                   const AdapterHandlerEntry* entry) {
2514   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2515     char blob_id[256];

3163   assert(found, "Should have found handler");
3164 }
3165 
3166 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3167   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3168   if (get_i2c_entry() != nullptr) {
3169     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3170   }
3171   if (get_c2i_entry() != nullptr) {
3172     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3173   }
3174   if (get_c2i_unverified_entry() != nullptr) {
3175     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3176   }
3177   if (get_c2i_no_clinit_check_entry() != nullptr) {
3178     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3179   }
3180   st->cr();
3181 }
3182 








3183 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3184   assert(current == JavaThread::current(), "pre-condition");
3185   StackOverflow* overflow_state = current->stack_overflow_state();
3186   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3187   overflow_state->set_reserved_stack_activation(current->stack_base());
3188 JRT_END
3189 
3190 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3191   ResourceMark rm(current);
3192   frame activation;
3193   nmethod* nm = nullptr;
3194   int count = 1;
3195 
3196   assert(fr.is_java_frame(), "Must start on Java frame");
3197 
3198   RegisterMap map(JavaThread::current(),
3199                   RegisterMap::UpdateMap::skip,
3200                   RegisterMap::ProcessFrames::skip,
3201                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3202   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
< prev index next >