src/hotspot/share/runtime/sharedRuntime.cpp (old version)

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"

  26 #include "classfile/javaClasses.inline.hpp"
  27 #include "classfile/stringTable.hpp"
  28 #include "classfile/vmClasses.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "code/codeCache.hpp"
  31 #include "code/compiledIC.hpp"
  32 #include "code/nmethod.inline.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "code/vtableStubs.hpp"
  35 #include "compiler/abstractCompiler.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/disassembler.hpp"
  38 #include "gc/shared/barrierSet.hpp"
  39 #include "gc/shared/collectedHeap.hpp"
  40 #include "gc/shared/gcLocker.inline.hpp"
  41 #include "interpreter/interpreter.hpp"
  42 #include "interpreter/interpreterRuntime.hpp"
  43 #include "jvm.h"
  44 #include "jfr/jfrEvents.hpp"
  45 #include "logging/log.hpp"
  46 #include "memory/resourceArea.hpp"
  47 #include "memory/universe.hpp"
  48 #include "metaprogramming/primitiveConversions.hpp"
  49 #include "oops/klass.hpp"
  50 #include "oops/method.inline.hpp"
  51 #include "oops/objArrayKlass.hpp"
  52 #include "oops/oop.inline.hpp"
  53 #include "prims/forte.hpp"
  54 #include "prims/jvmtiExport.hpp"
  55 #include "prims/jvmtiThreadState.hpp"
  56 #include "prims/methodHandles.hpp"
  57 #include "prims/nativeLookup.hpp"
  58 #include "runtime/atomic.hpp"
  59 #include "runtime/frame.inline.hpp"
  60 #include "runtime/handles.inline.hpp"
  61 #include "runtime/init.hpp"
  62 #include "runtime/interfaceSupport.inline.hpp"
  63 #include "runtime/java.hpp"
  64 #include "runtime/javaCalls.hpp"
  65 #include "runtime/jniHandles.inline.hpp"

  66 #include "runtime/sharedRuntime.hpp"
  67 #include "runtime/stackWatermarkSet.hpp"
  68 #include "runtime/stubRoutines.hpp"
  69 #include "runtime/synchronizer.hpp"
  70 #include "runtime/vframe.inline.hpp"
  71 #include "runtime/vframeArray.hpp"
  72 #include "runtime/vm_version.hpp"

  73 #include "utilities/copy.hpp"
  74 #include "utilities/dtrace.hpp"
  75 #include "utilities/events.hpp"
  76 #include "utilities/resourceHash.hpp"
  77 #include "utilities/macros.hpp"
  78 #include "utilities/xmlstream.hpp"
  79 #ifdef COMPILER1
  80 #include "c1/c1_Runtime1.hpp"
  81 #endif
  82 #if INCLUDE_JFR
  83 #include "jfr/jfr.hpp"
  84 #endif
  85 
  86 // Shared stub locations
  87 RuntimeStub*        SharedRuntime::_wrong_method_blob;
  88 RuntimeStub*        SharedRuntime::_wrong_method_abstract_blob;
  89 RuntimeStub*        SharedRuntime::_ic_miss_blob;
  90 RuntimeStub*        SharedRuntime::_resolve_opt_virtual_call_blob;
  91 RuntimeStub*        SharedRuntime::_resolve_virtual_call_blob;
  92 RuntimeStub*        SharedRuntime::_resolve_static_call_blob;
  93 address             SharedRuntime::_resolve_static_call_entry;
  94 
  95 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  96 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
  97 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
  98 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
  99 
 100 #ifdef COMPILER2
 101 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
 102 #endif // COMPILER2
 103 
 104 nmethod*            SharedRuntime::_cont_doYield_stub;
 105 
 106 //----------------------------generate_stubs-----------------------------------
 107 void SharedRuntime::generate_stubs() {
 108   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
 109   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
 110   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
 111   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
 112   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
 113   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
 114   _resolve_static_call_entry           = _resolve_static_call_blob->entry_point();
 115 
 116   AdapterHandlerLibrary::initialize();
 117 
 118 #if COMPILER2_OR_JVMCI
 119   // Vectors are generated only by C2 and JVMCI.
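        // (A separate blob is used here because, at a poll inside a vectorized loop,
        // the wide vector registers are live and presumably must be saved and restored
        // by the handler.)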
 120   bool support_wide = is_wide_vector(MaxVectorSize);
 121   if (support_wide) {
 122     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 123   }
 124 #endif // COMPILER2_OR_JVMCI
 125   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 126   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 127 
 128   generate_deopt_blob();
 129 
 130 #ifdef COMPILER2
 131   generate_uncommon_trap_blob();
 132 #endif // COMPILER2
 133 }
 134 
 135 #include <math.h>
 136 
 137 // Implementation of SharedRuntime
 138 
 139 #ifndef PRODUCT
 140 // For statistics
 141 uint SharedRuntime::_ic_miss_ctr = 0;
 142 uint SharedRuntime::_wrong_method_ctr = 0;
 143 uint SharedRuntime::_resolve_static_ctr = 0;
 144 uint SharedRuntime::_resolve_virtual_ctr = 0;
 145 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;


 146 uint SharedRuntime::_implicit_null_throws = 0;
 147 uint SharedRuntime::_implicit_div0_throws = 0;
 148 
 149 int64_t SharedRuntime::_nof_normal_calls = 0;
 150 int64_t SharedRuntime::_nof_inlined_calls = 0;
 151 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 152 int64_t SharedRuntime::_nof_static_calls = 0;
 153 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 154 int64_t SharedRuntime::_nof_interface_calls = 0;
 155 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 156 
 157 uint SharedRuntime::_new_instance_ctr=0;
 158 uint SharedRuntime::_new_array_ctr=0;
 159 uint SharedRuntime::_multi2_ctr=0;
 160 uint SharedRuntime::_multi3_ctr=0;
 161 uint SharedRuntime::_multi4_ctr=0;
 162 uint SharedRuntime::_multi5_ctr=0;
 163 uint SharedRuntime::_mon_enter_stub_ctr=0;
 164 uint SharedRuntime::_mon_exit_stub_ctr=0;
 165 uint SharedRuntime::_mon_enter_ctr=0;

 179 uint SharedRuntime::_unsafe_set_memory_ctr=0;
 180 
 181 int     SharedRuntime::_ICmiss_index                    = 0;
 182 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 183 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 184 
 185 
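      // Record one IC miss at 'at': a site we have already seen just gets its counter
      // bumped; a new site is appended, and once maxICmiss_count distinct sites have
      // been seen, every further new site overwrites the last slot.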
 186 void SharedRuntime::trace_ic_miss(address at) {
 187   for (int i = 0; i < _ICmiss_index; i++) {
 188     if (_ICmiss_at[i] == at) {
 189       _ICmiss_count[i]++;
 190       return;
 191     }
 192   }
 193   int index = _ICmiss_index++;
 194   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 195   _ICmiss_at[index] = at;
 196   _ICmiss_count[index] = 1;
 197 }
 198 
 199 void SharedRuntime::print_ic_miss_histogram() {
 200   if (ICMissHistogram) {
 201     tty->print_cr("IC Miss Histogram:");
 202     int tot_misses = 0;
 203     for (int i = 0; i < _ICmiss_index; i++) {
 204       tty->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 205       tot_misses += _ICmiss_count[i];
 206     }
 207     tty->print_cr("Total IC misses: %7d", tot_misses);
 208   }
 209 }
 210 #endif // PRODUCT
 211 
 212 
 213 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
 214   return x * y;
 215 JRT_END
 216 
 217 
 218 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 219   if (x == min_jlong && y == CONST64(-1)) {
 220     return x;
 221   } else {
 222     return x / y;
 223   }
 224 JRT_END
 225 
 226 
 227 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 228   if (x == min_jlong && y == CONST64(-1)) {
 229     return 0;
 230   } else {

 657   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 658   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 659   JNIHandles::destroy_local(vthread);
 660 JRT_END
 661 #endif // INCLUDE_JVMTI
 662 
 663 // The interpreter code to call this tracing function is only
  664 // called/generated when UL is enabled for the redefine, class and obsolete tags
  665 // at the right level. Since obsolete methods are never compiled, we don't have
 666 // to modify the compilers to generate calls to this function.
 667 //
 668 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
 669     JavaThread* thread, Method* method))
 670   if (method->is_obsolete()) {
 671     // We are calling an obsolete method, but this is not necessarily
 672     // an error. Our method could have been redefined just after we
 673     // fetched the Method* from the constant pool.
 674     ResourceMark rm;
 675     log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
 676   }
 677   return 0;
 678 JRT_END
 679 
 680 // ret_pc points into caller; we are returning caller's exception handler
 681 // for given exception
  682 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
 683 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 684                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 685   assert(nm != nullptr, "must exist");
 686   ResourceMark rm;
 687 
 688 #if INCLUDE_JVMCI
 689   if (nm->is_compiled_by_jvmci()) {
 690     // lookup exception handler for this pc
 691     int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
 692     ExceptionHandlerTable table(nm);
 693     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 694     if (t != nullptr) {
 695       return nm->code_begin() + t->pco();
 696     } else {

1296 
1297   // determine call info & receiver
1298   // note: a) receiver is null for static calls
1299   //       b) an exception is thrown if receiver is null for non-static calls
1300   CallInfo call_info;
1301   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1302   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1303 
1304   NoSafepointVerifier nsv;
1305 
1306   methodHandle callee_method(current, call_info.selected_method());
1307 
1308   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1309          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1310          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1311          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1312          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1313 
1314   assert(!caller_nm->is_unloading(), "It should not be unloading");
1315 
1316 #ifndef PRODUCT
1317   // tracing/debugging/statistics
1318   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1319                  (is_virtual) ? (&_resolve_virtual_ctr) :
1320                                 (&_resolve_static_ctr);
1321   Atomic::inc(addr);
1322 

1323   if (TraceCallFixup) {
1324     ResourceMark rm(current);
1325     tty->print("resolving %s%s (%s) call to",
1326                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1327                Bytecodes::name(invoke_code));
1328     callee_method->print_short_name(tty);
1329     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1330                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1331   }
1332 #endif
1333 
1334   if (invoke_code == Bytecodes::_invokestatic) {
1335     assert(callee_method->method_holder()->is_initialized() ||
1336            callee_method->method_holder()->is_init_thread(current),
1337            "invalid class initialization state for invoke_static");
1338     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1339       // In order to keep class initialization check, do not patch call
1340       // site for static call when the class is not fully initialized.
1341       // Proper check is enforced by call site re-resolution on every invocation.
1342       //

1358 
1359   // Make sure the callee nmethod does not get deoptimized and removed before
1360   // we are done patching the code.
1361 
1362 
1363   CompiledICLocker ml(caller_nm);
1364   if (is_virtual && !is_optimized) {
1365     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1366     inline_cache->update(&call_info, receiver->klass());
1367   } else {
1368     // Callsite is a direct call - set it to the destination method
1369     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1370     callsite->set(callee_method);
1371   }
1372 
1373   return callee_method;
1374 }
1375 
1376 // Inline caches exist only in compiled code
1377 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))


1378 #ifdef ASSERT
1379   RegisterMap reg_map(current,
1380                       RegisterMap::UpdateMap::skip,
1381                       RegisterMap::ProcessFrames::include,
1382                       RegisterMap::WalkContinuation::skip);
1383   frame stub_frame = current->last_frame();
1384   assert(stub_frame.is_runtime_frame(), "sanity check");
1385   frame caller_frame = stub_frame.sender(&reg_map);
1386   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1387 #endif /* ASSERT */
1388 
1389   methodHandle callee_method;
1390   JRT_BLOCK
1391     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1392     // Return Method* through TLS
1393     current->set_vm_result_2(callee_method());
1394   JRT_BLOCK_END
1395   // return compiled code entry point after potential safepoints
1396   return get_resolved_entry(current, callee_method);
1397 JRT_END
1398 
1399 
1400 // Handle call site that has been made non-entrant
1401 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))


1402   // 6243940 We might end up in here if the callee is deoptimized
1403   // as we race to call it.  We don't want to take a safepoint if
1404   // the caller was interpreted because the caller frame will look
1405   // interpreted to the stack walkers and arguments are now
1406   // "compiled" so it is much better to make this transition
1407   // invisible to the stack walking code. The i2c path will
1408   // place the callee method in the callee_target. It is stashed
 1409   // there because if we tried to find the callee by normal means a
 1410   // safepoint would be possible and we could have trouble gc'ing the compiled args.
1411   RegisterMap reg_map(current,
1412                       RegisterMap::UpdateMap::skip,
1413                       RegisterMap::ProcessFrames::include,
1414                       RegisterMap::WalkContinuation::skip);
1415   frame stub_frame = current->last_frame();
1416   assert(stub_frame.is_runtime_frame(), "sanity check");
1417   frame caller_frame = stub_frame.sender(&reg_map);
1418 
1419   if (caller_frame.is_interpreted_frame() ||
1420       caller_frame.is_entry_frame() ||
1421       caller_frame.is_upcall_stub_frame()) {

1434       // so bypassing it in c2i adapter is benign.
1435       return callee->get_c2i_no_clinit_check_entry();
1436     } else {
1437       return callee->get_c2i_entry();
1438     }
1439   }
1440 
1441   // Must be compiled to compiled path which is safe to stackwalk
1442   methodHandle callee_method;
1443   JRT_BLOCK
1444     // Force resolving of caller (if we called from compiled frame)
1445     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1446     current->set_vm_result_2(callee_method());
1447   JRT_BLOCK_END
1448   // return compiled code entry point after potential safepoints
1449   return get_resolved_entry(current, callee_method);
1450 JRT_END
1451 
1452 // Handle abstract method call
1453 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))


1454   // Verbose error message for AbstractMethodError.
1455   // Get the called method from the invoke bytecode.
1456   vframeStream vfst(current, true);
1457   assert(!vfst.at_end(), "Java frame must exist");
1458   methodHandle caller(current, vfst.method());
1459   Bytecode_invoke invoke(caller, vfst.bci());
1460   DEBUG_ONLY( invoke.verify(); )
1461 
1462   // Find the compiled caller frame.
1463   RegisterMap reg_map(current,
1464                       RegisterMap::UpdateMap::include,
1465                       RegisterMap::ProcessFrames::include,
1466                       RegisterMap::WalkContinuation::skip);
1467   frame stubFrame = current->last_frame();
1468   assert(stubFrame.is_runtime_frame(), "must be");
1469   frame callerFrame = stubFrame.sender(&reg_map);
1470   assert(callerFrame.is_compiled_frame(), "must be");
1471 
1472   // Install exception and return forward entry.
1473   address res = StubRoutines::throw_AbstractMethodError_entry();

1480       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1481     }
1482   JRT_BLOCK_END
1483   return res;
1484 JRT_END
1485 
1486 // return verified_code_entry if interp_only_mode is not set for the current thread;
1487 // otherwise return c2i entry.
1488 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1489   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1490     // In interp_only_mode we need to go to the interpreted entry
1491     // The c2i won't patch in this mode -- see fixup_callers_callsite
1492     return callee_method->get_c2i_entry();
1493   }
1494   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1495   return callee_method->verified_code_entry();
1496 }
1497 
1498 // resolve a static call and patch code
1499 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))


1500   methodHandle callee_method;
1501   bool enter_special = false;
1502   JRT_BLOCK
1503     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1504     current->set_vm_result_2(callee_method());
1505   JRT_BLOCK_END
1506   // return compiled code entry point after potential safepoints
1507   return get_resolved_entry(current, callee_method);
1508 JRT_END
1509 
1510 // resolve virtual call and update inline cache to monomorphic
1511 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))


1512   methodHandle callee_method;
1513   JRT_BLOCK
1514     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1515     current->set_vm_result_2(callee_method());
1516   JRT_BLOCK_END
1517   // return compiled code entry point after potential safepoints
1518   return get_resolved_entry(current, callee_method);
1519 JRT_END
1520 
1521 
1522 // Resolve a virtual call that can be statically bound (e.g., always
1523 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1524 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))


1525   methodHandle callee_method;
1526   JRT_BLOCK
1527     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1528     current->set_vm_result_2(callee_method());
1529   JRT_BLOCK_END
1530   // return compiled code entry point after potential safepoints
1531   return get_resolved_entry(current, callee_method);
1532 JRT_END
1533 
1534 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1535   JavaThread* current = THREAD;
1536   ResourceMark rm(current);
1537   CallInfo call_info;
1538   Bytecodes::Code bc;
1539 
1540   // receiver is null for static calls. An exception is thrown for null
1541   // receivers for non-static calls
1542   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1543 
1544   methodHandle callee_method(current, call_info.selected_method());
1545 
1546 #ifndef PRODUCT
1547   Atomic::inc(&_ic_miss_ctr);
1548 

1549   // Statistics & Tracing
1550   if (TraceCallFixup) {
1551     ResourceMark rm(current);
1552     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1553     callee_method->print_short_name(tty);
1554     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1555   }
1556 
1557   if (ICMissHistogram) {
1558     MutexLocker m(VMStatistic_lock);
1559     RegisterMap reg_map(current,
1560                         RegisterMap::UpdateMap::skip,
1561                         RegisterMap::ProcessFrames::include,
1562                         RegisterMap::WalkContinuation::skip);
1563     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1564     // produce statistics under the lock
1565     trace_ic_miss(f.pc());
1566   }
1567 #endif
1568 

1651             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1652             cdc->set_to_clean();
1653             break;
1654           }
1655 
1656           case relocInfo::virtual_call_type: {
1657             // compiled, dispatched call (which used to call an interpreted method)
1658             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1659             inline_cache->set_to_clean();
1660             break;
1661           }
1662           default:
1663             break;
1664         }
1665       }
1666     }
1667   }
1668 
1669   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1670 
1671 
1672 #ifndef PRODUCT
1673   Atomic::inc(&_wrong_method_ctr);
1674 

1675   if (TraceCallFixup) {
1676     ResourceMark rm(current);
1677     tty->print("handle_wrong_method reresolving call to");
1678     callee_method->print_short_name(tty);
1679     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1680   }
1681 #endif
1682 
1683   return callee_method;
1684 }
1685 
1686 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1687   // The faulting unsafe accesses should be changed to throw the error
1688   // synchronously instead. Meanwhile the faulting instruction will be
1689   // skipped over (effectively turning it into a no-op) and an
1690   // asynchronous exception will be raised which the thread will
1691   // handle at a later point. If the instruction is a load it will
1692   // return garbage.
1693 
1694   // Request an async exception.

1932 // This is only called when CheckJNICalls is true, and only
1933 // for virtual thread termination.
1934 JRT_LEAF(void,  SharedRuntime::log_jni_monitor_still_held())
1935   assert(CheckJNICalls, "Only call this when checking JNI usage");
1936   if (log_is_enabled(Debug, jni)) {
1937     JavaThread* current = JavaThread::current();
1938     int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
1939     int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
1940     log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
1941                    ") exiting with Objects still locked by JNI MonitorEnter.",
1942                    vthread_id, carrier_id);
1943   }
1944 JRT_END
1945 
1946 #ifndef PRODUCT
1947 
1948 void SharedRuntime::print_statistics() {
1949   ttyLocker ttyl;
1950   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
1951 
1952   SharedRuntime::print_ic_miss_histogram();
1953 
1954   // Dump the JRT_ENTRY counters
1955   if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
1956   if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
1957   if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
1958   if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
1959   if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
1960   if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
1961 
1962   tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
1963   tty->print_cr("%5u wrong method", _wrong_method_ctr);
1964   tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
1965   tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
1966   tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
1967 
1968   if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
1969   if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
1970   if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
1971   if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
1972   if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
1973   if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
1974   if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
1975   if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
1976   if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
1977   if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
1978   if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
1979   if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
1980   if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
1981   if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
1982   if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
1983   if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
 1984   if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memory ops", _unsafe_set_memory_ctr);
1985 
1986   AdapterHandlerLibrary::print_statistics();
1987 
1988   if (xtty != nullptr)  xtty->tail("statistics");
1989 }
1990 
1991 inline double percent(int64_t x, int64_t y) {
1992   return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
1993 }
1994 
1995 class MethodArityHistogram {
1996  public:
1997   enum { MAX_ARITY = 256 };
1998  private:
1999   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2000   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2001   static uint64_t _total_compiled_calls;
2002   static uint64_t _max_compiled_calls_per_method;
2003   static int _max_arity;                       // max. arity seen
2004   static int _max_size;                        // max. arg size seen
2005 
2006   static void add_method_to_histogram(nmethod* nm) {
2007     Method* method = (nm == nullptr) ? nullptr : nm->method();
2008     if (method != nullptr) {
2009       ArgumentCount args(method->signature());
2010       int arity   = args.size() + (method->is_static() ? 0 : 1);

2055     // Take the Compile_lock to protect against changes in the CodeBlob structures
2056     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2057     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2058     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2059     _max_arity = _max_size = 0;
2060     _total_compiled_calls = 0;
2061     _max_compiled_calls_per_method = 0;
2062     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2063     CodeCache::nmethods_do(add_method_to_histogram);
2064     print_histogram();
2065   }
2066 };
2067 
2068 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2069 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2070 uint64_t MethodArityHistogram::_total_compiled_calls;
2071 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2072 int MethodArityHistogram::_max_arity;
2073 int MethodArityHistogram::_max_size;
2074 
2075 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2076   tty->print_cr("Calls from compiled code:");
2077   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2078   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2079   int64_t mono_i = _nof_interface_calls;
2080   tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2081   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2082   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2083   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2084   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2085   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2086   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2087   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2088   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2089   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2090   tty->cr();
2091   tty->print_cr("Note 1: counter updates are not MT-safe.");
2092   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2093   tty->print_cr("        %% in nested categories are relative to their category");
2094   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2095   tty->cr();

2308                   AdapterFingerPrint::equals>;
2309 static AdapterHandlerTable* _adapter_handler_table;
2310 
 2311 // Find an entry with the same fingerprint if it exists
2312 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2313   NOT_PRODUCT(_lookups++);
2314   assert_lock_strong(AdapterHandlerLibrary_lock);
2315   AdapterFingerPrint fp(total_args_passed, sig_bt);
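        // 'fp' is a stack-allocated probe key; entries stored in the table keep their own
        // C-heap-allocated copy of the fingerprint (see create_adapter below).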
2316   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2317   if (entry != nullptr) {
2318 #ifndef PRODUCT
2319     if (fp.is_compact()) _compact++;
2320     _hits++;
2321 #endif
2322     return *entry;
2323   }
2324   return nullptr;
2325 }
2326 
2327 #ifndef PRODUCT
2328 static void print_table_statistics() {
2329   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2330     return sizeof(*key) + sizeof(*a);
2331   };
2332   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2333   ts.print(tty, "AdapterHandlerTable");
2334   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2335                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2336   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2337                 _lookups, _equals, _hits, _compact);
2338 }
2339 #endif
2340 
2341 // ---------------------------------------------------------------------------
2342 // Implementation of AdapterHandlerLibrary
2343 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2344 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2345 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2346 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2347 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2348 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2349 const int AdapterHandlerLibrary_size = 16*K;
2350 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2351 
2352 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2353   return _buffer;
2354 }
2355 
2356 static void post_adapter_creation(const AdapterBlob* new_adapter,
2357                                   const AdapterHandlerEntry* entry) {
2358   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2359     char blob_id[256];

2553         delete comparison_entry;
2554       }
2555 #endif
2556       return entry;
2557     }
2558 
2559     entry = create_adapter(new_adapter, total_args_passed, sig_bt, /* allocate_code_blob */ true);
2560   }
2561 
2562   // Outside of the lock
2563   if (new_adapter != nullptr) {
2564     post_adapter_creation(new_adapter, entry);
2565   }
2566   return entry;
2567 }
2568 
2569 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2570                                                            int total_args_passed,
2571                                                            BasicType* sig_bt,
2572                                                            bool allocate_code_blob) {



2573 
2574   // StubRoutines::_final_stubs_code is initialized after this function can be called. As a result,
 2575   // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated prior
 2576   // to all StubRoutines::_final_stubs_code being set. The checks in question are runtime range checks,
 2577   // generated in an I2C stub, that ensure the stub is only called from an interpreter frame or stubs.
2578   bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;
2579 
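        // Use a small on-stack VMRegPair array for the common case of few arguments, and
        // fall back to a resource-area allocation only for larger signatures.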
2580   VMRegPair stack_regs[16];
2581   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2582 
2583   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2584   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2585   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2586   CodeBuffer buffer(buf);
2587   short buffer_locs[20];
2588   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2589                                           sizeof(buffer_locs)/sizeof(relocInfo));
2590 
2591   // Make a C heap allocated version of the fingerprint to store in the adapter
2592   AdapterFingerPrint* fingerprint = new AdapterFingerPrint(total_args_passed, sig_bt);

3002   assert(found, "Should have found handler");
3003 }
3004 
3005 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3006   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3007   if (get_i2c_entry() != nullptr) {
3008     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3009   }
3010   if (get_c2i_entry() != nullptr) {
3011     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3012   }
3013   if (get_c2i_unverified_entry() != nullptr) {
3014     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3015   }
3016   if (get_c2i_no_clinit_check_entry() != nullptr) {
3017     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3018   }
3019   st->cr();
3020 }
3021 
3022 #ifndef PRODUCT
3023 
3024 void AdapterHandlerLibrary::print_statistics() {
3025   print_table_statistics();
3026 }
3027 
3028 #endif /* PRODUCT */
3029 
3030 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3031   assert(current == JavaThread::current(), "pre-condition");
3032   StackOverflow* overflow_state = current->stack_overflow_state();
3033   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3034   overflow_state->set_reserved_stack_activation(current->stack_base());
3035 JRT_END
3036 
3037 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3038   ResourceMark rm(current);
3039   frame activation;
3040   nmethod* nm = nullptr;
3041   int count = 1;
3042 
3043   assert(fr.is_java_frame(), "Must start on Java frame");
3044 
3045   RegisterMap map(JavaThread::current(),
3046                   RegisterMap::UpdateMap::skip,
3047                   RegisterMap::ProcessFrames::skip,
3048                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3049   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {

src/hotspot/share/runtime/sharedRuntime.cpp (new version)

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/classLoader.hpp"
  27 #include "classfile/javaClasses.inline.hpp"
  28 #include "classfile/stringTable.hpp"
  29 #include "classfile/vmClasses.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "code/codeCache.hpp"
  32 #include "code/compiledIC.hpp"
  33 #include "code/nmethod.inline.hpp"
  34 #include "code/scopeDesc.hpp"
  35 #include "code/vtableStubs.hpp"
  36 #include "compiler/abstractCompiler.hpp"
  37 #include "compiler/compileBroker.hpp"
  38 #include "compiler/disassembler.hpp"
  39 #include "gc/shared/barrierSet.hpp"
  40 #include "gc/shared/collectedHeap.hpp"
  41 #include "gc/shared/gcLocker.inline.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "interpreter/interpreterRuntime.hpp"
  44 #include "jvm.h"
  45 #include "jfr/jfrEvents.hpp"
  46 #include "logging/log.hpp"
  47 #include "memory/resourceArea.hpp"
  48 #include "memory/universe.hpp"
  49 #include "metaprogramming/primitiveConversions.hpp"
  50 #include "oops/klass.hpp"
  51 #include "oops/method.inline.hpp"
  52 #include "oops/objArrayKlass.hpp"
  53 #include "oops/oop.inline.hpp"
  54 #include "prims/forte.hpp"
  55 #include "prims/jvmtiExport.hpp"
  56 #include "prims/jvmtiThreadState.hpp"
  57 #include "prims/methodHandles.hpp"
  58 #include "prims/nativeLookup.hpp"
  59 #include "runtime/atomic.hpp"
  60 #include "runtime/frame.inline.hpp"
  61 #include "runtime/handles.inline.hpp"
  62 #include "runtime/init.hpp"
  63 #include "runtime/interfaceSupport.inline.hpp"
  64 #include "runtime/java.hpp"
  65 #include "runtime/javaCalls.hpp"
  66 #include "runtime/jniHandles.inline.hpp"
  67 #include "runtime/perfData.inline.hpp"
  68 #include "runtime/sharedRuntime.hpp"
  69 #include "runtime/stackWatermarkSet.hpp"
  70 #include "runtime/stubRoutines.hpp"
  71 #include "runtime/synchronizer.hpp"
  72 #include "runtime/vframe.inline.hpp"
  73 #include "runtime/vframeArray.hpp"
  74 #include "runtime/vm_version.hpp"
  75 #include "services/management.hpp"
  76 #include "utilities/copy.hpp"
  77 #include "utilities/dtrace.hpp"
  78 #include "utilities/events.hpp"
  79 #include "utilities/resourceHash.hpp"
  80 #include "utilities/macros.hpp"
  81 #include "utilities/xmlstream.hpp"
  82 #ifdef COMPILER1
  83 #include "c1/c1_Runtime1.hpp"
  84 #endif
  85 #if INCLUDE_JFR
  86 #include "jfr/jfr.hpp"
  87 #endif
  88 
  89 // Shared stub locations
  90 RuntimeStub*        SharedRuntime::_wrong_method_blob;
  91 RuntimeStub*        SharedRuntime::_wrong_method_abstract_blob;
  92 RuntimeStub*        SharedRuntime::_ic_miss_blob;
  93 RuntimeStub*        SharedRuntime::_resolve_opt_virtual_call_blob;
  94 RuntimeStub*        SharedRuntime::_resolve_virtual_call_blob;
  95 RuntimeStub*        SharedRuntime::_resolve_static_call_blob;
  96 address             SharedRuntime::_resolve_static_call_entry;
  97 
  98 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  99 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
 100 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
 101 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
 102 
 103 #ifdef COMPILER2
 104 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
 105 #endif // COMPILER2
 106 
 107 nmethod*            SharedRuntime::_cont_doYield_stub;
 108 
 109 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
 110 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time     = nullptr;
 111 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time      = nullptr;
 112 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
 113 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time             = nullptr;
 114 
 115 //----------------------------generate_stubs-----------------------------------
 116 void SharedRuntime::generate_stubs() {
 117   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
 118   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
 119   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
 120   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
 121   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
 122   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
 123   _resolve_static_call_entry           = _resolve_static_call_blob->entry_point();
 124 
 125   AdapterHandlerLibrary::initialize();
 126 
 127 #if COMPILER2_OR_JVMCI
 128   // Vectors are generated only by C2 and JVMCI.
 129   bool support_wide = is_wide_vector(MaxVectorSize);
 130   if (support_wide) {
 131     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 132   }
 133 #endif // COMPILER2_OR_JVMCI
 134   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 135   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 136 
 137   generate_deopt_blob();
 138 
 139 #ifdef COMPILER2
 140   generate_uncommon_trap_blob();
 141 #endif // COMPILER2
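        // Create the PerfTickCounters used to time the call-resolution and wrong-method
        // runtime entry points (see the PerfTraceTime scopes below and
        // SharedRuntime::print_counters_on()). With UsePerfData on they should also be
        // visible through the usual jvmstat interface (e.g. "jcmd <pid> PerfCounter.print"),
        // under names derived from the SUN_CI namespace.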
 142   if (UsePerfData) {
 143     EXCEPTION_MARK;
  144     NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resolve_opt_virtual_call");
  145     NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time,     SUN_CI, "resolve_virtual_call");
  146     NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time,      SUN_CI, "resolve_static_call");
  147     NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
  148     NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time,             SUN_CI, "ic_miss");
 149     if (HAS_PENDING_EXCEPTION) {
 150       vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
 151     }
 152   }
 153 }
 154 
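      // Print a per-entry-point breakdown (elapsed and thread time plus event counts) of
      // the resolution/wrong-method/IC-miss work timed by the counters created above.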
 155 void SharedRuntime::print_counters_on(outputStream* st) {
 156   st->print_cr("SharedRuntime:");
 157   if (UsePerfData) {
  158     st->print_cr("  resolve_opt_virtual_call: " JLONG_FORMAT_W(5) "ms (elapsed) " JLONG_FORMAT_W(5) "ms (thread) / %5u events",
  159                  _perf_resolve_opt_virtual_total_time->elapsed_counter_value_ms(),
  160                  _perf_resolve_opt_virtual_total_time->thread_counter_value_ms(),
  161                  _resolve_opt_virtual_ctr);
  162     st->print_cr("  resolve_virtual_call:     " JLONG_FORMAT_W(5) "ms (elapsed) " JLONG_FORMAT_W(5) "ms (thread) / %5u events",
  163                  _perf_resolve_virtual_total_time->elapsed_counter_value_ms(),
  164                  _perf_resolve_virtual_total_time->thread_counter_value_ms(),
  165                  _resolve_virtual_ctr);
  166     st->print_cr("  resolve_static_call:      " JLONG_FORMAT_W(5) "ms (elapsed) " JLONG_FORMAT_W(5) "ms (thread) / %5u events",
  167                  _perf_resolve_static_total_time->elapsed_counter_value_ms(),
  168                  _perf_resolve_static_total_time->thread_counter_value_ms(),
  169                  _resolve_static_ctr);
  170     st->print_cr("  handle_wrong_method:      " JLONG_FORMAT_W(5) "ms (elapsed) " JLONG_FORMAT_W(5) "ms (thread) / %5u events",
  171                  _perf_handle_wrong_method_total_time->elapsed_counter_value_ms(),
  172                  _perf_handle_wrong_method_total_time->thread_counter_value_ms(),
  173                  _wrong_method_ctr);
  174     st->print_cr("  ic_miss:                  " JLONG_FORMAT_W(5) "ms (elapsed) " JLONG_FORMAT_W(5) "ms (thread) / %5u events",
  175                  _perf_ic_miss_total_time->elapsed_counter_value_ms(),
  176                  _perf_ic_miss_total_time->thread_counter_value_ms(),
  177                  _ic_miss_ctr);
 178 
 179     jlong total_elapsed_time_ms = Management::ticks_to_ms(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
 180                                                           _perf_resolve_virtual_total_time->elapsed_counter_value() +
 181                                                           _perf_resolve_static_total_time->elapsed_counter_value() +
 182                                                           _perf_handle_wrong_method_total_time->elapsed_counter_value() +
 183                                                           _perf_ic_miss_total_time->elapsed_counter_value());
 184     jlong total_thread_time_ms = Management::ticks_to_ms(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
 185                                                           _perf_resolve_virtual_total_time->thread_counter_value() +
 186                                                           _perf_resolve_static_total_time->thread_counter_value() +
 187                                                           _perf_handle_wrong_method_total_time->thread_counter_value() +
 188                                                           _perf_ic_miss_total_time->thread_counter_value());
  189     st->print_cr("Total:                      " JLONG_FORMAT_W(5) "ms (elapsed) " JLONG_FORMAT_W(5) "ms (thread)", total_elapsed_time_ms, total_thread_time_ms);
 190   } else {
 191     st->print_cr("  no data (UsePerfData is turned off)");
 192   }
 193 }
 194 
 195 #include <math.h>
 196 
 197 // Implementation of SharedRuntime
 198 

 199 // For statistics
 200 uint SharedRuntime::_ic_miss_ctr = 0;
 201 uint SharedRuntime::_wrong_method_ctr = 0;
 202 uint SharedRuntime::_resolve_static_ctr = 0;
 203 uint SharedRuntime::_resolve_virtual_ctr = 0;
 204 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
 205 
 206 #ifndef PRODUCT
 207 uint SharedRuntime::_implicit_null_throws = 0;
 208 uint SharedRuntime::_implicit_div0_throws = 0;
 209 
 210 int64_t SharedRuntime::_nof_normal_calls = 0;
 211 int64_t SharedRuntime::_nof_inlined_calls = 0;
 212 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 213 int64_t SharedRuntime::_nof_static_calls = 0;
 214 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 215 int64_t SharedRuntime::_nof_interface_calls = 0;
 216 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 217 
 218 uint SharedRuntime::_new_instance_ctr=0;
 219 uint SharedRuntime::_new_array_ctr=0;
 220 uint SharedRuntime::_multi2_ctr=0;
 221 uint SharedRuntime::_multi3_ctr=0;
 222 uint SharedRuntime::_multi4_ctr=0;
 223 uint SharedRuntime::_multi5_ctr=0;
 224 uint SharedRuntime::_mon_enter_stub_ctr=0;
 225 uint SharedRuntime::_mon_exit_stub_ctr=0;
 226 uint SharedRuntime::_mon_enter_ctr=0;

 240 uint SharedRuntime::_unsafe_set_memory_ctr=0;
 241 
 242 int     SharedRuntime::_ICmiss_index                    = 0;
 243 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 244 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 245 
 246 
 247 void SharedRuntime::trace_ic_miss(address at) {
 248   for (int i = 0; i < _ICmiss_index; i++) {
 249     if (_ICmiss_at[i] == at) {
 250       _ICmiss_count[i]++;
 251       return;
 252     }
 253   }
 254   int index = _ICmiss_index++;
 255   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 256   _ICmiss_at[index] = at;
 257   _ICmiss_count[index] = 1;
 258 }
 259 
 260 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
 261   if (ICMissHistogram) {
 262     st->print_cr("IC Miss Histogram:");
 263     int tot_misses = 0;
 264     for (int i = 0; i < _ICmiss_index; i++) {
 265       st->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 266       tot_misses += _ICmiss_count[i];
 267     }
 268     st->print_cr("Total IC misses: %7d", tot_misses);
 269   }
 270 }
 271 #endif // !PRODUCT
 272 
 273 
 274 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
 275   return x * y;
 276 JRT_END
 277 
 278 
 279 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 280   if (x == min_jlong && y == CONST64(-1)) {
 281     return x;
 282   } else {
 283     return x / y;
 284   }
 285 JRT_END
 286 
 287 
 288 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 289   if (x == min_jlong && y == CONST64(-1)) {
 290     return 0;
 291   } else {

 718   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 719   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 720   JNIHandles::destroy_local(vthread);
 721 JRT_END
 722 #endif // INCLUDE_JVMTI
 723 
 724 // The interpreter code to call this tracing function is only
  725 // called/generated when UL is enabled for the redefine, class and obsolete tags
  726 // at the right level. Since obsolete methods are never compiled, we don't have
 727 // to modify the compilers to generate calls to this function.
 728 //
 729 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
 730     JavaThread* thread, Method* method))
 731   if (method->is_obsolete()) {
 732     // We are calling an obsolete method, but this is not necessarily
 733     // an error. Our method could have been redefined just after we
 734     // fetched the Method* from the constant pool.
 735     ResourceMark rm;
 736     log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
 737   }
 738 
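        // Also trace each method entry when UL is enabled for the interpreter and bytecode
        // tags at trace level; the line notes static/native status and whether the holder
        // class is still being initialized.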
 739   LogStreamHandle(Trace, interpreter, bytecode) log;
 740   if (log.is_enabled()) {
 741     ResourceMark rm;
 742     log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
 743               p2i(thread),
 744               (method->is_static() ? "static" : "virtual"),
 745               method->name_and_sig_as_C_string(),
 746               (method->is_native() ? " native" : ""),
 747               (thread->class_being_initialized() != nullptr ? " clinit" : ""),
 748               (method->method_holder()->is_initialized() ? "" : " being_initialized"));
 749   }
 750   return 0;
 751 JRT_END
 752 
 753 // ret_pc points into caller; we are returning caller's exception handler
 754 // for given exception
  755 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
 756 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
 757                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 758   assert(nm != nullptr, "must exist");
 759   ResourceMark rm;
 760 
 761 #if INCLUDE_JVMCI
 762   if (nm->is_compiled_by_jvmci()) {
 763     // lookup exception handler for this pc
 764     int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
 765     ExceptionHandlerTable table(nm);
 766     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 767     if (t != nullptr) {
 768       return nm->code_begin() + t->pco();
 769     } else {

1369 
1370   // determine call info & receiver
1371   // note: a) receiver is null for static calls
1372   //       b) an exception is thrown if receiver is null for non-static calls
1373   CallInfo call_info;
1374   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1375   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1376 
1377   NoSafepointVerifier nsv;
1378 
1379   methodHandle callee_method(current, call_info.selected_method());
1380 
1381   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1382          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1383          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1384          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1385          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1386 
1387   assert(!caller_nm->is_unloading(), "It should not be unloading");
1388 

1389   // tracing/debugging/statistics
1390   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1391                  (is_virtual) ? (&_resolve_virtual_ctr) :
1392                                 (&_resolve_static_ctr);
1393   Atomic::inc(addr);
1394 
1395 #ifndef PRODUCT
1396   if (TraceCallFixup) {
1397     ResourceMark rm(current);
1398     tty->print("resolving %s%s (%s) call to",
1399                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1400                Bytecodes::name(invoke_code));
1401     callee_method->print_short_name(tty);
1402     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1403                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1404   }
1405 #endif
1406 
1407   if (invoke_code == Bytecodes::_invokestatic) {
1408     assert(callee_method->method_holder()->is_initialized() ||
1409            callee_method->method_holder()->is_init_thread(current),
1410            "invalid class initialization state for invoke_static");
1411     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1412       // In order to keep class initialization check, do not patch call
1413       // site for static call when the class is not fully initialized.
1414       // Proper check is enforced by call site re-resolution on every invocation.
1415       //

1431 
1432   // Make sure the callee nmethod does not get deoptimized and removed before
1433   // we are done patching the code.
1434 
1435 
1436   CompiledICLocker ml(caller_nm);
1437   if (is_virtual && !is_optimized) {
1438     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1439     inline_cache->update(&call_info, receiver->klass());
1440   } else {
1441     // Callsite is a direct call - set it to the destination method
1442     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1443     callsite->set(callee_method);
1444   }
1445 
1446   return callee_method;
1447 }
1448 
1449 // Inline caches exist only in compiled code
1450 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1451   PerfTraceTime timer(_perf_ic_miss_total_time);
1452 
1453 #ifdef ASSERT
1454   RegisterMap reg_map(current,
1455                       RegisterMap::UpdateMap::skip,
1456                       RegisterMap::ProcessFrames::include,
1457                       RegisterMap::WalkContinuation::skip);
1458   frame stub_frame = current->last_frame();
1459   assert(stub_frame.is_runtime_frame(), "sanity check");
1460   frame caller_frame = stub_frame.sender(&reg_map);
1461   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1462 #endif /* ASSERT */
1463 
1464   methodHandle callee_method;
1465   JRT_BLOCK
1466     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1467     // Return Method* through TLS
1468     current->set_vm_result_2(callee_method());
1469   JRT_BLOCK_END
1470   // return compiled code entry point after potential safepoints
1471   return get_resolved_entry(current, callee_method);
1472 JRT_END
1473 
1474 
1475 // Handle call site that has been made non-entrant
1476 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1477   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1478 
1479   // 6243940 We might end up in here if the callee is deoptimized
1480   // as we race to call it.  We don't want to take a safepoint if
1481   // the caller was interpreted, because the caller frame would look
1482   // interpreted to the stack walkers while the arguments are now
1483   // "compiled", so it is much better to make this transition
1484   // invisible to the stack walking code. The i2c path will
1485   // place the callee method in the callee_target. It is stashed there
1486   // because finding the callee by normal means could safepoint, and
1487   // we would then have trouble GC'ing the compiled arguments.
1488   RegisterMap reg_map(current,
1489                       RegisterMap::UpdateMap::skip,
1490                       RegisterMap::ProcessFrames::include,
1491                       RegisterMap::WalkContinuation::skip);
1492   frame stub_frame = current->last_frame();
1493   assert(stub_frame.is_runtime_frame(), "sanity check");
1494   frame caller_frame = stub_frame.sender(&reg_map);
1495 
1496   if (caller_frame.is_interpreted_frame() ||
1497       caller_frame.is_entry_frame() ||
1498       caller_frame.is_upcall_stub_frame()) {

1511       // so bypassing it in c2i adapter is benign.
1512       return callee->get_c2i_no_clinit_check_entry();
1513     } else {
1514       return callee->get_c2i_entry();
1515     }
1516   }
1517 
1518   // Must be compiled to compiled path which is safe to stackwalk
1519   methodHandle callee_method;
1520   JRT_BLOCK
1521     // Force resolving of caller (if we called from compiled frame)
1522     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1523     current->set_vm_result_2(callee_method());
1524   JRT_BLOCK_END
1525   // return compiled code entry point after potential safepoints
1526   return get_resolved_entry(current, callee_method);
1527 JRT_END
1528 
1529 // Handle abstract method call
1530 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1531   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1532 
1533   // Verbose error message for AbstractMethodError.
1534   // Get the called method from the invoke bytecode.
1535   vframeStream vfst(current, true);
1536   assert(!vfst.at_end(), "Java frame must exist");
1537   methodHandle caller(current, vfst.method());
1538   Bytecode_invoke invoke(caller, vfst.bci());
1539   DEBUG_ONLY( invoke.verify(); )
1540 
1541   // Find the compiled caller frame.
1542   RegisterMap reg_map(current,
1543                       RegisterMap::UpdateMap::include,
1544                       RegisterMap::ProcessFrames::include,
1545                       RegisterMap::WalkContinuation::skip);
1546   frame stubFrame = current->last_frame();
1547   assert(stubFrame.is_runtime_frame(), "must be");
1548   frame callerFrame = stubFrame.sender(&reg_map);
1549   assert(callerFrame.is_compiled_frame(), "must be");
1550 
1551   // Install exception and return forward entry.
1552   address res = StubRoutines::throw_AbstractMethodError_entry();

1559       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1560     }
1561   JRT_BLOCK_END
1562   return res;
1563 JRT_END
1564 
1565 // return verified_code_entry if interp_only_mode is not set for the current thread;
1566 // otherwise return c2i entry.
1567 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1568   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1569     // In interp_only_mode we need to go to the interpreted entry
1570     // The c2i won't patch in this mode -- see fixup_callers_callsite
1571     return callee_method->get_c2i_entry();
1572   }
1573   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1574   return callee_method->verified_code_entry();
1575 }
1576 
1577 // resolve a static call and patch code
1578 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current))
1579   PerfTraceTime timer(_perf_resolve_static_total_time);
1580 
1581   methodHandle callee_method;
1582   bool enter_special = false;
1583   JRT_BLOCK
1584     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1585     current->set_vm_result_2(callee_method());
1586   JRT_BLOCK_END
1587   // return compiled code entry point after potential safepoints
1588   return get_resolved_entry(current, callee_method);
1589 JRT_END
1590 
1591 // resolve virtual call and update inline cache to monomorphic
1592 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1593   PerfTraceTime timer(_perf_resolve_virtual_total_time);
1594 
1595   methodHandle callee_method;
1596   JRT_BLOCK
1597     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1598     current->set_vm_result_2(callee_method());
1599   JRT_BLOCK_END
1600   // return compiled code entry point after potential safepoints
1601   return get_resolved_entry(current, callee_method);
1602 JRT_END
1603 
1604 
1605 // Resolve a virtual call that can be statically bound (e.g., always
1606 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1607 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1608   PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1609 
1610   methodHandle callee_method;
1611   JRT_BLOCK
1612     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1613     current->set_vm_result_2(callee_method());
1614   JRT_BLOCK_END
1615   // return compiled code entry point after potential safepoints
1616   return get_resolved_entry(current, callee_method);
1617 JRT_END
1618 
1619 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1620   JavaThread* current = THREAD;
1621   ResourceMark rm(current);
1622   CallInfo call_info;
1623   Bytecodes::Code bc;
1624 
1625   // receiver is null for static calls. An exception is thrown for null
1626   // receivers for non-static calls
1627   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1628 
1629   methodHandle callee_method(current, call_info.selected_method());
1630 

1631   Atomic::inc(&_ic_miss_ctr);
1632 
1633 #ifndef PRODUCT
1634   // Statistics & Tracing
1635   if (TraceCallFixup) {
1636     ResourceMark rm(current);
1637     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1638     callee_method->print_short_name(tty);
1639     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1640   }
1641 
1642   if (ICMissHistogram) {
1643     MutexLocker m(VMStatistic_lock);
1644     RegisterMap reg_map(current,
1645                         RegisterMap::UpdateMap::skip,
1646                         RegisterMap::ProcessFrames::include,
1647                         RegisterMap::WalkContinuation::skip);
1648     frame f = current->last_frame().real_sender(&reg_map); // skip runtime stub
1649     // produce statistics under the lock
1650     trace_ic_miss(f.pc());
1651   }
1652 #endif
1653 

1736             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1737             cdc->set_to_clean();
1738             break;
1739           }
1740 
1741           case relocInfo::virtual_call_type: {
1742             // compiled, dispatched call (which used to call an interpreted method)
1743             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1744             inline_cache->set_to_clean();
1745             break;
1746           }
1747           default:
1748             break;
1749         }
1750       }
1751     }
1752   }
1753 
1754   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1755 


1756   Atomic::inc(&_wrong_method_ctr);
1757 
1758 #ifndef PRODUCT
1759   if (TraceCallFixup) {
1760     ResourceMark rm(current);
1761     tty->print("handle_wrong_method reresolving call to");
1762     callee_method->print_short_name(tty);
1763     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1764   }
1765 #endif
1766 
1767   return callee_method;
1768 }
1769 
1770 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1771   // Ideally, faulting unsafe accesses would throw the error
1772   // synchronously. For now the faulting instruction is
1773   // skipped over (effectively turning it into a no-op) and an
1774   // asynchronous exception is raised, which the thread will
1775   // handle at a later point. If the instruction is a load it will
1776   // return garbage.
1777 
1778   // Request an async exception.

2016 // This is only called when CheckJNICalls is true, and only
2017 // for virtual thread termination.
2018 JRT_LEAF(void,  SharedRuntime::log_jni_monitor_still_held())
2019   assert(CheckJNICalls, "Only call this when checking JNI usage");
2020   if (log_is_enabled(Debug, jni)) {
2021     JavaThread* current = JavaThread::current();
2022     int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2023     int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2024     log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2025                    ") exiting with Objects still locked by JNI MonitorEnter.",
2026                    vthread_id, carrier_id);
2027   }
2028 JRT_END
2029 
2030 #ifndef PRODUCT
2031 
2032 void SharedRuntime::print_statistics() {
2033   ttyLocker ttyl;
2034   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
2035 
2036   SharedRuntime::print_ic_miss_histogram_on(tty);
2037   SharedRuntime::print_counters_on(tty);
2038   AdapterHandlerLibrary::print_statistics_on(tty);
































2039 
2040   if (xtty != nullptr)  xtty->tail("statistics");
2041 }
2042 
2043 //void SharedRuntime::print_counters_on(outputStream* st) {
2044 //  // Dump the JRT_ENTRY counters
2045 //  if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2046 //  if (_new_array_ctr)    st->print_cr("%5u new array requires GC", _new_array_ctr);
2047 //  if (_multi2_ctr)       st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2048 //  if (_multi3_ctr)       st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2049 //  if (_multi4_ctr)       st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2050 //  if (_multi5_ctr)       st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2051 //
2052 //  st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2053 //  st->print_cr("%5u wrong method", _wrong_method_ctr);
2054 //  st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2055 //  st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2056 //  st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2057 //
2058 //  if (_mon_enter_stub_ctr)       st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2059 //  if (_mon_exit_stub_ctr)        st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2060 //  if (_mon_enter_ctr)            st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2061 //  if (_mon_exit_ctr)             st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2062 //  if (_partial_subtype_ctr)      st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2063 //  if (_jbyte_array_copy_ctr)     st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2064 //  if (_jshort_array_copy_ctr)    st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2065 //  if (_jint_array_copy_ctr)      st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2066 //  if (_jlong_array_copy_ctr)     st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2067 //  if (_oop_array_copy_ctr)       st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2068 //  if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2069 //  if (_unsafe_array_copy_ctr)    st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2070 //  if (_generic_array_copy_ctr)   st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2071 //  if (_slow_array_copy_ctr)      st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2072 //  if (_find_handler_ctr)         st->print_cr("%5u find exception handler", _find_handler_ctr);
2073 //  if (_rethrow_ctr)              st->print_cr("%5u rethrow handler", _rethrow_ctr);
2074 //  if (_unsafe_set_memory_ctr) st->print_cr("%5u unsafe set memory ops", _unsafe_set_memory_ctr);
2075 //}
2076 
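     // Helper for the statistics printing below; MAX2 clamps the denominator to 1 so that
     // an empty counter yields 0% instead of dividing by zero.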
2077 inline double percent(int64_t x, int64_t y) {
2078   return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2079 }
2080 
2081 class MethodArityHistogram {
2082  public:
2083   enum { MAX_ARITY = 256 };
2084  private:
2085   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2086   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2087   static uint64_t _total_compiled_calls;
2088   static uint64_t _max_compiled_calls_per_method;
2089   static int _max_arity;                       // max. arity seen
2090   static int _max_size;                        // max. arg size seen
2091 
2092   static void add_method_to_histogram(nmethod* nm) {
2093     Method* method = (nm == nullptr) ? nullptr : nm->method();
2094     if (method != nullptr) {
2095       ArgumentCount args(method->signature());
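           // Non-static methods get one extra slot for the implicit receiver argument.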
2096       int arity   = args.size() + (method->is_static() ? 0 : 1);

2141     // Take the Compile_lock to protect against changes in the CodeBlob structures
2142     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2143     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2144     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2145     _max_arity = _max_size = 0;
2146     _total_compiled_calls = 0;
2147     _max_compiled_calls_per_method = 0;
2148     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2149     CodeCache::nmethods_do(add_method_to_histogram);
2150     print_histogram();
2151   }
2152 };
2153 
2154 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2155 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2156 uint64_t MethodArityHistogram::_total_compiled_calls;
2157 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2158 int MethodArityHistogram::_max_arity;
2159 int MethodArityHistogram::_max_size;
2160 
2161 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2162   st->print_cr("Calls from compiled code:");
2163   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2164   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2165   int64_t mono_i = _nof_interface_calls;
2166   st->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2167   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2168   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2169   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2170   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2171   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2172   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2173   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2174   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2175   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2176   st->cr();
2177   st->print_cr("Note 1: counter updates are not MT-safe.");
2178   st->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2179   st->print_cr("        %% in nested categories are relative to their category");
2180   st->print_cr("        (and thus add up to more than 100%% with inlining)");
2181   st->cr();

2394                   AdapterFingerPrint::equals>;
2395 static AdapterHandlerTable* _adapter_handler_table;
2396 
2397 // Find an entry with the same fingerprint if one exists.
2398 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2399   NOT_PRODUCT(_lookups++);
2400   assert_lock_strong(AdapterHandlerLibrary_lock);
2401   AdapterFingerPrint fp(total_args_passed, sig_bt);
2402   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2403   if (entry != nullptr) {
2404 #ifndef PRODUCT
2405     if (fp.is_compact()) _compact++;
2406     _hits++;
2407 #endif
2408     return *entry;
2409   }
2410   return nullptr;
2411 }
2412 
2413 #ifndef PRODUCT
2414 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2415   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2416     return sizeof(*key) + sizeof(*a);
2417   };
2418   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2419   ts.print(st, "AdapterHandlerTable");
2420   st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2421                _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2422   st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2423                _lookups, _equals, _hits, _compact);
2424 }
2425 #endif // !PRODUCT
2426 
2427 // ---------------------------------------------------------------------------
2428 // Implementation of AdapterHandlerLibrary
2429 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2430 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2431 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2432 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2433 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2434 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
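     // Presumably the size of the temporary buffer used for adapter generation (see buffer_blob() below).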
2435 const int AdapterHandlerLibrary_size = 16*K;
2436 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2437 
2438 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2439   return _buffer;
2440 }
2441 
2442 static void post_adapter_creation(const AdapterBlob* new_adapter,
2443                                   const AdapterHandlerEntry* entry) {
2444   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2445     char blob_id[256];

2639         delete comparison_entry;
2640       }
2641 #endif
2642       return entry;
2643     }
2644 
2645     entry = create_adapter(new_adapter, total_args_passed, sig_bt, /* allocate_code_blob */ true);
2646   }
2647 
2648   // Outside of the lock
2649   if (new_adapter != nullptr) {
2650     post_adapter_creation(new_adapter, entry);
2651   }
2652   return entry;
2653 }
2654 
2655 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2656                                                            int total_args_passed,
2657                                                            BasicType* sig_bt,
2658                                                            bool allocate_code_blob) {
2659   if (UsePerfData) {
2660     ClassLoader::perf_method_adapters_count()->inc();
2661   }
2662 
2663   // StubRoutines::_final_stubs_code is initialized after this function can already be called. As a result,
2664   // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated prior
2665   // to all of StubRoutines::_final_stubs_code being set. The checks in question are runtime range checks
2666   // generated in an I2C stub which ensure that the I2C stub is called from an interpreter frame or from stubs.
2667   bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;
2668 
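       // Use a small on-stack array in the common case; signatures with more than 16
       // arguments fall back to a resource-allocated array.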
2669   VMRegPair stack_regs[16];
2670   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2671 
2672   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2673   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2674   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2675   CodeBuffer buffer(buf);
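       // Small stack-local area for the relocation records of the generated adapter.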
2676   short buffer_locs[20];
2677   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2678                                           sizeof(buffer_locs)/sizeof(relocInfo));
2679 
2680   // Make a C heap allocated version of the fingerprint to store in the adapter
2681   AdapterFingerPrint* fingerprint = new AdapterFingerPrint(total_args_passed, sig_bt);

3091   assert(found, "Should have found handler");
3092 }
3093 
3094 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3095   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3096   if (get_i2c_entry() != nullptr) {
3097     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3098   }
3099   if (get_c2i_entry() != nullptr) {
3100     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3101   }
3102   if (get_c2i_unverified_entry() != nullptr) {
3103     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3104   }
3105   if (get_c2i_no_clinit_check_entry() != nullptr) {
3106     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3107   }
3108   st->cr();
3109 }
3110 








3111 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3112   assert(current == JavaThread::current(), "pre-condition");
3113   StackOverflow* overflow_state = current->stack_overflow_state();
3114   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
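       // Reset the activation watermark to the stack base so the reserved zone can be triggered again.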
3115   overflow_state->set_reserved_stack_activation(current->stack_base());
3116 JRT_END
3117 
3118 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3119   ResourceMark rm(current);
3120   frame activation;
3121   nmethod* nm = nullptr;
3122   int count = 1;
3123 
3124   assert(fr.is_java_frame(), "Must start on Java frame");
3125 
3126   RegisterMap map(JavaThread::current(),
3127                   RegisterMap::UpdateMap::skip,
3128                   RegisterMap::ProcessFrames::skip,
3129                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3130   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {