src/hotspot/share/runtime/sharedRuntime.cpp

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"

  26 #include "classfile/javaClasses.inline.hpp"
  27 #include "classfile/stringTable.hpp"
  28 #include "classfile/vmClasses.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "code/codeCache.hpp"
  31 #include "code/compiledIC.hpp"
  32 #include "code/compiledMethod.inline.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "code/vtableStubs.hpp"
  35 #include "compiler/abstractCompiler.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/disassembler.hpp"
  38 #include "gc/shared/barrierSet.hpp"
  39 #include "gc/shared/collectedHeap.hpp"
  40 #include "gc/shared/gcLocker.inline.hpp"
  41 #include "interpreter/interpreter.hpp"
  42 #include "interpreter/interpreterRuntime.hpp"
  43 #include "jvm.h"
  44 #include "jfr/jfrEvents.hpp"
  45 #include "logging/log.hpp"
  46 #include "memory/resourceArea.hpp"
  47 #include "memory/universe.hpp"
  48 #include "metaprogramming/primitiveConversions.hpp"
  49 #include "oops/klass.hpp"
  50 #include "oops/method.inline.hpp"
  51 #include "oops/objArrayKlass.hpp"
  52 #include "oops/oop.inline.hpp"
  53 #include "prims/forte.hpp"
  54 #include "prims/jvmtiExport.hpp"
  55 #include "prims/jvmtiThreadState.hpp"
  56 #include "prims/methodHandles.hpp"
  57 #include "prims/nativeLookup.hpp"
  58 #include "runtime/atomic.hpp"
  59 #include "runtime/frame.inline.hpp"
  60 #include "runtime/handles.inline.hpp"
  61 #include "runtime/init.hpp"
  62 #include "runtime/interfaceSupport.inline.hpp"
  63 #include "runtime/java.hpp"
  64 #include "runtime/javaCalls.hpp"
  65 #include "runtime/jniHandles.inline.hpp"

  66 #include "runtime/sharedRuntime.hpp"
  67 #include "runtime/stackWatermarkSet.hpp"
  68 #include "runtime/stubRoutines.hpp"
  69 #include "runtime/synchronizer.hpp"
  70 #include "runtime/vframe.inline.hpp"
  71 #include "runtime/vframeArray.hpp"
  72 #include "runtime/vm_version.hpp"

  73 #include "utilities/copy.hpp"
  74 #include "utilities/dtrace.hpp"
  75 #include "utilities/events.hpp"
  76 #include "utilities/resourceHash.hpp"
  77 #include "utilities/macros.hpp"
  78 #include "utilities/xmlstream.hpp"
  79 #ifdef COMPILER1
  80 #include "c1/c1_Runtime1.hpp"
  81 #endif
  82 #if INCLUDE_JFR
  83 #include "jfr/jfr.hpp"
  84 #endif
  85 
  86 // Shared stub locations
  87 RuntimeStub*        SharedRuntime::_wrong_method_blob;
  88 RuntimeStub*        SharedRuntime::_wrong_method_abstract_blob;
  89 RuntimeStub*        SharedRuntime::_ic_miss_blob;
  90 RuntimeStub*        SharedRuntime::_resolve_opt_virtual_call_blob;
  91 RuntimeStub*        SharedRuntime::_resolve_virtual_call_blob;
  92 RuntimeStub*        SharedRuntime::_resolve_static_call_blob;
  93 address             SharedRuntime::_resolve_static_call_entry;
  94 
  95 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  96 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
  97 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
  98 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
  99 
 100 #ifdef COMPILER2
 101 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
 102 #endif // COMPILER2
 103 
 104 nmethod*            SharedRuntime::_cont_doYield_stub;
 105
 106 //----------------------------generate_stubs-----------------------------------
 107 void SharedRuntime::generate_stubs() {
 108   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
 109   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
 110   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
 111   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
 112   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
 113   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
 114   _resolve_static_call_entry           = _resolve_static_call_blob->entry_point();
 115 
 116   AdapterHandlerLibrary::initialize();
 117 
 118 #if COMPILER2_OR_JVMCI
 119   // Vectors are generated only by C2 and JVMCI.
 120   bool support_wide = is_wide_vector(MaxVectorSize);
 121   if (support_wide) {
 122     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 123   }
 124 #endif // COMPILER2_OR_JVMCI
 125   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 126   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 127 
 128   generate_deopt_blob();
 129 
 130 #ifdef COMPILER2
 131   generate_uncommon_trap_blob();
 132 #endif // COMPILER2
 133 }
 134 
 135 #include <math.h>
 136 
 137 // Implementation of SharedRuntime
 138 
 139 #ifndef PRODUCT
 140 // For statistics
 141 uint SharedRuntime::_ic_miss_ctr = 0;
 142 uint SharedRuntime::_wrong_method_ctr = 0;
 143 uint SharedRuntime::_resolve_static_ctr = 0;
 144 uint SharedRuntime::_resolve_virtual_ctr = 0;
 145 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;


 146 uint SharedRuntime::_implicit_null_throws = 0;
 147 uint SharedRuntime::_implicit_div0_throws = 0;
 148 
 149 int64_t SharedRuntime::_nof_normal_calls = 0;
 150 int64_t SharedRuntime::_nof_inlined_calls = 0;
 151 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 152 int64_t SharedRuntime::_nof_static_calls = 0;
 153 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 154 int64_t SharedRuntime::_nof_interface_calls = 0;
 155 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 156 
 157 uint SharedRuntime::_new_instance_ctr=0;
 158 uint SharedRuntime::_new_array_ctr=0;
 159 uint SharedRuntime::_multi2_ctr=0;
 160 uint SharedRuntime::_multi3_ctr=0;
 161 uint SharedRuntime::_multi4_ctr=0;
 162 uint SharedRuntime::_multi5_ctr=0;
 163 uint SharedRuntime::_mon_enter_stub_ctr=0;
 164 uint SharedRuntime::_mon_exit_stub_ctr=0;
 165 uint SharedRuntime::_mon_enter_ctr=0;

 178 uint SharedRuntime::_rethrow_ctr=0;
 179 
 180 int     SharedRuntime::_ICmiss_index                    = 0;
 181 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 182 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 183 
 184 
 185 void SharedRuntime::trace_ic_miss(address at) {
 186   for (int i = 0; i < _ICmiss_index; i++) {
 187     if (_ICmiss_at[i] == at) {
 188       _ICmiss_count[i]++;
 189       return;
 190     }
 191   }
 192   int index = _ICmiss_index++;
 193   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 194   _ICmiss_at[index] = at;
 195   _ICmiss_count[index] = 1;
 196 }
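// Note on the bounded table above: once maxICmiss_count distinct miss sites
// have been recorded, _ICmiss_index stays pinned at the last slot, so each
// additional previously-unseen site overwrites that final entry (and resets
// its count to 1) rather than growing the histogram.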
 197 
 198 void SharedRuntime::print_ic_miss_histogram() {
 199   if (ICMissHistogram) {
 200     tty->print_cr("IC Miss Histogram:");
 201     int tot_misses = 0;
 202     for (int i = 0; i < _ICmiss_index; i++) {
 203       tty->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 204       tot_misses += _ICmiss_count[i];
 205     }
 206     tty->print_cr("Total IC misses: %7d", tot_misses);
 207   }
 208 }
 209 #endif // PRODUCT
 210 
 211 
 212 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
 213   return x * y;
 214 JRT_END
 215 
 216 
 217 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 218   if (x == min_jlong && y == CONST64(-1)) {
 219     return x;
 220   } else {
 221     return x / y;
 222   }
 223 JRT_END
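// Worked example of the corner case guarded above and in lrem below:
// min_jlong is -2^63, so min_jlong / -1 would be +2^63, which does not fit in
// a jlong (and traps in the hardware divide on some platforms). The JLS defines
// the quotient as min_jlong and the remainder as 0, which is what is returned:
//
//   jlong q = SharedRuntime::ldiv(-1, min_jlong);  // q == min_jlong
//   jlong r = SharedRuntime::lrem(-1, min_jlong);  // r == 0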
 224 
 225 
 226 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 227   if (x == min_jlong && y == CONST64(-1)) {
 228     return 0;
 229   } else {

 660   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 661   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 662   JNIHandles::destroy_local(vthread);
 663 JRT_END
 664 #endif // INCLUDE_JVMTI
 665 
 666 // The interpreter code to call this tracing function is only
 667 // called/generated when UL is enabled for the redefine+class tags at the
 668 // right level. Since obsolete methods are never compiled, we don't have
 669 // to modify the compilers to generate calls to this function.
 670 //
 671 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
 672     JavaThread* thread, Method* method))
 673   if (method->is_obsolete()) {
 674     // We are calling an obsolete method, but this is not necessarily
 675     // an error. Our method could have been redefined just after we
 676     // fetched the Method* from the constant pool.
 677     ResourceMark rm;
 678     log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
 679   }
 680   return 0;
 681 JRT_END
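// For illustration, the unified logging selection that enables the trace line
// above (tags redefine, class, obsolete at trace level) is:
//
//   java -Xlog:redefine+class+obsolete=trace ...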
 682 
 683 // ret_pc points into caller; we are returning caller's exception handler
 684 // for given exception
 685 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
 686 address SharedRuntime::compute_compiled_exc_handler(CompiledMethod* cm, address ret_pc, Handle& exception,
 687                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 688   assert(cm != nullptr, "must exist");
 689   ResourceMark rm;
 690 
 691 #if INCLUDE_JVMCI
 692   if (cm->is_compiled_by_jvmci()) {
 693     // lookup exception handler for this pc
 694     int catch_pco = pointer_delta_as_int(ret_pc, cm->code_begin());
 695     ExceptionHandlerTable table(cm);
 696     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 697     if (t != nullptr) {
 698       return cm->code_begin() + t->pco();
 699     } else {

1300 
1301   // determine call info & receiver
1302   // note: a) receiver is null for static calls
1303   //       b) an exception is thrown if receiver is null for non-static calls
1304   CallInfo call_info;
1305   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1306   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1307 
1308   NoSafepointVerifier nsv;
1309 
1310   methodHandle callee_method(current, call_info.selected_method());
1311 
1312   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1313          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1314          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1315          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1316          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1317 
1318   assert(!caller_nm->is_unloading(), "It should not be unloading");
1319 
1320 #ifndef PRODUCT
1321   // tracing/debugging/statistics
1322   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1323                  (is_virtual) ? (&_resolve_virtual_ctr) :
1324                                 (&_resolve_static_ctr);
1325   Atomic::inc(addr);
1326 

1327   if (TraceCallFixup) {
1328     ResourceMark rm(current);
1329     tty->print("resolving %s%s (%s) call to",
1330                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1331                Bytecodes::name(invoke_code));
1332     callee_method->print_short_name(tty);
1333     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1334                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1335   }
1336 #endif
1337 
1338   if (invoke_code == Bytecodes::_invokestatic) {
1339     assert(callee_method->method_holder()->is_initialized() ||
1340            callee_method->method_holder()->is_init_thread(current),
1341            "invalid class initialization state for invoke_static");
1342     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1343       // In order to keep class initialization check, do not patch call
1344       // site for static call when the class is not fully initialized.
1345       // Proper check is enforced by call site re-resolution on every invocation.
1346       //

1362 
1363   // Make sure the callee nmethod does not get deoptimized and removed before
1364   // we are done patching the code.
1365 
1366 
1367   CompiledICLocker ml(caller_nm);
1368   if (is_virtual && !is_optimized) {
1369     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1370     inline_cache->update(&call_info, receiver->klass());
1371   } else {
1372     // Callsite is a direct call - set it to the destination method
1373     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1374     callsite->set(callee_method);
1375   }
1376 
1377   return callee_method;
1378 }
1379 
1380 // Inline caches exist only in compiled code
1381 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))


1382 #ifdef ASSERT
1383   RegisterMap reg_map(current,
1384                       RegisterMap::UpdateMap::skip,
1385                       RegisterMap::ProcessFrames::include,
1386                       RegisterMap::WalkContinuation::skip);
1387   frame stub_frame = current->last_frame();
1388   assert(stub_frame.is_runtime_frame(), "sanity check");
1389   frame caller_frame = stub_frame.sender(&reg_map);
1390   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1391 #endif /* ASSERT */
1392 
1393   methodHandle callee_method;
1394   JRT_BLOCK
1395     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1396     // Return Method* through TLS
1397     current->set_vm_result_2(callee_method());
1398   JRT_BLOCK_END
1399   // return compiled code entry point after potential safepoints
1400   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1401   return callee_method->verified_code_entry();
1402 JRT_END
1403 
1404 
1405 // Handle call site that has been made non-entrant
1406 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))


1407   // 6243940 We might end up in here if the callee is deoptimized
1408   // as we race to call it.  We don't want to take a safepoint if
1409   // the caller was interpreted because the caller frame will look
1410   // interpreted to the stack walkers and arguments are now
1411   // "compiled" so it is much better to make this transition
1412   // invisible to the stack walking code. The i2c path will
1413   // place the callee method in the callee_target. It is stashed
1414   // there because if we try and find the callee by normal means a
 1415   // safepoint is possible and we would have trouble gc'ing the compiled args.
1416   RegisterMap reg_map(current,
1417                       RegisterMap::UpdateMap::skip,
1418                       RegisterMap::ProcessFrames::include,
1419                       RegisterMap::WalkContinuation::skip);
1420   frame stub_frame = current->last_frame();
1421   assert(stub_frame.is_runtime_frame(), "sanity check");
1422   frame caller_frame = stub_frame.sender(&reg_map);
1423 
1424   if (caller_frame.is_interpreted_frame() ||
1425       caller_frame.is_entry_frame() ||
1426       caller_frame.is_upcall_stub_frame()) {

1440       return callee->get_c2i_no_clinit_check_entry();
1441     } else {
1442       return callee->get_c2i_entry();
1443     }
1444   }
1445 
1446   // Must be compiled to compiled path which is safe to stackwalk
1447   methodHandle callee_method;
1448   JRT_BLOCK
1449     // Force resolving of caller (if we called from compiled frame)
1450     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1451     current->set_vm_result_2(callee_method());
1452   JRT_BLOCK_END
1453   // return compiled code entry point after potential safepoints
1454   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1455   return callee_method->verified_code_entry();
1456 JRT_END
1457 
1458 // Handle abstract method call
1459 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))


1460   // Verbose error message for AbstractMethodError.
1461   // Get the called method from the invoke bytecode.
1462   vframeStream vfst(current, true);
1463   assert(!vfst.at_end(), "Java frame must exist");
1464   methodHandle caller(current, vfst.method());
1465   Bytecode_invoke invoke(caller, vfst.bci());
1466   DEBUG_ONLY( invoke.verify(); )
1467 
1468   // Find the compiled caller frame.
1469   RegisterMap reg_map(current,
1470                       RegisterMap::UpdateMap::include,
1471                       RegisterMap::ProcessFrames::include,
1472                       RegisterMap::WalkContinuation::skip);
1473   frame stubFrame = current->last_frame();
1474   assert(stubFrame.is_runtime_frame(), "must be");
1475   frame callerFrame = stubFrame.sender(&reg_map);
1476   assert(callerFrame.is_compiled_frame(), "must be");
1477 
1478   // Install exception and return forward entry.
1479   address res = StubRoutines::throw_AbstractMethodError_entry();
1480   JRT_BLOCK
1481     methodHandle callee(current, invoke.static_target(current));
1482     if (!callee.is_null()) {
1483       oop recv = callerFrame.retrieve_receiver(&reg_map);
1484       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1485       res = StubRoutines::forward_exception_entry();
1486       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1487     }
1488   JRT_BLOCK_END
1489   return res;
1490 JRT_END
1491 
1492 
1493 // resolve a static call and patch code
1494 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))


1495   methodHandle callee_method;
1496   bool enter_special = false;
1497   JRT_BLOCK
1498     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1499     current->set_vm_result_2(callee_method());
1500 
1501     if (current->is_interp_only_mode()) {
1502       RegisterMap reg_map(current,
1503                           RegisterMap::UpdateMap::skip,
1504                           RegisterMap::ProcessFrames::include,
1505                           RegisterMap::WalkContinuation::skip);
1506       frame stub_frame = current->last_frame();
1507       assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1508       frame caller = stub_frame.sender(&reg_map);
1509       enter_special = caller.cb() != nullptr && caller.cb()->is_compiled()
1510         && caller.cb()->as_compiled_method()->method()->is_continuation_enter_intrinsic();
1511     }
1512   JRT_BLOCK_END
1513 
1514   if (current->is_interp_only_mode() && enter_special) {
1515     // enterSpecial is compiled and calls this method to resolve the call to Continuation::enter
1516     // but in interp_only_mode we need to go to the interpreted entry
1517     // The c2i won't patch in this mode -- see fixup_callers_callsite
1518     //
1519     // This should probably be done in all cases, not just enterSpecial (see JDK-8218403),
1520     // but that's part of a larger fix, and the situation is worse for enterSpecial, as it has no
1521     // interpreted version.
1522     return callee_method->get_c2i_entry();
1523   }
1524 
1525   // return compiled code entry point after potential safepoints
1526   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1527   return callee_method->verified_code_entry();
1528 JRT_END
1529 
1530 
1531 // resolve virtual call and update inline cache to monomorphic
1532 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))


1533   methodHandle callee_method;
1534   JRT_BLOCK
1535     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1536     current->set_vm_result_2(callee_method());
1537   JRT_BLOCK_END
1538   // return compiled code entry point after potential safepoints
1539   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1540   return callee_method->verified_code_entry();
1541 JRT_END
1542 
1543 
1544 // Resolve a virtual call that can be statically bound (e.g., always
1545 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1546 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))


1547   methodHandle callee_method;
1548   JRT_BLOCK
1549     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1550     current->set_vm_result_2(callee_method());
1551   JRT_BLOCK_END
1552   // return compiled code entry point after potential safepoints
1553   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1554   return callee_method->verified_code_entry();
1555 JRT_END
1556 
1557 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1558   JavaThread* current = THREAD;
1559   ResourceMark rm(current);
1560   CallInfo call_info;
1561   Bytecodes::Code bc;
1562 
1563   // receiver is null for static calls. An exception is thrown for null
1564   // receivers for non-static calls
1565   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1566 
1567   methodHandle callee_method(current, call_info.selected_method());
1568 
1569 #ifndef PRODUCT
1570   Atomic::inc(&_ic_miss_ctr);
1571 

1572   // Statistics & Tracing
1573   if (TraceCallFixup) {
1574     ResourceMark rm(current);
1575     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1576     callee_method->print_short_name(tty);
1577     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1578   }
1579 
1580   if (ICMissHistogram) {
1581     MutexLocker m(VMStatistic_lock);
1582     RegisterMap reg_map(current,
1583                         RegisterMap::UpdateMap::skip,
1584                         RegisterMap::ProcessFrames::include,
1585                         RegisterMap::WalkContinuation::skip);
1586     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1587     // produce statistics under the lock
1588     trace_ic_miss(f.pc());
1589   }
1590 #endif
1591 

1673             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1674             cdc->set_to_clean();
1675             break;
1676           }
1677 
1678           case relocInfo::virtual_call_type: {
1679             // compiled, dispatched call (which used to call an interpreted method)
1680             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1681             inline_cache->set_to_clean();
1682             break;
1683           }
1684           default:
1685             break;
1686         }
1687       }
1688     }
1689   }
1690 
1691   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1692 
1693 
1694 #ifndef PRODUCT
1695   Atomic::inc(&_wrong_method_ctr);
1696 

1697   if (TraceCallFixup) {
1698     ResourceMark rm(current);
1699     tty->print("handle_wrong_method reresolving call to");
1700     callee_method->print_short_name(tty);
1701     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1702   }
1703 #endif
1704 
1705   return callee_method;
1706 }
1707 
1708 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1709   // The faulting unsafe accesses should be changed to throw the error
1710   // synchronously instead. Meanwhile the faulting instruction will be
1711   // skipped over (effectively turning it into a no-op) and an
1712   // asynchronous exception will be raised which the thread will
1713   // handle at a later point. If the instruction is a load it will
1714   // return garbage.
1715 
1716   // Request an async exception.

1939     if (CheckJNICalls) {
1940       fatal("Object has been unlocked by JNI");
1941     }
1942     return;
1943   }
1944   ObjectSynchronizer::exit(obj, lock, current);
1945 }
1946 
1947 // Handles the uncommon cases of monitor unlocking in compiled code
1948 JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
1949   assert(current == JavaThread::current(), "pre-condition");
1950   SharedRuntime::monitor_exit_helper(obj, lock, current);
1951 JRT_END
1952 
1953 #ifndef PRODUCT
1954 
1955 void SharedRuntime::print_statistics() {
1956   ttyLocker ttyl;
1957   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
1958 
1959   SharedRuntime::print_ic_miss_histogram();
1960 
1961   // Dump the JRT_ENTRY counters
1962   if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
1963   if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
1964   if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
1965   if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
1966   if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
1967   if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
1968 
1969   tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
1970   tty->print_cr("%5u wrong method", _wrong_method_ctr);
1971   tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
1972   tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
1973   tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
1974 
1975   if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
1976   if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
1977   if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
1978   if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
1979   if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
1980   if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
1981   if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
1982   if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
1983   if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
1984   if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
1985   if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
1986   if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
1987   if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
1988   if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
1989   if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
1990   if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
1991 
1992   AdapterHandlerLibrary::print_statistics();
1993 
1994   if (xtty != nullptr)  xtty->tail("statistics");
1995 }
 1996
1997 inline double percent(int64_t x, int64_t y) {
1998   return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
1999 }
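// For example, percent(1, 4) == 25.0; a zero denominator is clamped to 1 by
// MAX2, so percent(5, 0) evaluates to 500.0 instead of dividing by zero.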
2000 
2001 class MethodArityHistogram {
2002  public:
2003   enum { MAX_ARITY = 256 };
2004  private:
2005   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2006   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2007   static uint64_t _total_compiled_calls;
2008   static uint64_t _max_compiled_calls_per_method;
2009   static int _max_arity;                       // max. arity seen
2010   static int _max_size;                        // max. arg size seen
2011 
2012   static void add_method_to_histogram(nmethod* nm) {
2013     Method* method = (nm == nullptr) ? nullptr : nm->method();
2014     if (method != nullptr) {
2015       ArgumentCount args(method->signature());
2016       int arity   = args.size() + (method->is_static() ? 0 : 1);

2061     // Take the Compile_lock to protect against changes in the CodeBlob structures
2062     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2063     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2064     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2065     _max_arity = _max_size = 0;
2066     _total_compiled_calls = 0;
2067     _max_compiled_calls_per_method = 0;
2068     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2069     CodeCache::nmethods_do(add_method_to_histogram);
2070     print_histogram();
2071   }
2072 };
2073 
2074 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2075 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2076 uint64_t MethodArityHistogram::_total_compiled_calls;
2077 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2078 int MethodArityHistogram::_max_arity;
2079 int MethodArityHistogram::_max_size;
2080 
2081 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2082   tty->print_cr("Calls from compiled code:");
2083   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2084   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2085   int64_t mono_i = _nof_interface_calls;
2086   tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
2087   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
2088   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2089   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
2090   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2091   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
2092   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2093   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
2094   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2095   tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2096   tty->cr();
2097   tty->print_cr("Note 1: counter updates are not MT-safe.");
2098   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2099   tty->print_cr("        %% in nested categories are relative to their category");
2100   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2101   tty->cr();

2314                   AdapterFingerPrint::equals>;
2315 static AdapterHandlerTable* _adapter_handler_table;
2316 
 2317 // Find an entry with the same fingerprint if it exists
2318 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2319   NOT_PRODUCT(_lookups++);
2320   assert_lock_strong(AdapterHandlerLibrary_lock);
2321   AdapterFingerPrint fp(total_args_passed, sig_bt);
2322   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2323   if (entry != nullptr) {
2324 #ifndef PRODUCT
2325     if (fp.is_compact()) _compact++;
2326     _hits++;
2327 #endif
2328     return *entry;
2329   }
2330   return nullptr;
2331 }
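// A minimal sketch of the surrounding lookup/create flow (the helper name below
// is hypothetical; the real caller also publishes the new adapter outside the
// lock via post_adapter_creation, as shown further down):
//
//   static AdapterHandlerEntry* find_or_create(int total_args_passed, BasicType* sig_bt) {
//     AdapterBlob* new_adapter = nullptr;
//     AdapterHandlerEntry* entry = nullptr;
//     {
//       MutexLocker mu(AdapterHandlerLibrary_lock);
//       entry = lookup(total_args_passed, sig_bt);   // fast path: fingerprint already in table
//       if (entry == nullptr) {
//         entry = AdapterHandlerLibrary::create_adapter(new_adapter, total_args_passed,
//                                                       sig_bt, /* allocate_code_blob */ true);
//       }
//     }
//     if (new_adapter != nullptr) {
//       post_adapter_creation(new_adapter, entry);   // outside the lock
//     }
//     return entry;
//   }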
2332 
2333 #ifndef PRODUCT
2334 static void print_table_statistics() {
2335   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2336     return sizeof(*key) + sizeof(*a);
2337   };
2338   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2339   ts.print(tty, "AdapterHandlerTable");
2340   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2341                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2342   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2343                 _lookups, _equals, _hits, _compact);
2344 }
2345 #endif
2346 
2347 // ---------------------------------------------------------------------------
2348 // Implementation of AdapterHandlerLibrary
2349 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2350 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2351 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2352 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2353 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2354 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2355 const int AdapterHandlerLibrary_size = 16*K;
2356 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2357 
2358 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2359   return _buffer;
2360 }
2361 
2362 static void post_adapter_creation(const AdapterBlob* new_adapter,
2363                                   const AdapterHandlerEntry* entry) {
2364   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2365     char blob_id[256];

2559         delete comparison_entry;
2560       }
2561 #endif
2562       return entry;
2563     }
2564 
2565     entry = create_adapter(new_adapter, total_args_passed, sig_bt, /* allocate_code_blob */ true);
2566   }
2567 
2568   // Outside of the lock
2569   if (new_adapter != nullptr) {
2570     post_adapter_creation(new_adapter, entry);
2571   }
2572   return entry;
2573 }
2574 
2575 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2576                                                            int total_args_passed,
2577                                                            BasicType* sig_bt,
2578                                                            bool allocate_code_blob) {



2579 
2580   // StubRoutines::_final_stubs_code is initialized after this function can be called. As a result,
 2581   // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated prior
 2582   // to all StubRoutines::_final_stubs_code being set. Checks refer to runtime range checks generated
 2583   // in an I2C stub that ensure that an I2C stub is called from an interpreter frame or stubs.
2584   bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;
2585 
2586   VMRegPair stack_regs[16];
2587   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2588 
2589   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2590   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2591   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2592   CodeBuffer buffer(buf);
2593   short buffer_locs[20];
2594   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2595                                           sizeof(buffer_locs)/sizeof(relocInfo));
2596 
2597   // Make a C heap allocated version of the fingerprint to store in the adapter
2598   AdapterFingerPrint* fingerprint = new AdapterFingerPrint(total_args_passed, sig_bt);

2770       AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2771                               method->is_static(), total_args_passed);
2772       BasicType* sig_bt = si.basic_types();
2773       assert(si.slots() == total_args_passed, "");
2774       BasicType ret_type = si.return_type();
2775 
2776       // Now get the compiled-Java arguments layout.
2777       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2778 
2779       // Generate the compiled-to-native wrapper code
2780       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
2781 
2782       if (nm != nullptr) {
2783         {
2784           MutexLocker pl(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
2785           if (nm->make_in_use()) {
2786             method->set_code(method, nm);
2787           }
2788         }
2789 
2790         DirectiveSet* directive = DirectivesStack::getDefaultDirective(CompileBroker::compiler(CompLevel_simple));
2791         if (directive->PrintAssemblyOption) {
2792           nm->print_code();
2793         }
2794         DirectivesStack::release(directive);
2795       }
2796     }
2797   } // Unlock AdapterHandlerLibrary_lock
2798 
2799 
2800   // Install the generated code.
2801   if (nm != nullptr) {
2802     const char *msg = method->is_static() ? "(static)" : "";
2803     CompileTask::print_ul(nm, msg);
2804     if (PrintCompilation) {
2805       ttyLocker ttyl;
2806       CompileTask::print(tty, nm, msg);
2807     }
2808     nm->post_compiled_method_load_event();
2809   }
2810 }

3008   assert(found, "Should have found handler");
3009 }
3010 
3011 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3012   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3013   if (get_i2c_entry() != nullptr) {
3014     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3015   }
3016   if (get_c2i_entry() != nullptr) {
3017     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3018   }
3019   if (get_c2i_unverified_entry() != nullptr) {
3020     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3021   }
3022   if (get_c2i_no_clinit_check_entry() != nullptr) {
3023     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3024   }
3025   st->cr();
3026 }
3027 
3028 #ifndef PRODUCT
3029 
3030 void AdapterHandlerLibrary::print_statistics() {
3031   print_table_statistics();
3032 }
3033 
3034 #endif /* PRODUCT */
3035 
3036 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3037   assert(current == JavaThread::current(), "pre-condition");
3038   StackOverflow* overflow_state = current->stack_overflow_state();
3039   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3040   overflow_state->set_reserved_stack_activation(current->stack_base());
3041 JRT_END
3042 
3043 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3044   ResourceMark rm(current);
3045   frame activation;
3046   CompiledMethod* nm = nullptr;
3047   int count = 1;
3048 
3049   assert(fr.is_java_frame(), "Must start on Java frame");
3050 
3051   RegisterMap map(JavaThread::current(),
3052                   RegisterMap::UpdateMap::skip,
3053                   RegisterMap::ProcessFrames::skip,
3054                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3055   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/classLoader.hpp"
  27 #include "classfile/javaClasses.inline.hpp"
  28 #include "classfile/stringTable.hpp"
  29 #include "classfile/vmClasses.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "code/codeCache.hpp"
  32 #include "code/compiledIC.hpp"
  33 #include "code/compiledMethod.inline.hpp"
  34 #include "code/scopeDesc.hpp"
  35 #include "code/vtableStubs.hpp"
  36 #include "compiler/abstractCompiler.hpp"
  37 #include "compiler/compileBroker.hpp"
  38 #include "compiler/disassembler.hpp"
  39 #include "gc/shared/barrierSet.hpp"
  40 #include "gc/shared/collectedHeap.hpp"
  41 #include "gc/shared/gcLocker.inline.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "interpreter/interpreterRuntime.hpp"
  44 #include "jvm.h"
  45 #include "jfr/jfrEvents.hpp"
  46 #include "logging/log.hpp"
  47 #include "memory/resourceArea.hpp"
  48 #include "memory/universe.hpp"
  49 #include "metaprogramming/primitiveConversions.hpp"
  50 #include "oops/klass.hpp"
  51 #include "oops/method.inline.hpp"
  52 #include "oops/objArrayKlass.hpp"
  53 #include "oops/oop.inline.hpp"
  54 #include "prims/forte.hpp"
  55 #include "prims/jvmtiExport.hpp"
  56 #include "prims/jvmtiThreadState.hpp"
  57 #include "prims/methodHandles.hpp"
  58 #include "prims/nativeLookup.hpp"
  59 #include "runtime/atomic.hpp"
  60 #include "runtime/frame.inline.hpp"
  61 #include "runtime/handles.inline.hpp"
  62 #include "runtime/init.hpp"
  63 #include "runtime/interfaceSupport.inline.hpp"
  64 #include "runtime/java.hpp"
  65 #include "runtime/javaCalls.hpp"
  66 #include "runtime/jniHandles.inline.hpp"
  67 #include "runtime/perfData.inline.hpp"
  68 #include "runtime/sharedRuntime.hpp"
  69 #include "runtime/stackWatermarkSet.hpp"
  70 #include "runtime/stubRoutines.hpp"
  71 #include "runtime/synchronizer.hpp"
  72 #include "runtime/vframe.inline.hpp"
  73 #include "runtime/vframeArray.hpp"
  74 #include "runtime/vm_version.hpp"
  75 #include "services/management.hpp"
  76 #include "utilities/copy.hpp"
  77 #include "utilities/dtrace.hpp"
  78 #include "utilities/events.hpp"
  79 #include "utilities/resourceHash.hpp"
  80 #include "utilities/macros.hpp"
  81 #include "utilities/xmlstream.hpp"
  82 #ifdef COMPILER1
  83 #include "c1/c1_Runtime1.hpp"
  84 #endif
  85 #if INCLUDE_JFR
  86 #include "jfr/jfr.hpp"
  87 #endif
  88 
  89 // Shared stub locations
  90 RuntimeStub*        SharedRuntime::_wrong_method_blob;
  91 RuntimeStub*        SharedRuntime::_wrong_method_abstract_blob;
  92 RuntimeStub*        SharedRuntime::_ic_miss_blob;
  93 RuntimeStub*        SharedRuntime::_resolve_opt_virtual_call_blob;
  94 RuntimeStub*        SharedRuntime::_resolve_virtual_call_blob;
  95 RuntimeStub*        SharedRuntime::_resolve_static_call_blob;
  96 address             SharedRuntime::_resolve_static_call_entry;
  97 
  98 DeoptimizationBlob* SharedRuntime::_deopt_blob;
  99 SafepointBlob*      SharedRuntime::_polling_page_vectors_safepoint_handler_blob;
 100 SafepointBlob*      SharedRuntime::_polling_page_safepoint_handler_blob;
 101 SafepointBlob*      SharedRuntime::_polling_page_return_handler_blob;
 102 
 103 #ifdef COMPILER2
 104 UncommonTrapBlob*   SharedRuntime::_uncommon_trap_blob;
 105 #endif // COMPILER2
 106 
 107 nmethod*            SharedRuntime::_cont_doYield_stub;
 108 
 109 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
 110 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time     = nullptr;
 111 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time      = nullptr;
 112 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
 113 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time             = nullptr;
 114 
 115 //----------------------------generate_stubs-----------------------------------
 116 void SharedRuntime::generate_stubs() {
 117   _wrong_method_blob                   = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method),          "wrong_method_stub");
 118   _wrong_method_abstract_blob          = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
 119   _ic_miss_blob                        = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss),  "ic_miss_stub");
 120   _resolve_opt_virtual_call_blob       = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C),   "resolve_opt_virtual_call");
 121   _resolve_virtual_call_blob           = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C),       "resolve_virtual_call");
 122   _resolve_static_call_blob            = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C),        "resolve_static_call");
 123   _resolve_static_call_entry           = _resolve_static_call_blob->entry_point();
 124 
 125   AdapterHandlerLibrary::initialize();
 126 
 127 #if COMPILER2_OR_JVMCI
 128   // Vectors are generated only by C2 and JVMCI.
 129   bool support_wide = is_wide_vector(MaxVectorSize);
 130   if (support_wide) {
 131     _polling_page_vectors_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_VECTOR_LOOP);
 132   }
 133 #endif // COMPILER2_OR_JVMCI
 134   _polling_page_safepoint_handler_blob = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_LOOP);
 135   _polling_page_return_handler_blob    = generate_handler_blob(CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception), POLL_AT_RETURN);
 136 
 137   generate_deopt_blob();
 138 
 139 #ifdef COMPILER2
 140   generate_uncommon_trap_blob();
 141 #endif // COMPILER2
 142   if (UsePerfData) {
 143     EXCEPTION_MARK;
 144     NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resolve_opt_virtual_call");
 145     NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time,     SUN_CI, "resolve_virtual_call");
 146     NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time,      SUN_CI, "resolve_static_call");
 147     NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
 148     NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time,             SUN_CI, "ic_miss");
 149     if (HAS_PENDING_EXCEPTION) {
 150       vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
 151     }
 152   }
 153 }
 154 
 155 void SharedRuntime::print_counters_on(outputStream* st) {
 156   st->print_cr("SharedRuntime:");
 157   if (UsePerfData) {
 158     st->print_cr("  resolve_opt_virtual_call: %5ldms (elapsed) %5ldms (thread) / %5d events",
 159                  _perf_resolve_opt_virtual_total_time->elapsed_counter_value_ms(),
 160                  _perf_resolve_opt_virtual_total_time->thread_counter_value_ms(),
 161                  _resolve_opt_virtual_ctr);
 162     st->print_cr("  resolve_virtual_call:     %5ldms (elapsed) %5ldms (thread) / %5d events",
 163                  _perf_resolve_virtual_total_time->elapsed_counter_value_ms(),
 164                  _perf_resolve_virtual_total_time->thread_counter_value_ms(),
 165                  _resolve_virtual_ctr);
 166     st->print_cr("  resolve_static_call:      %5ldms (elapsed) %5ldms (thread) / %5d events",
 167                  _perf_resolve_static_total_time->elapsed_counter_value_ms(),
 168                  _perf_resolve_static_total_time->thread_counter_value_ms(),
 169                  _resolve_static_ctr);
 170     st->print_cr("  handle_wrong_method:      %5ldms (elapsed) %5ldms (thread) / %5d events",
 171                  _perf_handle_wrong_method_total_time->elapsed_counter_value_ms(),
 172                  _perf_handle_wrong_method_total_time->thread_counter_value_ms(),
 173                  _wrong_method_ctr);
 174     st->print_cr("  ic_miss:                  %5ldms (elapsed) %5ldms (thread) / %5d events",
 175                  _perf_ic_miss_total_time->elapsed_counter_value_ms(),
 176                  _perf_ic_miss_total_time->thread_counter_value_ms(),
 177                  _ic_miss_ctr);
 178 
 179     jlong total_elapsed_time_ms = Management::ticks_to_ms(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
 180                                                           _perf_resolve_virtual_total_time->elapsed_counter_value() +
 181                                                           _perf_resolve_static_total_time->elapsed_counter_value() +
 182                                                           _perf_handle_wrong_method_total_time->elapsed_counter_value() +
 183                                                           _perf_ic_miss_total_time->elapsed_counter_value());
 184     jlong total_thread_time_ms = Management::ticks_to_ms(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
 185                                                           _perf_resolve_virtual_total_time->thread_counter_value() +
 186                                                           _perf_resolve_static_total_time->thread_counter_value() +
 187                                                           _perf_handle_wrong_method_total_time->thread_counter_value() +
 188                                                           _perf_ic_miss_total_time->thread_counter_value());
 189     st->print_cr("Total:                      %5ldms (elapsed) %5ldms (thread)", total_elapsed_time_ms, total_thread_time_ms);
 190   } else {
 191     st->print_cr("  no data (UsePerfData is turned off)");
 192   }
 193 }
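// A minimal sketch of how the counters printed above are fed: each resolution
// entry point wraps its body in a scoped PerfTraceTime timer, as visible at
// handle_wrong_method_ic_miss further down; the remaining entry points are
// assumed to follow the same pattern, e.g.:
//
//   JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current))
//     PerfTraceTime timer(_perf_resolve_static_total_time);
//     ...
//   JRT_END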
 194 
 195 #include <math.h>
 196 
 197 // Implementation of SharedRuntime
 198 

 199 // For statistics
 200 uint SharedRuntime::_ic_miss_ctr = 0;
 201 uint SharedRuntime::_wrong_method_ctr = 0;
 202 uint SharedRuntime::_resolve_static_ctr = 0;
 203 uint SharedRuntime::_resolve_virtual_ctr = 0;
 204 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
 205 
 206 #ifndef PRODUCT
 207 uint SharedRuntime::_implicit_null_throws = 0;
 208 uint SharedRuntime::_implicit_div0_throws = 0;
 209 
 210 int64_t SharedRuntime::_nof_normal_calls = 0;
 211 int64_t SharedRuntime::_nof_inlined_calls = 0;
 212 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
 213 int64_t SharedRuntime::_nof_static_calls = 0;
 214 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
 215 int64_t SharedRuntime::_nof_interface_calls = 0;
 216 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
 217 
 218 uint SharedRuntime::_new_instance_ctr=0;
 219 uint SharedRuntime::_new_array_ctr=0;
 220 uint SharedRuntime::_multi2_ctr=0;
 221 uint SharedRuntime::_multi3_ctr=0;
 222 uint SharedRuntime::_multi4_ctr=0;
 223 uint SharedRuntime::_multi5_ctr=0;
 224 uint SharedRuntime::_mon_enter_stub_ctr=0;
 225 uint SharedRuntime::_mon_exit_stub_ctr=0;
 226 uint SharedRuntime::_mon_enter_ctr=0;

 239 uint SharedRuntime::_rethrow_ctr=0;
 240 
 241 int     SharedRuntime::_ICmiss_index                    = 0;
 242 int     SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
 243 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
 244 
 245 
 246 void SharedRuntime::trace_ic_miss(address at) {
 247   for (int i = 0; i < _ICmiss_index; i++) {
 248     if (_ICmiss_at[i] == at) {
 249       _ICmiss_count[i]++;
 250       return;
 251     }
 252   }
 253   int index = _ICmiss_index++;
 254   if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
 255   _ICmiss_at[index] = at;
 256   _ICmiss_count[index] = 1;
 257 }
 258 
 259 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
 260   if (ICMissHistogram) {
 261     st->print_cr("IC Miss Histogram:");
 262     int tot_misses = 0;
 263     for (int i = 0; i < _ICmiss_index; i++) {
 264       st->print_cr("  at: " INTPTR_FORMAT "  nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
 265       tot_misses += _ICmiss_count[i];
 266     }
 267     st->print_cr("Total IC misses: %7d", tot_misses);
 268   }
 269 }
 270 #endif // !PRODUCT
 271 
 272 
 273 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
 274   return x * y;
 275 JRT_END
 276 
 277 
 278 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
 279   if (x == min_jlong && y == CONST64(-1)) {
 280     return x;
 281   } else {
 282     return x / y;
 283   }
 284 JRT_END
 285 
 286 
 287 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
 288   if (x == min_jlong && y == CONST64(-1)) {
 289     return 0;
 290   } else {

 721   jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
 722   JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
 723   JNIHandles::destroy_local(vthread);
 724 JRT_END
 725 #endif // INCLUDE_JVMTI
 726 
 727 // The interpreter code to call this tracing function is only
 728 // called/generated when UL is enabled for the redefine+class tags at the
 729 // right level. Since obsolete methods are never compiled, we don't have
 730 // to modify the compilers to generate calls to this function.
 731 //
 732 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
 733     JavaThread* thread, Method* method))
 734   if (method->is_obsolete()) {
 735     // We are calling an obsolete method, but this is not necessarily
 736     // an error. Our method could have been redefined just after we
 737     // fetched the Method* from the constant pool.
 738     ResourceMark rm;
 739     log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
 740   }
 741 
 742   LogStreamHandle(Trace, interpreter, bytecode) log;
 743   if (log.is_enabled()) {
 744     ResourceMark rm;
 745     log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
 746               p2i(thread),
 747               (method->is_static() ? "static" : "virtual"),
 748               method->name_and_sig_as_C_string(),
 749               (method->is_native() ? " native" : ""),
 750               (thread->class_being_initialized() != nullptr ? " clinit" : ""),
 751               (method->method_holder()->is_initialized() ? "" : " being_initialized"));
 752   }
 753   return 0;
 754 JRT_END
 755 
 756 // ret_pc points into caller; we are returning caller's exception handler
 757 // for given exception
 758 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
 759 address SharedRuntime::compute_compiled_exc_handler(CompiledMethod* cm, address ret_pc, Handle& exception,
 760                                                     bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
 761   assert(cm != nullptr, "must exist");
 762   ResourceMark rm;
 763 
 764 #if INCLUDE_JVMCI
 765   if (cm->is_compiled_by_jvmci()) {
 766     // lookup exception handler for this pc
 767     int catch_pco = pointer_delta_as_int(ret_pc, cm->code_begin());
 768     ExceptionHandlerTable table(cm);
 769     HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
 770     if (t != nullptr) {
 771       return cm->code_begin() + t->pco();
 772     } else {

1373 
1374   // determine call info & receiver
1375   // note: a) receiver is null for static calls
1376   //       b) an exception is thrown if receiver is null for non-static calls
1377   CallInfo call_info;
1378   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1379   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1380 
1381   NoSafepointVerifier nsv;
1382 
1383   methodHandle callee_method(current, call_info.selected_method());
1384 
1385   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1386          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1387          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1388          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1389          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1390 
1391   assert(!caller_nm->is_unloading(), "It should not be unloading");
1392 

1393   // tracing/debugging/statistics
1394   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1395                  (is_virtual) ? (&_resolve_virtual_ctr) :
1396                                 (&_resolve_static_ctr);
1397   Atomic::inc(addr);
1398 
1399 #ifndef PRODUCT
1400   if (TraceCallFixup) {
1401     ResourceMark rm(current);
1402     tty->print("resolving %s%s (%s) call to",
1403                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1404                Bytecodes::name(invoke_code));
1405     callee_method->print_short_name(tty);
1406     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1407                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1408   }
1409 #endif
1410 
1411   if (invoke_code == Bytecodes::_invokestatic) {
1412     assert(callee_method->method_holder()->is_initialized() ||
1413            callee_method->method_holder()->is_init_thread(current),
1414            "invalid class initialization state for invoke_static");
1415     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1416       // In order to keep class initialization check, do not patch call
1417       // site for static call when the class is not fully initialized.
1418       // Proper check is enforced by call site re-resolution on every invocation.
1419       //

1435 
1436   // Make sure the callee nmethod does not get deoptimized and removed before
1437   // we are done patching the code.
1438 
1439 
1440   CompiledICLocker ml(caller_nm);
1441   if (is_virtual && !is_optimized) {
1442     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1443     inline_cache->update(&call_info, receiver->klass());
1444   } else {
1445     // Callsite is a direct call - set it to the destination method
1446     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1447     callsite->set(callee_method);
1448   }
1449 
1450   return callee_method;
1451 }
1452 
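     // The patching above leaves a compiled call site in one of a few shapes.
     // A rough sketch of the states the IC machinery distinguishes (an
     // illustrative enum only, not the actual CompiledIC implementation):
     //
     //   enum class CallSiteState {
     //     clean,        // unresolved; the next call enters a resolve_*_call_C stub
     //     direct,       // static / optimized virtual: jumps straight to the callee
     //     monomorphic,  // inline cache bound to a single receiver klass
     //     megamorphic   // dispatched through a vtable/itable stub
     //   };
     //
     // handle_wrong_method_ic_miss() below is the slow path taken when a
     // monomorphic inline cache is entered with a receiver klass it is not
     // bound to.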
1453 // Inline caches exist only in compiled code
1454 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1455   PerfTraceTime timer(_perf_ic_miss_total_time);
1456 
1457 #ifdef ASSERT
1458   RegisterMap reg_map(current,
1459                       RegisterMap::UpdateMap::skip,
1460                       RegisterMap::ProcessFrames::include,
1461                       RegisterMap::WalkContinuation::skip);
1462   frame stub_frame = current->last_frame();
1463   assert(stub_frame.is_runtime_frame(), "sanity check");
1464   frame caller_frame = stub_frame.sender(&reg_map);
1465   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1466 #endif /* ASSERT */
1467 
1468   methodHandle callee_method;
1469   JRT_BLOCK
1470     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1471     // Return Method* through TLS
1472     current->set_vm_result_2(callee_method());
1473   JRT_BLOCK_END
1474   // return compiled code entry point after potential safepoints
1475   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1476   return callee_method->verified_code_entry();
1477 JRT_END
1478 
1479 
1480 // Handle call site that has been made non-entrant
1481 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1482   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1483 
 1484   // 6243940: We might end up in here if the callee is deoptimized
 1485   // as we race to call it.  We don't want to take a safepoint if
 1486   // the caller was interpreted, because the caller frame will look
 1487   // interpreted to the stack walkers while the arguments are already
 1488   // "compiled", so it is much better to make this transition
 1489   // invisible to the stack walking code. The i2c path places the
 1490   // callee method in callee_target. It is stashed there because if
 1491   // we tried to find the callee by normal means a safepoint would be
 1492   // possible and we would have trouble GC'ing the compiled arguments.
1493   RegisterMap reg_map(current,
1494                       RegisterMap::UpdateMap::skip,
1495                       RegisterMap::ProcessFrames::include,
1496                       RegisterMap::WalkContinuation::skip);
1497   frame stub_frame = current->last_frame();
1498   assert(stub_frame.is_runtime_frame(), "sanity check");
1499   frame caller_frame = stub_frame.sender(&reg_map);
1500 
1501   if (caller_frame.is_interpreted_frame() ||
1502       caller_frame.is_entry_frame() ||
1503       caller_frame.is_upcall_stub_frame()) {

1517       return callee->get_c2i_no_clinit_check_entry();
1518     } else {
1519       return callee->get_c2i_entry();
1520     }
1521   }
1522 
1523   // Must be compiled to compiled path which is safe to stackwalk
1524   methodHandle callee_method;
1525   JRT_BLOCK
1526     // Force resolving of caller (if we called from compiled frame)
1527     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1528     current->set_vm_result_2(callee_method());
1529   JRT_BLOCK_END
1530   // return compiled code entry point after potential safepoints
1531   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1532   return callee_method->verified_code_entry();
1533 JRT_END
1534 
1535 // Handle abstract method call
1536 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1537   PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1538 
1539   // Verbose error message for AbstractMethodError.
1540   // Get the called method from the invoke bytecode.
1541   vframeStream vfst(current, true);
1542   assert(!vfst.at_end(), "Java frame must exist");
1543   methodHandle caller(current, vfst.method());
1544   Bytecode_invoke invoke(caller, vfst.bci());
1545   DEBUG_ONLY( invoke.verify(); )
1546 
1547   // Find the compiled caller frame.
1548   RegisterMap reg_map(current,
1549                       RegisterMap::UpdateMap::include,
1550                       RegisterMap::ProcessFrames::include,
1551                       RegisterMap::WalkContinuation::skip);
1552   frame stubFrame = current->last_frame();
1553   assert(stubFrame.is_runtime_frame(), "must be");
1554   frame callerFrame = stubFrame.sender(&reg_map);
1555   assert(callerFrame.is_compiled_frame(), "must be");
1556 
1557   // Install exception and return forward entry.
1558   address res = StubRoutines::throw_AbstractMethodError_entry();
1559   JRT_BLOCK
1560     methodHandle callee(current, invoke.static_target(current));
1561     if (!callee.is_null()) {
1562       oop recv = callerFrame.retrieve_receiver(&reg_map);
1563       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1564       res = StubRoutines::forward_exception_entry();
1565       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1566     }
1567   JRT_BLOCK_END
1568   return res;
1569 JRT_END
1570 
1571 
1572 // resolve a static call and patch code
1573 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1574   PerfTraceTime timer(_perf_resolve_static_total_time);
1575 
1576   methodHandle callee_method;
1577   bool enter_special = false;
1578   JRT_BLOCK
1579     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1580     current->set_vm_result_2(callee_method());
1581 
1582     if (current->is_interp_only_mode()) {
1583       RegisterMap reg_map(current,
1584                           RegisterMap::UpdateMap::skip,
1585                           RegisterMap::ProcessFrames::include,
1586                           RegisterMap::WalkContinuation::skip);
1587       frame stub_frame = current->last_frame();
1588       assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1589       frame caller = stub_frame.sender(&reg_map);
1590       enter_special = caller.cb() != nullptr && caller.cb()->is_compiled()
1591         && caller.cb()->as_compiled_method()->method()->is_continuation_enter_intrinsic();
1592     }
1593   JRT_BLOCK_END
1594 
1595   if (current->is_interp_only_mode() && enter_special) {
 1596     // enterSpecial is compiled and calls this method to resolve the call to Continuation::enter,
 1597     // but in interp_only_mode we need to go to the interpreted entry instead.
 1598     // The c2i adapter won't patch the call site in this mode -- see fixup_callers_callsite.
 1599     //
 1600     // This should probably be done in all cases, not just enterSpecial (see JDK-8218403),
 1601     // but that's part of a larger fix, and the situation is worse for enterSpecial, as it has no
 1602     // interpreted version.
1603     return callee_method->get_c2i_entry();
1604   }
1605 
1606   // return compiled code entry point after potential safepoints
1607   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1608   return callee_method->verified_code_entry();
1609 JRT_END
1610 
1611 
1612 // resolve virtual call and update inline cache to monomorphic
1613 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1614   PerfTraceTime timer(_perf_resolve_virtual_total_time);
1615 
1616   methodHandle callee_method;
1617   JRT_BLOCK
1618     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1619     current->set_vm_result_2(callee_method());
1620   JRT_BLOCK_END
1621   // return compiled code entry point after potential safepoints
1622   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1623   return callee_method->verified_code_entry();
1624 JRT_END
1625 
1626 
1627 // Resolve a virtual call that can be statically bound (e.g., always
1628 // monomorphic, so it has no inline cache).  Patch code to resolved target.
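     // Typical examples are virtual call sites whose target the compiler could
     // prove unique, e.g. calls to final methods, calls where the receiver's
     // exact type is known, or calls devirtualized by class hierarchy analysis.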
1629 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1630   PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1631 
1632   methodHandle callee_method;
1633   JRT_BLOCK
1634     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1635     current->set_vm_result_2(callee_method());
1636   JRT_BLOCK_END
1637   // return compiled code entry point after potential safepoints
1638   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1639   return callee_method->verified_code_entry();
1640 JRT_END
1641 
1642 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1643   JavaThread* current = THREAD;
1644   ResourceMark rm(current);
1645   CallInfo call_info;
1646   Bytecodes::Code bc;
1647 
1648   // receiver is null for static calls. An exception is thrown for null
1649   // receivers for non-static calls
1650   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1651 
1652   methodHandle callee_method(current, call_info.selected_method());
1653 

1654   Atomic::inc(&_ic_miss_ctr);
1655 
1656 #ifndef PRODUCT
1657   // Statistics & Tracing
1658   if (TraceCallFixup) {
1659     ResourceMark rm(current);
1660     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1661     callee_method->print_short_name(tty);
1662     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1663   }
1664 
1665   if (ICMissHistogram) {
1666     MutexLocker m(VMStatistic_lock);
1667     RegisterMap reg_map(current,
1668                         RegisterMap::UpdateMap::skip,
1669                         RegisterMap::ProcessFrames::include,
1670                         RegisterMap::WalkContinuation::skip);
 1671     frame f = current->last_frame().real_sender(&reg_map); // skip runtime stub
1672     // produce statistics under the lock
1673     trace_ic_miss(f.pc());
1674   }
1675 #endif
1676 

1758             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1759             cdc->set_to_clean();
1760             break;
1761           }
1762 
1763           case relocInfo::virtual_call_type: {
1764             // compiled, dispatched call (which used to call an interpreted method)
1765             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1766             inline_cache->set_to_clean();
1767             break;
1768           }
1769           default:
1770             break;
1771         }
1772       }
1773     }
1774   }
1775 
1776   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1777 


1778   Atomic::inc(&_wrong_method_ctr);
1779 
1780 #ifndef PRODUCT
1781   if (TraceCallFixup) {
1782     ResourceMark rm(current);
1783     tty->print("handle_wrong_method reresolving call to");
1784     callee_method->print_short_name(tty);
1785     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1786   }
1787 #endif
1788 
1789   return callee_method;
1790 }
1791 
1792 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1793   // The faulting unsafe accesses should be changed to throw the error
1794   // synchronously instead. Meanwhile the faulting instruction will be
1795   // skipped over (effectively turning it into a no-op) and an
1796   // asynchronous exception will be raised which the thread will
1797   // handle at a later point. If the instruction is a load it will
1798   // return garbage.
1799 
1800   // Request an async exception.

2023     if (CheckJNICalls) {
2024       fatal("Object has been unlocked by JNI");
2025     }
2026     return;
2027   }
2028   ObjectSynchronizer::exit(obj, lock, current);
2029 }
2030 
2031 // Handles the uncommon cases of monitor unlocking in compiled code
2032 JRT_LEAF(void, SharedRuntime::complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current))
2033   assert(current == JavaThread::current(), "pre-condition");
2034   SharedRuntime::monitor_exit_helper(obj, lock, current);
2035 JRT_END
2036 
2037 #ifndef PRODUCT
2038 
2039 void SharedRuntime::print_statistics() {
2040   ttyLocker ttyl;
2041   if (xtty != nullptr)  xtty->head("statistics type='SharedRuntime'");
2042 
2043   SharedRuntime::print_ic_miss_histogram_on(tty);
2044   SharedRuntime::print_counters_on(tty);
2045   AdapterHandlerLibrary::print_statistics_on(tty);

2046 
2047   if (xtty != nullptr)  xtty->tail("statistics");
2048 }
2049 
2050 //void SharedRuntime::print_counters_on(outputStream* st) {
2051 //  // Dump the JRT_ENTRY counters
2052 //  if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2053 //  if (_new_array_ctr)    st->print_cr("%5u new array requires GC", _new_array_ctr);
2054 //  if (_multi2_ctr)       st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2055 //  if (_multi3_ctr)       st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2056 //  if (_multi4_ctr)       st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2057 //  if (_multi5_ctr)       st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2058 //
2059 //  st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2060 //  st->print_cr("%5u wrong method", _wrong_method_ctr);
2061 //  st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2062 //  st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2063 //  st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2064 //
2065 //  if (_mon_enter_stub_ctr)       st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2066 //  if (_mon_exit_stub_ctr)        st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2067 //  if (_mon_enter_ctr)            st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2068 //  if (_mon_exit_ctr)             st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2069 //  if (_partial_subtype_ctr)      st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2070 //  if (_jbyte_array_copy_ctr)     st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2071 //  if (_jshort_array_copy_ctr)    st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2072 //  if (_jint_array_copy_ctr)      st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2073 //  if (_jlong_array_copy_ctr)     st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2074 //  if (_oop_array_copy_ctr)       st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2075 //  if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2076 //  if (_unsafe_array_copy_ctr)    st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2077 //  if (_generic_array_copy_ctr)   st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2078 //  if (_slow_array_copy_ctr)      st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2079 //  if (_find_handler_ctr)         st->print_cr("%5u find exception handler", _find_handler_ctr);
2080 //  if (_rethrow_ctr)              st->print_cr("%5u rethrow handler", _rethrow_ctr);
2081 //}
2082 
2083 inline double percent(int64_t x, int64_t y) {
2084   return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2085 }
2086 
2087 class MethodArityHistogram {
2088  public:
2089   enum { MAX_ARITY = 256 };
2090  private:
2091   static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2092   static uint64_t _size_histogram[MAX_ARITY];  // histogram of arg size in words
2093   static uint64_t _total_compiled_calls;
2094   static uint64_t _max_compiled_calls_per_method;
2095   static int _max_arity;                       // max. arity seen
2096   static int _max_size;                        // max. arg size seen
2097 
2098   static void add_method_to_histogram(nmethod* nm) {
2099     Method* method = (nm == nullptr) ? nullptr : nm->method();
2100     if (method != nullptr) {
2101       ArgumentCount args(method->signature());
2102       int arity   = args.size() + (method->is_static() ? 0 : 1);

2147     // Take the Compile_lock to protect against changes in the CodeBlob structures
2148     MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2149     // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2150     MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2151     _max_arity = _max_size = 0;
2152     _total_compiled_calls = 0;
2153     _max_compiled_calls_per_method = 0;
2154     for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2155     CodeCache::nmethods_do(add_method_to_histogram);
2156     print_histogram();
2157   }
2158 };
2159 
2160 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2161 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2162 uint64_t MethodArityHistogram::_total_compiled_calls;
2163 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2164 int MethodArityHistogram::_max_arity;
2165 int MethodArityHistogram::_max_size;
2166 
2167 void SharedRuntime::print_call_statistics_on(outputStream* st) {
 2168   st->print_cr("Calls from compiled code:");
 2169   int64_t total  = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
 2170   int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
 2171   int64_t mono_i = _nof_interface_calls;
 2172   st->print_cr("\t" INT64_FORMAT_W(12) " (100%%)  total non-inlined   ", total);
 2173   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls       ", _nof_normal_calls, percent(_nof_normal_calls, total));
 2174   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
 2175   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_c, percent(mono_c, _nof_normal_calls));
 2176   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- megamorphic      ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
 2177   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls     ", _nof_interface_calls, percent(_nof_interface_calls, total));
 2178   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
 2179   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- monomorphic      ", mono_i, percent(mono_i, _nof_interface_calls));
 2180   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
 2181   st->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) |  |- inlined          ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
 2182   st->cr();
 2183   st->print_cr("Note 1: counter updates are not MT-safe.");
 2184   st->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
 2185   st->print_cr("        %% in nested categories are relative to their category");
 2186   st->print_cr("        (and thus add up to more than 100%% with inlining)");
 2187   st->cr();

2400                   AdapterFingerPrint::equals>;
2401 static AdapterHandlerTable* _adapter_handler_table;
2402 
 2403 // Find an entry with the same fingerprint if it exists
2404 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2405   NOT_PRODUCT(_lookups++);
2406   assert_lock_strong(AdapterHandlerLibrary_lock);
2407   AdapterFingerPrint fp(total_args_passed, sig_bt);
2408   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2409   if (entry != nullptr) {
2410 #ifndef PRODUCT
2411     if (fp.is_compact()) _compact++;
2412     _hits++;
2413 #endif
2414     return *entry;
2415   }
2416   return nullptr;
2417 }
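     // Adapter sharing is keyed purely on the fingerprint of the argument
     // BasicTypes, so methods whose signatures reduce to the same shape share a
     // single i2c/c2i adapter. A hypothetical example (names are illustrative):
     //
     //   void foo(String s, int i)   // receiver + T_OBJECT + T_INT
     //   void bar(Object o, int i)   // receiver + T_OBJECT + T_INT
     //
     // Both instance methods map to equal AdapterFingerPrints and therefore to
     // the same AdapterHandlerEntry.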
2418 
2419 #ifndef PRODUCT
2420 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2421   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2422     return sizeof(*key) + sizeof(*a);
2423   };
2424   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2425   ts.print(st, "AdapterHandlerTable");
2426   st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2427                _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2428   st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2429                _lookups, _equals, _hits, _compact);
2430 }
2431 #endif // !PRODUCT
2432 
2433 // ---------------------------------------------------------------------------
2434 // Implementation of AdapterHandlerLibrary
2435 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2436 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2437 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2438 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2439 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2440 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2441 const int AdapterHandlerLibrary_size = 16*K;
2442 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2443 
2444 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2445   return _buffer;
2446 }
2447 
2448 static void post_adapter_creation(const AdapterBlob* new_adapter,
2449                                   const AdapterHandlerEntry* entry) {
2450   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2451     char blob_id[256];

2645         delete comparison_entry;
2646       }
2647 #endif
2648       return entry;
2649     }
2650 
2651     entry = create_adapter(new_adapter, total_args_passed, sig_bt, /* allocate_code_blob */ true);
2652   }
2653 
2654   // Outside of the lock
2655   if (new_adapter != nullptr) {
2656     post_adapter_creation(new_adapter, entry);
2657   }
2658   return entry;
2659 }
2660 
2661 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2662                                                            int total_args_passed,
2663                                                            BasicType* sig_bt,
2664                                                            bool allocate_code_blob) {
2665   if (UsePerfData) {
2666     ClassLoader::perf_method_adapters_count()->inc();
2667   }
2668 
 2669   // StubRoutines::_final_stubs_code is initialized after this function can already be called. As a
 2670   // result, VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated
 2671   // before StubRoutines::_final_stubs_code was set. The checks in question are runtime range checks,
 2672   // generated in an I2C stub, that verify that the I2C stub is entered from an interpreter frame or from stubs.
2673   bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;
2674 
2675   VMRegPair stack_regs[16];
2676   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2677 
2678   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2679   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2680   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2681   CodeBuffer buffer(buf);
2682   short buffer_locs[20];
2683   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2684                                           sizeof(buffer_locs)/sizeof(relocInfo));
2685 
2686   // Make a C heap allocated version of the fingerprint to store in the adapter
2687   AdapterFingerPrint* fingerprint = new AdapterFingerPrint(total_args_passed, sig_bt);

2859       AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2860                               method->is_static(), total_args_passed);
2861       BasicType* sig_bt = si.basic_types();
2862       assert(si.slots() == total_args_passed, "");
2863       BasicType ret_type = si.return_type();
2864 
2865       // Now get the compiled-Java arguments layout.
2866       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2867 
2868       // Generate the compiled-to-native wrapper code
2869       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
2870 
2871       if (nm != nullptr) {
2872         {
2873           MutexLocker pl(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
2874           if (nm->make_in_use()) {
2875             method->set_code(method, nm);
2876           }
2877         }
2878 
2879         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
2880         if (directive->PrintAssemblyOption) {
2881           nm->print_code();
2882         }
2883         DirectivesStack::release(directive);
2884       }
2885     }
2886   } // Unlock AdapterHandlerLibrary_lock
2887 
2888 
2889   // Install the generated code.
2890   if (nm != nullptr) {
2891     const char *msg = method->is_static() ? "(static)" : "";
2892     CompileTask::print_ul(nm, msg);
2893     if (PrintCompilation) {
2894       ttyLocker ttyl;
2895       CompileTask::print(tty, nm, msg);
2896     }
2897     nm->post_compiled_method_load_event();
2898   }
2899 }

3097   assert(found, "Should have found handler");
3098 }
3099 
3100 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3101   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3102   if (get_i2c_entry() != nullptr) {
3103     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3104   }
3105   if (get_c2i_entry() != nullptr) {
3106     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3107   }
3108   if (get_c2i_unverified_entry() != nullptr) {
3109     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3110   }
3111   if (get_c2i_no_clinit_check_entry() != nullptr) {
3112     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3113   }
3114   st->cr();
3115 }
3116 








3117 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3118   assert(current == JavaThread::current(), "pre-condition");
3119   StackOverflow* overflow_state = current->stack_overflow_state();
3120   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3121   overflow_state->set_reserved_stack_activation(current->stack_base());
3122 JRT_END
3123 
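     // The helper below walks the stack looking for a method annotated with
     // jdk.internal.vm.annotation.ReservedStackAccess (JEP 270). Roughly: if such
     // a frame is found, the reserved stack area is made available so that the
     // annotated critical section can complete before a StackOverflowError is
     // delivered.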
3124 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3125   ResourceMark rm(current);
3126   frame activation;
3127   CompiledMethod* nm = nullptr;
3128   int count = 1;
3129 
3130   assert(fr.is_java_frame(), "Must start on Java frame");
3131 
3132   RegisterMap map(JavaThread::current(),
3133                   RegisterMap::UpdateMap::skip,
3134                   RegisterMap::ProcessFrames::skip,
3135                   RegisterMap::WalkContinuation::skip); // don't walk continuations
3136   for (; !fr.is_first_frame(); fr = fr.sender(&map)) {